GH-119866: Spill the stack around escaping calls. (GH-124392)

* Spill the evaluation stack around escaping calls in the generated interpreter and JIT.

* The code generator tracks live, cached values so they can be saved to memory when needed.

* Spill the stack pointer around escaping calls, so that the exact stack is visible to the cycle GC.
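For illustration, the code below is an annotated copy of the generated C that the new test_sync_sp case (further down in this diff) expects for a test instruction A. The instruction A and escaping_call() are placeholders from that test, not real interpreter opcodes, but the spill/reload bracketing is exactly the pattern the code generator now emits around escaping calls.

TARGET(A) {
    frame->instr_ptr = next_instr;
    next_instr += 1;
    INSTRUCTION_STATS(A);
    _PyStackRef res;
    /* SYNC_SP(): the consumed input is popped before the call. */
    stack_pointer += -1;
    assert(WITHIN_STACK_BOUNDS());
    /* Spill: store the exact stack pointer into the frame so the cycle GC
     * sees the true stack while the escaping call runs arbitrary code. */
    _PyFrame_SetStackPointer(frame, stack_pointer);
    escaping_call();   /* placeholder for any call that may run Python code */
    /* Reload the cached stack pointer once the call returns. */
    stack_pointer = _PyFrame_GetStackPointer(frame);
    res = Py_None;
    stack_pointer[0] = res;
    stack_pointer += 1;
    assert(WITHIN_STACK_BOUNDS());
    DISPATCH();
}

The companion change to DISPATCH() in the ceval_macros.h hunk below adds assert(frame->stackpointer == NULL), which appears to enforce that any spilled pointer has been reloaded (and the frame field cleared) before the next instruction dispatches.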
Mark Shannon 2024-10-07 14:56:39 +01:00 committed by GitHub
parent cda3b5a576
commit da071fa3e8
25 changed files with 3841 additions and 2034 deletions


@ -251,6 +251,7 @@ typedef struct _special_method {
} _Py_SpecialMethod;
PyAPI_DATA(const _Py_SpecialMethod) _Py_SpecialMethods[];
PyAPI_DATA(const size_t) _Py_FunctionAttributeOffsets[];
PyAPI_FUNC(int) _PyEval_CheckExceptStarTypeValid(PyThreadState *tstate, PyObject* right);
PyAPI_FUNC(int) _PyEval_CheckExceptTypeValid(PyThreadState *tstate, PyObject* right);
@ -274,6 +275,8 @@ PyAPI_FUNC(PyObject *) _PyEval_GetANext(PyObject *aiter);
PyAPI_FUNC(void) _PyEval_LoadGlobalStackRef(PyObject *globals, PyObject *builtins, PyObject *name, _PyStackRef *writeto);
PyAPI_FUNC(PyObject *) _PyEval_GetAwaitable(PyObject *iterable, int oparg);
PyAPI_FUNC(PyObject *) _PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *name);
PyAPI_FUNC(int)
_Py_Check_ArgsIterable(PyThreadState *tstate, PyObject *func, PyObject *args);
/* Bits that can be set in PyThreadState.eval_breaker */
#define _PY_GIL_DROP_REQUEST_BIT (1U << 0)


@ -483,8 +483,6 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
return 5;
case YIELD_VALUE:
return 1;
case _DO_CALL_FUNCTION_EX:
return 3 + (oparg & 1);
default:
return -1;
}
@ -944,8 +942,6 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
return 6;
case YIELD_VALUE:
return 1;
case _DO_CALL_FUNCTION_EX:
return 1;
default:
return -1;
}
@ -1022,7 +1018,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG },
[BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG },
[BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG },
[BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG },
[BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG },
[BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG | HAS_ERROR_FLAG },
[BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC, HAS_EXIT_FLAG },
@ -1030,7 +1026,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[BINARY_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[BINARY_SUBSCR_DICT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[BINARY_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG },
[BINARY_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG },
[BINARY_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG },
[BINARY_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG },
[BINARY_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG },
@ -1079,7 +1075,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[CONTAINS_OP] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[CONTAINS_OP_DICT] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[CONTAINS_OP_SET] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[CONVERT_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[CONVERT_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[COPY] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_PURE_FLAG },
[COPY_FREE_VARS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG },
[DELETE_ATTR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@ -1114,8 +1110,8 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_CALL_FUNCTION_EX] = { true, INSTR_FMT_IX, 0 },
[INSTRUMENTED_CALL_KW] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG },
[INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG },
[INSTRUMENTED_END_FOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_END_SEND] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@ -1127,10 +1123,10 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[INSTRUMENTED_POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },
[INSTRUMENTED_POP_JUMP_IF_TRUE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },
[INSTRUMENTED_RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_RETURN_VALUE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[INSTRUMENTED_YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[INTERPRETER_EXIT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG },
[INTERPRETER_EXIT] = { true, INSTR_FMT_IX, 0 },
[IS_OP] = { true, INSTR_FMT_IB, HAS_ARG_FLAG },
[JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[JUMP_BACKWARD_NO_INTERRUPT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG },
@ -1140,7 +1136,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[LOAD_ATTR] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[LOAD_ATTR_CLASS] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_CLASS_WITH_METACLASS_CHECK] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG },
[LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG },
[LOAD_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_METHOD_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG },
@ -1148,7 +1144,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[LOAD_ATTR_MODULE] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG },
[LOAD_ATTR_NONDESCRIPTOR_NO_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG },
[LOAD_ATTR_PROPERTY] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_SLOT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
[LOAD_ATTR_WITH_HINT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG },
[LOAD_BUILD_CLASS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@ -1171,7 +1167,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[LOAD_SUPER_ATTR_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[LOAD_SUPER_ATTR_METHOD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[MAKE_CELL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_FREE_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG },
[MAKE_FUNCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[MAKE_FUNCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[MAP_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[MATCH_CLASS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[MATCH_KEYS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@ -1192,13 +1188,13 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[RESUME] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[RESUME_CHECK] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG },
[RETURN_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG },
[RETURN_GENERATOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[RETURN_GENERATOR] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[RETURN_VALUE] = { true, INSTR_FMT_IX, 0 },
[SEND] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[SEND] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[SEND_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG },
[SETUP_ANNOTATIONS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[SET_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[SET_FUNCTION_ATTRIBUTE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG },
[SET_FUNCTION_ATTRIBUTE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG },
[SET_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[STORE_ATTR] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[STORE_ATTR_INSTANCE_VALUE] = { true, INSTR_FMT_IXC000, HAS_EXIT_FLAG },
@ -1218,10 +1214,10 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[TO_BOOL] = { true, INSTR_FMT_IXC00, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[TO_BOOL_ALWAYS_TRUE] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG },
[TO_BOOL_BOOL] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG },
[TO_BOOL_INT] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG },
[TO_BOOL_INT] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG },
[TO_BOOL_LIST] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG },
[TO_BOOL_NONE] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG },
[TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG | HAS_ESCAPES_FLAG },
[TO_BOOL_STR] = { true, INSTR_FMT_IXC00, HAS_EXIT_FLAG },
[UNARY_INVERT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[UNARY_NEGATIVE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[UNARY_NOT] = { true, INSTR_FMT_IX, HAS_PURE_FLAG },
@ -1231,8 +1227,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[266] = {
[UNPACK_SEQUENCE_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG },
[UNPACK_SEQUENCE_TWO_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG },
[WITH_EXCEPT_START] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ESCAPES_FLAG },
[_DO_CALL_FUNCTION_EX] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[YIELD_VALUE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG },
[JUMP] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[JUMP_IF_FALSE] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[JUMP_IF_TRUE] = { true, -1, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@ -1661,7 +1656,6 @@ const char *_PyOpcode_OpName[266] = {
[UNPACK_SEQUENCE_TWO_TUPLE] = "UNPACK_SEQUENCE_TWO_TUPLE",
[WITH_EXCEPT_START] = "WITH_EXCEPT_START",
[YIELD_VALUE] = "YIELD_VALUE",
[_DO_CALL_FUNCTION_EX] = "_DO_CALL_FUNCTION_EX",
};
#endif
@ -1908,12 +1902,12 @@ const uint8_t _PyOpcode_Deopt[256] = {
[UNPACK_SEQUENCE_TWO_TUPLE] = UNPACK_SEQUENCE,
[WITH_EXCEPT_START] = WITH_EXCEPT_START,
[YIELD_VALUE] = YIELD_VALUE,
[_DO_CALL_FUNCTION_EX] = _DO_CALL_FUNCTION_EX,
};
#endif // NEED_OPCODE_METADATA
#define EXTRA_CASES \
case 116: \
case 117: \
case 118: \
case 119: \


@ -97,54 +97,55 @@ extern "C" {
#define _DICT_MERGE DICT_MERGE
#define _DICT_UPDATE DICT_UPDATE
#define _DO_CALL 356
#define _DO_CALL_KW 357
#define _DYNAMIC_EXIT 358
#define _DO_CALL_FUNCTION_EX 357
#define _DO_CALL_KW 358
#define _DYNAMIC_EXIT 359
#define _END_SEND END_SEND
#define _ERROR_POP_N 359
#define _ERROR_POP_N 360
#define _EXIT_INIT_CHECK EXIT_INIT_CHECK
#define _EXPAND_METHOD 360
#define _EXPAND_METHOD_KW 361
#define _FATAL_ERROR 362
#define _EXPAND_METHOD 361
#define _EXPAND_METHOD_KW 362
#define _FATAL_ERROR 363
#define _FORMAT_SIMPLE FORMAT_SIMPLE
#define _FORMAT_WITH_SPEC FORMAT_WITH_SPEC
#define _FOR_ITER 363
#define _FOR_ITER_GEN_FRAME 364
#define _FOR_ITER_TIER_TWO 365
#define _FOR_ITER 364
#define _FOR_ITER_GEN_FRAME 365
#define _FOR_ITER_TIER_TWO 366
#define _GET_AITER GET_AITER
#define _GET_ANEXT GET_ANEXT
#define _GET_AWAITABLE GET_AWAITABLE
#define _GET_ITER GET_ITER
#define _GET_LEN GET_LEN
#define _GET_YIELD_FROM_ITER GET_YIELD_FROM_ITER
#define _GUARD_BOTH_FLOAT 366
#define _GUARD_BOTH_INT 367
#define _GUARD_BOTH_UNICODE 368
#define _GUARD_BUILTINS_VERSION 369
#define _GUARD_DORV_NO_DICT 370
#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 371
#define _GUARD_GLOBALS_VERSION 372
#define _GUARD_IS_FALSE_POP 373
#define _GUARD_IS_NONE_POP 374
#define _GUARD_IS_NOT_NONE_POP 375
#define _GUARD_IS_TRUE_POP 376
#define _GUARD_KEYS_VERSION 377
#define _GUARD_NOS_FLOAT 378
#define _GUARD_NOS_INT 379
#define _GUARD_NOT_EXHAUSTED_LIST 380
#define _GUARD_NOT_EXHAUSTED_RANGE 381
#define _GUARD_NOT_EXHAUSTED_TUPLE 382
#define _GUARD_TOS_FLOAT 383
#define _GUARD_TOS_INT 384
#define _GUARD_TYPE_VERSION 385
#define _GUARD_BOTH_FLOAT 367
#define _GUARD_BOTH_INT 368
#define _GUARD_BOTH_UNICODE 369
#define _GUARD_BUILTINS_VERSION 370
#define _GUARD_DORV_NO_DICT 371
#define _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT 372
#define _GUARD_GLOBALS_VERSION 373
#define _GUARD_IS_FALSE_POP 374
#define _GUARD_IS_NONE_POP 375
#define _GUARD_IS_NOT_NONE_POP 376
#define _GUARD_IS_TRUE_POP 377
#define _GUARD_KEYS_VERSION 378
#define _GUARD_NOS_FLOAT 379
#define _GUARD_NOS_INT 380
#define _GUARD_NOT_EXHAUSTED_LIST 381
#define _GUARD_NOT_EXHAUSTED_RANGE 382
#define _GUARD_NOT_EXHAUSTED_TUPLE 383
#define _GUARD_TOS_FLOAT 384
#define _GUARD_TOS_INT 385
#define _GUARD_TYPE_VERSION 386
#define _IMPORT_FROM IMPORT_FROM
#define _IMPORT_NAME IMPORT_NAME
#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 386
#define _INIT_CALL_PY_EXACT_ARGS 387
#define _INIT_CALL_PY_EXACT_ARGS_0 388
#define _INIT_CALL_PY_EXACT_ARGS_1 389
#define _INIT_CALL_PY_EXACT_ARGS_2 390
#define _INIT_CALL_PY_EXACT_ARGS_3 391
#define _INIT_CALL_PY_EXACT_ARGS_4 392
#define _INIT_CALL_BOUND_METHOD_EXACT_ARGS 387
#define _INIT_CALL_PY_EXACT_ARGS 388
#define _INIT_CALL_PY_EXACT_ARGS_0 389
#define _INIT_CALL_PY_EXACT_ARGS_1 390
#define _INIT_CALL_PY_EXACT_ARGS_2 391
#define _INIT_CALL_PY_EXACT_ARGS_3 392
#define _INIT_CALL_PY_EXACT_ARGS_4 393
#define _INSTRUMENTED_CALL_FUNCTION_EX INSTRUMENTED_CALL_FUNCTION_EX
#define _INSTRUMENTED_CALL_KW INSTRUMENTED_CALL_KW
#define _INSTRUMENTED_FOR_ITER INSTRUMENTED_FOR_ITER
@ -156,131 +157,133 @@ extern "C" {
#define _INSTRUMENTED_POP_JUMP_IF_NONE INSTRUMENTED_POP_JUMP_IF_NONE
#define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE
#define _INSTRUMENTED_POP_JUMP_IF_TRUE INSTRUMENTED_POP_JUMP_IF_TRUE
#define _INTERNAL_INCREMENT_OPT_COUNTER 393
#define _IS_NONE 394
#define _INTERNAL_INCREMENT_OPT_COUNTER 394
#define _IS_NONE 395
#define _IS_OP IS_OP
#define _ITER_CHECK_LIST 395
#define _ITER_CHECK_RANGE 396
#define _ITER_CHECK_TUPLE 397
#define _ITER_JUMP_LIST 398
#define _ITER_JUMP_RANGE 399
#define _ITER_JUMP_TUPLE 400
#define _ITER_NEXT_LIST 401
#define _ITER_NEXT_RANGE 402
#define _ITER_NEXT_TUPLE 403
#define _JUMP_TO_TOP 404
#define _ITER_CHECK_LIST 396
#define _ITER_CHECK_RANGE 397
#define _ITER_CHECK_TUPLE 398
#define _ITER_JUMP_LIST 399
#define _ITER_JUMP_RANGE 400
#define _ITER_JUMP_TUPLE 401
#define _ITER_NEXT_LIST 402
#define _ITER_NEXT_RANGE 403
#define _ITER_NEXT_TUPLE 404
#define _JUMP_TO_TOP 405
#define _LIST_APPEND LIST_APPEND
#define _LIST_EXTEND LIST_EXTEND
#define _LOAD_ATTR 405
#define _LOAD_ATTR_CLASS 406
#define _LOAD_ATTR_CLASS_0 407
#define _LOAD_ATTR_CLASS_1 408
#define _LOAD_ATTR 406
#define _LOAD_ATTR_CLASS 407
#define _LOAD_ATTR_CLASS_0 408
#define _LOAD_ATTR_CLASS_1 409
#define _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN
#define _LOAD_ATTR_INSTANCE_VALUE 409
#define _LOAD_ATTR_INSTANCE_VALUE_0 410
#define _LOAD_ATTR_INSTANCE_VALUE_1 411
#define _LOAD_ATTR_METHOD_LAZY_DICT 412
#define _LOAD_ATTR_METHOD_NO_DICT 413
#define _LOAD_ATTR_METHOD_WITH_VALUES 414
#define _LOAD_ATTR_MODULE 415
#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 416
#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 417
#define _LOAD_ATTR_PROPERTY_FRAME 418
#define _LOAD_ATTR_SLOT 419
#define _LOAD_ATTR_SLOT_0 420
#define _LOAD_ATTR_SLOT_1 421
#define _LOAD_ATTR_WITH_HINT 422
#define _LOAD_ATTR_INSTANCE_VALUE 410
#define _LOAD_ATTR_INSTANCE_VALUE_0 411
#define _LOAD_ATTR_INSTANCE_VALUE_1 412
#define _LOAD_ATTR_METHOD_LAZY_DICT 413
#define _LOAD_ATTR_METHOD_NO_DICT 414
#define _LOAD_ATTR_METHOD_WITH_VALUES 415
#define _LOAD_ATTR_MODULE 416
#define _LOAD_ATTR_NONDESCRIPTOR_NO_DICT 417
#define _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES 418
#define _LOAD_ATTR_PROPERTY_FRAME 419
#define _LOAD_ATTR_SLOT 420
#define _LOAD_ATTR_SLOT_0 421
#define _LOAD_ATTR_SLOT_1 422
#define _LOAD_ATTR_WITH_HINT 423
#define _LOAD_BUILD_CLASS LOAD_BUILD_CLASS
#define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT
#define _LOAD_CONST LOAD_CONST
#define _LOAD_CONST_INLINE 423
#define _LOAD_CONST_INLINE_BORROW 424
#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 425
#define _LOAD_CONST_INLINE_WITH_NULL 426
#define _LOAD_CONST_INLINE 424
#define _LOAD_CONST_INLINE_BORROW 425
#define _LOAD_CONST_INLINE_BORROW_WITH_NULL 426
#define _LOAD_CONST_INLINE_WITH_NULL 427
#define _LOAD_DEREF LOAD_DEREF
#define _LOAD_FAST 427
#define _LOAD_FAST_0 428
#define _LOAD_FAST_1 429
#define _LOAD_FAST_2 430
#define _LOAD_FAST_3 431
#define _LOAD_FAST_4 432
#define _LOAD_FAST_5 433
#define _LOAD_FAST_6 434
#define _LOAD_FAST_7 435
#define _LOAD_FAST 428
#define _LOAD_FAST_0 429
#define _LOAD_FAST_1 430
#define _LOAD_FAST_2 431
#define _LOAD_FAST_3 432
#define _LOAD_FAST_4 433
#define _LOAD_FAST_5 434
#define _LOAD_FAST_6 435
#define _LOAD_FAST_7 436
#define _LOAD_FAST_AND_CLEAR LOAD_FAST_AND_CLEAR
#define _LOAD_FAST_CHECK LOAD_FAST_CHECK
#define _LOAD_FAST_LOAD_FAST LOAD_FAST_LOAD_FAST
#define _LOAD_FROM_DICT_OR_DEREF LOAD_FROM_DICT_OR_DEREF
#define _LOAD_FROM_DICT_OR_GLOBALS LOAD_FROM_DICT_OR_GLOBALS
#define _LOAD_GLOBAL 436
#define _LOAD_GLOBAL_BUILTINS 437
#define _LOAD_GLOBAL_MODULE 438
#define _LOAD_GLOBAL 437
#define _LOAD_GLOBAL_BUILTINS 438
#define _LOAD_GLOBAL_MODULE 439
#define _LOAD_LOCALS LOAD_LOCALS
#define _LOAD_NAME LOAD_NAME
#define _LOAD_SPECIAL LOAD_SPECIAL
#define _LOAD_SUPER_ATTR_ATTR LOAD_SUPER_ATTR_ATTR
#define _LOAD_SUPER_ATTR_METHOD LOAD_SUPER_ATTR_METHOD
#define _MAKE_CALLARGS_A_TUPLE 440
#define _MAKE_CELL MAKE_CELL
#define _MAKE_FUNCTION MAKE_FUNCTION
#define _MAKE_WARM 439
#define _MAKE_WARM 441
#define _MAP_ADD MAP_ADD
#define _MATCH_CLASS MATCH_CLASS
#define _MATCH_KEYS MATCH_KEYS
#define _MATCH_MAPPING MATCH_MAPPING
#define _MATCH_SEQUENCE MATCH_SEQUENCE
#define _MAYBE_EXPAND_METHOD 440
#define _MONITOR_CALL 441
#define _MONITOR_JUMP_BACKWARD 442
#define _MONITOR_RESUME 443
#define _MAYBE_EXPAND_METHOD 442
#define _MAYBE_EXPAND_METHOD_KW 443
#define _MONITOR_CALL 444
#define _MONITOR_JUMP_BACKWARD 445
#define _MONITOR_RESUME 446
#define _NOP NOP
#define _POP_EXCEPT POP_EXCEPT
#define _POP_JUMP_IF_FALSE 444
#define _POP_JUMP_IF_TRUE 445
#define _POP_JUMP_IF_FALSE 447
#define _POP_JUMP_IF_TRUE 448
#define _POP_TOP POP_TOP
#define _POP_TOP_LOAD_CONST_INLINE_BORROW 446
#define _POP_TOP_LOAD_CONST_INLINE_BORROW 449
#define _PUSH_EXC_INFO PUSH_EXC_INFO
#define _PUSH_FRAME 447
#define _PUSH_FRAME 450
#define _PUSH_NULL PUSH_NULL
#define _PY_FRAME_GENERAL 448
#define _PY_FRAME_KW 449
#define _QUICKEN_RESUME 450
#define _REPLACE_WITH_TRUE 451
#define _PY_FRAME_GENERAL 451
#define _PY_FRAME_KW 452
#define _QUICKEN_RESUME 453
#define _REPLACE_WITH_TRUE 454
#define _RESUME_CHECK RESUME_CHECK
#define _RETURN_GENERATOR RETURN_GENERATOR
#define _RETURN_VALUE RETURN_VALUE
#define _SAVE_RETURN_OFFSET 452
#define _SEND 453
#define _SEND_GEN_FRAME 454
#define _SAVE_RETURN_OFFSET 455
#define _SEND 456
#define _SEND_GEN_FRAME 457
#define _SETUP_ANNOTATIONS SETUP_ANNOTATIONS
#define _SET_ADD SET_ADD
#define _SET_FUNCTION_ATTRIBUTE SET_FUNCTION_ATTRIBUTE
#define _SET_UPDATE SET_UPDATE
#define _START_EXECUTOR 455
#define _STORE_ATTR 456
#define _STORE_ATTR_INSTANCE_VALUE 457
#define _STORE_ATTR_SLOT 458
#define _STORE_ATTR_WITH_HINT 459
#define _START_EXECUTOR 458
#define _STORE_ATTR 459
#define _STORE_ATTR_INSTANCE_VALUE 460
#define _STORE_ATTR_SLOT 461
#define _STORE_ATTR_WITH_HINT 462
#define _STORE_DEREF STORE_DEREF
#define _STORE_FAST 460
#define _STORE_FAST_0 461
#define _STORE_FAST_1 462
#define _STORE_FAST_2 463
#define _STORE_FAST_3 464
#define _STORE_FAST_4 465
#define _STORE_FAST_5 466
#define _STORE_FAST_6 467
#define _STORE_FAST_7 468
#define _STORE_FAST 463
#define _STORE_FAST_0 464
#define _STORE_FAST_1 465
#define _STORE_FAST_2 466
#define _STORE_FAST_3 467
#define _STORE_FAST_4 468
#define _STORE_FAST_5 469
#define _STORE_FAST_6 470
#define _STORE_FAST_7 471
#define _STORE_FAST_LOAD_FAST STORE_FAST_LOAD_FAST
#define _STORE_FAST_STORE_FAST STORE_FAST_STORE_FAST
#define _STORE_GLOBAL STORE_GLOBAL
#define _STORE_NAME STORE_NAME
#define _STORE_SLICE 469
#define _STORE_SUBSCR 470
#define _STORE_SLICE 472
#define _STORE_SUBSCR 473
#define _STORE_SUBSCR_DICT STORE_SUBSCR_DICT
#define _STORE_SUBSCR_LIST_INT STORE_SUBSCR_LIST_INT
#define _SWAP SWAP
#define _TIER2_RESUME_CHECK 471
#define _TO_BOOL 472
#define _TIER2_RESUME_CHECK 474
#define _TO_BOOL 475
#define _TO_BOOL_BOOL TO_BOOL_BOOL
#define _TO_BOOL_INT TO_BOOL_INT
#define _TO_BOOL_LIST TO_BOOL_LIST
@ -290,14 +293,13 @@ extern "C" {
#define _UNARY_NEGATIVE UNARY_NEGATIVE
#define _UNARY_NOT UNARY_NOT
#define _UNPACK_EX UNPACK_EX
#define _UNPACK_SEQUENCE 473
#define _UNPACK_SEQUENCE 476
#define _UNPACK_SEQUENCE_LIST UNPACK_SEQUENCE_LIST
#define _UNPACK_SEQUENCE_TUPLE UNPACK_SEQUENCE_TUPLE
#define _UNPACK_SEQUENCE_TWO_TUPLE UNPACK_SEQUENCE_TWO_TUPLE
#define _WITH_EXCEPT_START WITH_EXCEPT_START
#define _YIELD_VALUE YIELD_VALUE
#define __DO_CALL_FUNCTION_EX _DO_CALL_FUNCTION_EX
#define MAX_UOP_ID 473
#define MAX_UOP_ID 476
#ifdef __cplusplus
}


@ -54,10 +54,10 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_UNARY_NOT] = HAS_PURE_FLAG,
[_TO_BOOL] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_TO_BOOL_BOOL] = HAS_EXIT_FLAG,
[_TO_BOOL_INT] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG,
[_TO_BOOL_INT] = HAS_EXIT_FLAG,
[_TO_BOOL_LIST] = HAS_EXIT_FLAG,
[_TO_BOOL_NONE] = HAS_EXIT_FLAG,
[_TO_BOOL_STR] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG,
[_TO_BOOL_STR] = HAS_EXIT_FLAG,
[_REPLACE_WITH_TRUE] = 0,
[_UNARY_INVERT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_GUARD_BOTH_INT] = HAS_EXIT_FLAG,
@ -74,7 +74,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_BINARY_OP_SUBTRACT_FLOAT] = HAS_PURE_FLAG,
[_GUARD_BOTH_UNICODE] = HAS_EXIT_FLAG,
[_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_PURE_FLAG,
[_BINARY_OP_INPLACE_ADD_UNICODE] = HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_OP_INPLACE_ADD_UNICODE] = HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG,
[_BINARY_SUBSCR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_STORE_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@ -82,7 +82,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_BINARY_SUBSCR_STR_INT] = HAS_DEOPT_FLAG,
[_BINARY_SUBSCR_TUPLE_INT] = HAS_DEOPT_FLAG,
[_BINARY_SUBSCR_DICT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_SUBSCR_CHECK_FUNC] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_SUBSCR_CHECK_FUNC] = HAS_DEOPT_FLAG,
[_BINARY_SUBSCR_INIT_CALL] = 0,
[_LIST_APPEND] = HAS_ARG_FLAG | HAS_ERROR_FLAG,
[_SET_ADD] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@ -97,7 +97,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_GET_ANEXT] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_GET_AWAITABLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_SEND_GEN_FRAME] = HAS_ARG_FLAG | HAS_DEOPT_FLAG,
[_YIELD_VALUE] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG,
[_YIELD_VALUE] = HAS_ARG_FLAG,
[_POP_EXCEPT] = HAS_ESCAPES_FLAG,
[_LOAD_COMMON_CONSTANT] = HAS_ARG_FLAG,
[_LOAD_BUILD_CLASS] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@ -156,7 +156,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_LOAD_ATTR_CLASS_0] = 0,
[_LOAD_ATTR_CLASS_1] = 0,
[_LOAD_ATTR_CLASS] = HAS_ARG_FLAG | HAS_OPARG_AND_1_FLAG,
[_LOAD_ATTR_PROPERTY_FRAME] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
[_LOAD_ATTR_PROPERTY_FRAME] = HAS_ARG_FLAG | HAS_DEOPT_FLAG,
[_GUARD_DORV_NO_DICT] = HAS_EXIT_FLAG,
[_STORE_ATTR_INSTANCE_VALUE] = 0,
[_STORE_ATTR_WITH_HINT] = HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
@ -225,7 +225,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_CALL_TYPE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG,
[_CALL_STR_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_CALL_TUPLE_1] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_CHECK_AND_ALLOCATE_OBJECT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_CHECK_AND_ALLOCATE_OBJECT] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG,
[_CREATE_INIT_FRAME] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_EXIT_INIT_CHECK] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_CALL_BUILTIN_CLASS] = HAS_ARG_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
@ -239,17 +239,19 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_CALL_METHOD_DESCRIPTOR_NOARGS] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_CALL_METHOD_DESCRIPTOR_FAST] = HAS_ARG_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_MAYBE_EXPAND_METHOD_KW] = HAS_ARG_FLAG,
[_PY_FRAME_KW] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_CHECK_FUNCTION_VERSION_KW] = HAS_ARG_FLAG | HAS_EXIT_FLAG,
[_CHECK_METHOD_VERSION_KW] = HAS_ARG_FLAG | HAS_EXIT_FLAG,
[_EXPAND_METHOD_KW] = HAS_ARG_FLAG,
[_CHECK_IS_NOT_PY_CALLABLE_KW] = HAS_ARG_FLAG | HAS_EXIT_FLAG,
[_CALL_KW_NON_PY] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_MAKE_FUNCTION] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_SET_FUNCTION_ATTRIBUTE] = HAS_ARG_FLAG | HAS_ESCAPES_FLAG,
[_RETURN_GENERATOR] = HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_MAKE_CALLARGS_A_TUPLE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_MAKE_FUNCTION] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_SET_FUNCTION_ATTRIBUTE] = HAS_ARG_FLAG,
[_RETURN_GENERATOR] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BUILD_SLICE] = HAS_ARG_FLAG | HAS_ERROR_FLAG,
[_CONVERT_VALUE] = HAS_ARG_FLAG | HAS_ERROR_FLAG,
[_CONVERT_VALUE] = HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_FORMAT_SIMPLE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_FORMAT_WITH_SPEC] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_COPY] = HAS_ARG_FLAG | HAS_PURE_FLAG,
@ -480,6 +482,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {
[_LOAD_SPECIAL] = "_LOAD_SPECIAL",
[_LOAD_SUPER_ATTR_ATTR] = "_LOAD_SUPER_ATTR_ATTR",
[_LOAD_SUPER_ATTR_METHOD] = "_LOAD_SUPER_ATTR_METHOD",
[_MAKE_CALLARGS_A_TUPLE] = "_MAKE_CALLARGS_A_TUPLE",
[_MAKE_CELL] = "_MAKE_CELL",
[_MAKE_FUNCTION] = "_MAKE_FUNCTION",
[_MAKE_WARM] = "_MAKE_WARM",
@ -489,6 +492,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {
[_MATCH_MAPPING] = "_MATCH_MAPPING",
[_MATCH_SEQUENCE] = "_MATCH_SEQUENCE",
[_MAYBE_EXPAND_METHOD] = "_MAYBE_EXPAND_METHOD",
[_MAYBE_EXPAND_METHOD_KW] = "_MAYBE_EXPAND_METHOD_KW",
[_NOP] = "_NOP",
[_POP_EXCEPT] = "_POP_EXCEPT",
[_POP_TOP] = "_POP_TOP",
@ -994,6 +998,8 @@ int _PyUop_num_popped(int opcode, int oparg)
return 2 + oparg;
case _CALL_METHOD_DESCRIPTOR_FAST:
return 2 + oparg;
case _MAYBE_EXPAND_METHOD_KW:
return 3 + oparg;
case _PY_FRAME_KW:
return 3 + oparg;
case _CHECK_FUNCTION_VERSION_KW:
@ -1006,6 +1012,8 @@ int _PyUop_num_popped(int opcode, int oparg)
return 3 + oparg;
case _CALL_KW_NON_PY:
return 3 + oparg;
case _MAKE_CALLARGS_A_TUPLE:
return 3 + (oparg & 1);
case _MAKE_FUNCTION:
return 1;
case _SET_FUNCTION_ATTRIBUTE:

Include/opcode_ids.h (generated)

@ -126,7 +126,6 @@ extern "C" {
#define UNPACK_EX 113
#define UNPACK_SEQUENCE 114
#define YIELD_VALUE 115
#define _DO_CALL_FUNCTION_EX 116
#define RESUME 149
#define BINARY_OP_ADD_FLOAT 150
#define BINARY_OP_ADD_INT 151


@ -315,7 +315,6 @@ opmap = {
'UNPACK_EX': 113,
'UNPACK_SEQUENCE': 114,
'YIELD_VALUE': 115,
'_DO_CALL_FUNCTION_EX': 116,
'INSTRUMENTED_END_FOR': 236,
'INSTRUMENTED_END_SEND': 237,
'INSTRUMENTED_LOAD_SUPER_ATTR': 238,


@ -1214,9 +1214,9 @@ class StreamTests(test_utils.TestCase):
# can't use assertRaises because that clears frames
exc = excs.exceptions[0]
self.assertIsNotNone(exc)
self.assertListEqual(gc.get_referrers(exc), [])
asyncio.run(main())
self.assertListEqual(gc.get_referrers(exc), [main_coro])
main_coro = main()
asyncio.run(main_coro)
if __name__ == '__main__':


@ -60,7 +60,7 @@ class TestEffects(unittest.TestCase):
stack.pop(y)
stack.pop(x)
for out in outputs:
stack.push(Local.local(out))
stack.push(Local.undefined(out))
self.assertEqual(stack.base_offset.to_c(), "-1 - oparg - oparg*2")
self.assertEqual(stack.top_offset.to_c(), "1 - oparg - oparg*2 + oparg*4")
@ -122,7 +122,7 @@ class TestGeneratedCases(unittest.TestCase):
def test_inst_no_args(self):
input = """
inst(OP, (--)) {
spam();
SPAM();
}
"""
output = """
@ -130,7 +130,7 @@ class TestGeneratedCases(unittest.TestCase):
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
spam();
SPAM();
DISPATCH();
}
"""
@ -139,7 +139,8 @@ class TestGeneratedCases(unittest.TestCase):
def test_inst_one_pop(self):
input = """
inst(OP, (value --)) {
spam(value);
SPAM(value);
DEAD(value);
}
"""
output = """
@ -149,7 +150,7 @@ class TestGeneratedCases(unittest.TestCase):
INSTRUCTION_STATS(OP);
_PyStackRef value;
value = stack_pointer[-1];
spam(value);
SPAM(value);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
@ -160,7 +161,7 @@ class TestGeneratedCases(unittest.TestCase):
def test_inst_one_push(self):
input = """
inst(OP, (-- res)) {
res = spam();
res = SPAM();
}
"""
output = """
@ -169,7 +170,7 @@ class TestGeneratedCases(unittest.TestCase):
next_instr += 1;
INSTRUCTION_STATS(OP);
_PyStackRef res;
res = spam();
res = SPAM();
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -181,7 +182,8 @@ class TestGeneratedCases(unittest.TestCase):
def test_inst_one_push_one_pop(self):
input = """
inst(OP, (value -- res)) {
res = spam(value);
res = SPAM(value);
DEAD(value);
}
"""
output = """
@ -192,7 +194,7 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef value;
_PyStackRef res;
value = stack_pointer[-1];
res = spam(value);
res = SPAM(value);
stack_pointer[-1] = res;
DISPATCH();
}
@ -202,7 +204,9 @@ class TestGeneratedCases(unittest.TestCase):
def test_binary_op(self):
input = """
inst(OP, (left, right -- res)) {
res = spam(left, right);
res = SPAM(left, right);
INPUTS_DEAD();
}
"""
output = """
@ -215,7 +219,7 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef res;
right = stack_pointer[-1];
left = stack_pointer[-2];
res = spam(left, right);
res = SPAM(left, right);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@ -227,7 +231,8 @@ class TestGeneratedCases(unittest.TestCase):
def test_overlap(self):
input = """
inst(OP, (left, right -- left, result)) {
result = spam(left, right);
result = SPAM(left, right);
INPUTS_DEAD();
}
"""
output = """
@ -240,7 +245,7 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef result;
right = stack_pointer[-1];
left = stack_pointer[-2];
result = spam(left, right);
result = SPAM(left, right);
stack_pointer[-1] = result;
DISPATCH();
}
@ -249,7 +254,8 @@ class TestGeneratedCases(unittest.TestCase):
def test_predictions(self):
input = """
inst(OP1, (arg -- rest)) {
inst(OP1, (arg -- res)) {
res = Py_None;
}
inst(OP3, (arg -- res)) {
DEOPT_IF(xxx);
@ -263,7 +269,9 @@ class TestGeneratedCases(unittest.TestCase):
next_instr += 1;
INSTRUCTION_STATS(OP1);
PREDICTED(OP1);
stack_pointer[-1] = rest;
_PyStackRef res;
res = Py_None;
stack_pointer[-1] = res;
DISPATCH();
}
@ -281,6 +289,67 @@ class TestGeneratedCases(unittest.TestCase):
"""
self.run_cases_test(input, output)
def test_sync_sp(self):
input = """
inst(A, (arg -- res)) {
SYNC_SP();
escaping_call();
res = Py_None;
}
inst(B, (arg -- res)) {
res = Py_None;
SYNC_SP();
escaping_call();
}
"""
output = """
TARGET(A) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(A);
_PyStackRef res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
escaping_call();
stack_pointer = _PyFrame_GetStackPointer(frame);
res = Py_None;
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
TARGET(B) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(B);
_PyStackRef res;
res = Py_None;
stack_pointer[-1] = res;
_PyFrame_SetStackPointer(frame, stack_pointer);
escaping_call();
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH();
}
"""
self.run_cases_test(input, output)
def test_pep7_condition(self):
input = """
inst(OP, (arg1 -- out)) {
if (arg1)
out = 0;
else {
out = 1;
}
}
"""
output = ""
with self.assertRaises(SyntaxError):
self.run_cases_test(input, output)
def test_error_if_plain(self):
input = """
inst(OP, (--)) {
@ -319,7 +388,38 @@ class TestGeneratedCases(unittest.TestCase):
def test_error_if_pop(self):
input = """
inst(OP, (left, right -- res)) {
res = spam(left, right);
SPAM(left, right);
INPUTS_DEAD();
ERROR_IF(cond, label);
res = 0;
}
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
_PyStackRef left;
_PyStackRef right;
_PyStackRef res;
right = stack_pointer[-1];
left = stack_pointer[-2];
SPAM(left, right);
if (cond) goto pop_2_label;
res = 0;
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
}
"""
self.run_cases_test(input, output)
def test_error_if_pop_with_result(self):
input = """
inst(OP, (left, right -- res)) {
res = SPAM(left, right);
INPUTS_DEAD();
ERROR_IF(cond, label);
}
"""
@ -333,7 +433,7 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef res;
right = stack_pointer[-1];
left = stack_pointer[-2];
res = spam(left, right);
res = SPAM(left, right);
if (cond) goto pop_2_label;
stack_pointer[-2] = res;
stack_pointer += -1;
@ -388,10 +488,12 @@ class TestGeneratedCases(unittest.TestCase):
}
op(OP2, (extra/2, arg2, left, right -- res)) {
res = op2(arg2, left, right);
INPUTS_DEAD();
}
macro(OP) = OP1 + cache/2 + OP2;
inst(OP3, (unused/5, arg2, left, right -- res)) {
res = op3(arg2, left, right);
INPUTS_DEAD();
}
family(OP, INLINE_CACHE_ENTRIES_OP) = { OP3 };
"""
@ -408,20 +510,24 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef arg2;
_PyStackRef res;
// _OP1
{
right = stack_pointer[-1];
left = stack_pointer[-2];
{
uint16_t counter = read_u16(&this_instr[1].cache);
(void)counter;
_PyFrame_SetStackPointer(frame, stack_pointer);
op1(left, right);
stack_pointer = _PyFrame_GetStackPointer(frame);
}
/* Skip 2 cache entries */
// OP2
arg2 = stack_pointer[-3];
{
arg2 = stack_pointer[-3];
uint32_t extra = read_u32(&this_instr[4].cache);
(void)extra;
_PyFrame_SetStackPointer(frame, stack_pointer);
res = op2(arg2, left, right);
stack_pointer = _PyFrame_GetStackPointer(frame);
}
stack_pointer[-3] = res;
stack_pointer += -2;
@ -440,7 +546,9 @@ class TestGeneratedCases(unittest.TestCase):
left = stack_pointer[-2];
uint16_t counter = read_u16(&this_instr[1].cache);
(void)counter;
_PyFrame_SetStackPointer(frame, stack_pointer);
op1(left, right);
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH();
}
@ -457,7 +565,9 @@ class TestGeneratedCases(unittest.TestCase):
right = stack_pointer[-1];
left = stack_pointer[-2];
arg2 = stack_pointer[-3];
_PyFrame_SetStackPointer(frame, stack_pointer);
res = op3(arg2, left, right);
stack_pointer = _PyFrame_GetStackPointer(frame);
stack_pointer[-3] = res;
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
@ -469,7 +579,7 @@ class TestGeneratedCases(unittest.TestCase):
def test_unused_caches(self):
input = """
inst(OP, (unused/1, unused/2 --)) {
body();
body;
}
"""
output = """
@ -479,7 +589,7 @@ class TestGeneratedCases(unittest.TestCase):
INSTRUCTION_STATS(OP);
/* Skip 1 cache entry */
/* Skip 2 cache entries */
body();
body;
DISPATCH();
}
"""
@ -556,7 +666,7 @@ class TestGeneratedCases(unittest.TestCase):
def test_array_input(self):
input = """
inst(OP, (below, values[oparg*2], above --)) {
spam(values, oparg);
SPAM(values, oparg);
}
"""
output = """
@ -566,7 +676,7 @@ class TestGeneratedCases(unittest.TestCase):
INSTRUCTION_STATS(OP);
_PyStackRef *values;
values = &stack_pointer[-1 - oparg*2];
spam(values, oparg);
SPAM(values, oparg);
stack_pointer += -2 - oparg*2;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
@ -577,7 +687,9 @@ class TestGeneratedCases(unittest.TestCase):
def test_array_output(self):
input = """
inst(OP, (unused, unused -- below, values[oparg*3], above)) {
spam(values, oparg);
SPAM(values, oparg);
below = 0;
above = 0;
}
"""
output = """
@ -585,9 +697,13 @@ class TestGeneratedCases(unittest.TestCase):
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
_PyStackRef below;
_PyStackRef *values;
_PyStackRef above;
values = &stack_pointer[-1];
spam(values, oparg);
SPAM(values, oparg);
below = 0;
above = 0;
stack_pointer[-2] = below;
stack_pointer[-1 + oparg*3] = above;
stack_pointer += oparg*3;
@ -600,7 +716,8 @@ class TestGeneratedCases(unittest.TestCase):
def test_array_input_output(self):
input = """
inst(OP, (values[oparg] -- values[oparg], above)) {
spam(values, oparg);
SPAM(values, oparg);
above = 0;
}
"""
output = """
@ -609,8 +726,10 @@ class TestGeneratedCases(unittest.TestCase):
next_instr += 1;
INSTRUCTION_STATS(OP);
_PyStackRef *values;
_PyStackRef above;
values = &stack_pointer[-oparg];
spam(values, oparg);
SPAM(values, oparg);
above = 0;
stack_pointer[0] = above;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -645,7 +764,10 @@ class TestGeneratedCases(unittest.TestCase):
def test_cond_effect(self):
input = """
inst(OP, (aa, input if ((oparg & 1) == 1), cc -- xx, output if (oparg & 2), zz)) {
output = spam(oparg, aa, cc, input);
output = SPAM(oparg, aa, cc, input);
INPUTS_DEAD();
xx = 0;
zz = 0;
}
"""
output = """
@ -656,11 +778,15 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef aa;
_PyStackRef input = PyStackRef_NULL;
_PyStackRef cc;
_PyStackRef xx;
_PyStackRef output = PyStackRef_NULL;
_PyStackRef zz;
cc = stack_pointer[-1];
if ((oparg & 1) == 1) { input = stack_pointer[-1 - (((oparg & 1) == 1) ? 1 : 0)]; }
aa = stack_pointer[-2 - (((oparg & 1) == 1) ? 1 : 0)];
output = spam(oparg, aa, cc, input);
output = SPAM(oparg, aa, cc, input);
xx = 0;
zz = 0;
stack_pointer[-2 - (((oparg & 1) == 1) ? 1 : 0)] = xx;
if (oparg & 2) stack_pointer[-1 - (((oparg & 1) == 1) ? 1 : 0)] = output;
stack_pointer[-1 - (((oparg & 1) == 1) ? 1 : 0) + ((oparg & 2) ? 1 : 0)] = zz;
@ -674,11 +800,14 @@ class TestGeneratedCases(unittest.TestCase):
def test_macro_cond_effect(self):
input = """
op(A, (left, middle, right --)) {
use(left, middle, right);
USE(left, middle, right);
INPUTS_DEAD();
}
op(B, (-- deep, extra if (oparg), res)) {
deep = -1;
res = 0;
extra = 1;
INPUTS_DEAD();
}
macro(M) = A + B;
"""
@ -690,17 +819,19 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef left;
_PyStackRef middle;
_PyStackRef right;
_PyStackRef deep;
_PyStackRef extra = PyStackRef_NULL;
_PyStackRef res;
// A
{
right = stack_pointer[-1];
middle = stack_pointer[-2];
left = stack_pointer[-3];
{
use(left, middle, right);
USE(left, middle, right);
}
// B
{
deep = -1;
res = 0;
extra = 1;
}
@ -717,10 +848,10 @@ class TestGeneratedCases(unittest.TestCase):
def test_macro_push_push(self):
input = """
op(A, (-- val1)) {
val1 = spam();
val1 = SPAM();
}
op(B, (-- val2)) {
val2 = spam();
val2 = SPAM();
}
macro(M) = A + B;
"""
@ -733,11 +864,11 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef val2;
// A
{
val1 = spam();
val1 = SPAM();
}
// B
{
val2 = spam();
val2 = SPAM();
}
stack_pointer[0] = val1;
stack_pointer[1] = val2;
@ -751,10 +882,10 @@ class TestGeneratedCases(unittest.TestCase):
def test_override_inst(self):
input = """
inst(OP, (--)) {
spam();
spam;
}
override inst(OP, (--)) {
ham();
ham;
}
"""
output = """
@ -762,7 +893,7 @@ class TestGeneratedCases(unittest.TestCase):
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
ham();
ham;
DISPATCH();
}
"""
@ -771,11 +902,11 @@ class TestGeneratedCases(unittest.TestCase):
def test_override_op(self):
input = """
op(OP, (--)) {
spam();
spam;
}
macro(M) = OP;
override op(OP, (--)) {
ham();
ham;
}
"""
output = """
@ -783,7 +914,7 @@ class TestGeneratedCases(unittest.TestCase):
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(M);
ham();
ham;
DISPATCH();
}
"""
@ -792,7 +923,7 @@ class TestGeneratedCases(unittest.TestCase):
def test_annotated_inst(self):
input = """
pure inst(OP, (--)) {
ham();
ham;
}
"""
output = """
@ -800,7 +931,7 @@ class TestGeneratedCases(unittest.TestCase):
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
ham();
ham;
DISPATCH();
}
"""
@ -809,7 +940,7 @@ class TestGeneratedCases(unittest.TestCase):
def test_annotated_op(self):
input = """
pure op(OP, (--)) {
spam();
SPAM();
}
macro(M) = OP;
"""
@ -818,7 +949,7 @@ class TestGeneratedCases(unittest.TestCase):
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(M);
spam();
SPAM();
DISPATCH();
}
"""
@ -826,7 +957,7 @@ class TestGeneratedCases(unittest.TestCase):
input = """
pure register specializing op(OP, (--)) {
spam();
SPAM();
}
macro(M) = OP;
"""
@ -840,7 +971,7 @@ class TestGeneratedCases(unittest.TestCase):
}
"""
output = ""
with self.assertRaises(Exception):
with self.assertRaises(SyntaxError):
self.run_cases_test(input, output)
def test_array_of_one(self):
@ -868,6 +999,7 @@ class TestGeneratedCases(unittest.TestCase):
input = """
inst(OP, (arg: _PyStackRef * -- out)) {
out = *arg;
DEAD(arg);
}
"""
output = """
@ -922,14 +1054,14 @@ class TestGeneratedCases(unittest.TestCase):
def test_used_unused_used(self):
input = """
op(FIRST, (w -- w)) {
use(w);
USE(w);
}
op(SECOND, (x -- x)) {
}
op(THIRD, (y -- y)) {
use(y);
USE(y);
}
macro(TEST) = FIRST + SECOND + THIRD;
@ -942,17 +1074,17 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef w;
_PyStackRef y;
// FIRST
w = stack_pointer[-1];
{
use(w);
w = stack_pointer[-1];
USE(w);
}
// SECOND
{
}
// THIRD
y = w;
{
use(y);
y = w;
USE(y);
}
DISPATCH();
}
@ -965,11 +1097,11 @@ class TestGeneratedCases(unittest.TestCase):
}
op(SECOND, (x -- x)) {
use(x);
USE(x);
}
op(THIRD, (y -- y)) {
use(y);
USE(y);
}
macro(TEST) = FIRST + SECOND + THIRD;
@ -985,14 +1117,14 @@ class TestGeneratedCases(unittest.TestCase):
{
}
// SECOND
x = stack_pointer[-1];
{
use(x);
x = stack_pointer[-1];
USE(x);
}
// THIRD
y = x;
{
use(y);
y = x;
USE(y);
}
DISPATCH();
}
@ -1007,7 +1139,8 @@ class TestGeneratedCases(unittest.TestCase):
}
op(SECOND, (a, b -- )) {
use(a, b);
USE(a, b);
INPUTS_DEAD();
}
macro(TEST) = FIRST + flush + SECOND;
@ -1030,10 +1163,8 @@ class TestGeneratedCases(unittest.TestCase):
stack_pointer += 2;
assert(WITHIN_STACK_BOUNDS());
// SECOND
b = stack_pointer[-1];
a = stack_pointer[-2];
{
use(a, b);
USE(a, b);
}
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
@ -1047,14 +1178,16 @@ class TestGeneratedCases(unittest.TestCase):
input = """
op(FIRST, (x, y -- a, b)) {
a = x;
DEAD(x);
b = y;
DEAD(y);
}
op(SECOND, (a, b -- a, b)) {
}
op(THIRD, (j, k --)) {
j,k; // Mark j and k as used
INPUTS_DEAD(); // Mark j and k as used
ERROR_IF(cond, error);
}
@ -1069,12 +1202,10 @@ class TestGeneratedCases(unittest.TestCase):
_PyStackRef y;
_PyStackRef a;
_PyStackRef b;
_PyStackRef j;
_PyStackRef k;
// FIRST
{
y = stack_pointer[-1];
x = stack_pointer[-2];
{
a = x;
b = y;
}
@ -1082,10 +1213,8 @@ class TestGeneratedCases(unittest.TestCase):
{
}
// THIRD
k = b;
j = a;
{
j,k; // Mark j and k as used
// Mark j and k as used
if (cond) goto pop_2_error;
}
stack_pointer += -2;
@ -1126,7 +1255,8 @@ class TestGeneratedCases(unittest.TestCase):
b = 1;
if (cond) {
stack_pointer[0] = a;
stack_pointer += 1;
stack_pointer[1] = b;
stack_pointer += 2;
assert(WITHIN_STACK_BOUNDS());
goto error;
}
@ -1178,6 +1308,69 @@ class TestGeneratedCases(unittest.TestCase):
with self.assertRaises(SyntaxError):
self.run_cases_test(input, output)
def test_stack_save_reload(self):
input = """
inst(BALANCED, ( -- )) {
SAVE_STACK();
RELOAD_STACK();
}
"""
output = """
TARGET(BALANCED) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(BALANCED);
_PyFrame_SetStackPointer(frame, stack_pointer);
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH();
}
"""
self.run_cases_test(input, output)
def test_stack_reload_only(self):
input = """
inst(BALANCED, ( -- )) {
RELOAD_STACK();
}
"""
output = """
TARGET(BALANCED) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(BALANCED);
_PyFrame_SetStackPointer(frame, stack_pointer);
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH();
}
"""
with self.assertRaises(SyntaxError):
self.run_cases_test(input, output)
def test_stack_save_only(self):
input = """
inst(BALANCED, ( -- )) {
SAVE_STACK();
}
"""
output = """
TARGET(BALANCED) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(BALANCED);
_PyFrame_SetStackPointer(frame, stack_pointer);
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH();
}
"""
with self.assertRaises(SyntaxError):
self.run_cases_test(input, output)
class TestGeneratedAbstractCases(unittest.TestCase):
def setUp(self) -> None:
@ -1232,7 +1425,7 @@ class TestGeneratedAbstractCases(unittest.TestCase):
def test_overridden_abstract(self):
input = """
pure op(OP, (--)) {
spam();
SPAM();
}
"""
input2 = """
@ -1251,22 +1444,23 @@ class TestGeneratedAbstractCases(unittest.TestCase):
def test_overridden_abstract_args(self):
input = """
pure op(OP, (arg1 -- out)) {
spam();
out = SPAM(arg1);
}
op(OP2, (arg1 -- out)) {
eggs();
out = EGGS(arg1);
}
"""
input2 = """
op(OP, (arg1 -- out)) {
eggs();
out = EGGS(arg1);
}
"""
output = """
case OP: {
_Py_UopsSymbol *arg1;
_Py_UopsSymbol *out;
eggs();
arg1 = stack_pointer[-1];
out = EGGS(arg1);
stack_pointer[-1] = out;
break;
}
@ -1283,7 +1477,7 @@ class TestGeneratedAbstractCases(unittest.TestCase):
def test_no_overridden_case(self):
input = """
pure op(OP, (arg1 -- out)) {
spam();
out = SPAM(arg1);
}
pure op(OP2, (arg1 -- out)) {
@ -1292,6 +1486,7 @@ class TestGeneratedAbstractCases(unittest.TestCase):
"""
input2 = """
pure op(OP2, (arg1 -- out)) {
out = NULL;
}
"""
output = """
@ -1303,8 +1498,8 @@ class TestGeneratedAbstractCases(unittest.TestCase):
}
case OP2: {
_Py_UopsSymbol *arg1;
_Py_UopsSymbol *out;
out = NULL;
stack_pointer[-1] = out;
break;
}
@ -1314,7 +1509,7 @@ class TestGeneratedAbstractCases(unittest.TestCase):
def test_missing_override_failure(self):
input = """
pure op(OP, (arg1 -- out)) {
spam();
SPAM();
}
"""
input2 = """


@ -785,8 +785,6 @@ class CheckEvents(MonitoringTestBase, unittest.TestCase):
def check_events(self, func, expected, tool=TEST_TOOL, recorders=(ExceptionRecorder,)):
events = self.get_events(func, tool, recorders)
if events != expected:
print(events, file = sys.stderr)
self.assertEqual(events, expected)
def check_balanced(self, func, recorders):

File diff suppressed because it is too large.


@ -274,7 +274,6 @@ static void monitor_throw(PyThreadState *tstate,
_PyInterpreterFrame *frame,
_Py_CODEUNIT *instr);
static int check_args_iterable(PyThreadState *, PyObject *func, PyObject *vararg);
static int get_exception_handler(PyCodeObject *, int, int*, int*, int*);
static _PyInterpreterFrame *
_PyEvalFramePushAndInit_Ex(PyThreadState *tstate, _PyStackRef func,
@ -394,6 +393,13 @@ const _Py_SpecialMethod _Py_SpecialMethods[] = {
}
};
const size_t _Py_FunctionAttributeOffsets[] = {
[MAKE_FUNCTION_CLOSURE] = offsetof(PyFunctionObject, func_closure),
[MAKE_FUNCTION_ANNOTATIONS] = offsetof(PyFunctionObject, func_annotations),
[MAKE_FUNCTION_KWDEFAULTS] = offsetof(PyFunctionObject, func_kwdefaults),
[MAKE_FUNCTION_DEFAULTS] = offsetof(PyFunctionObject, func_defaults),
[MAKE_FUNCTION_ANNOTATE] = offsetof(PyFunctionObject, func_annotate),
};
// PEP 634: Structural Pattern Matching
@ -1036,6 +1042,7 @@ tier2_dispatch:
uopcode = next_uop->opcode;
#ifdef Py_DEBUG
if (lltrace >= 3) {
dump_stack(frame, stack_pointer);
if (next_uop->opcode == _START_EXECUTOR) {
printf("%4d uop: ", 0);
}
@ -1043,8 +1050,7 @@ tier2_dispatch:
printf("%4d uop: ", (int)(next_uop - current_executor->trace));
}
_PyUOpPrint(next_uop);
printf(" stack_level=%d\n",
(int)(stack_pointer - _PyFrame_Stackbase(frame)));
printf("\n");
}
#endif
next_uop++;
@ -2920,11 +2926,11 @@ _PyEval_CheckExceptStarTypeValid(PyThreadState *tstate, PyObject* right)
return 0;
}
static int
check_args_iterable(PyThreadState *tstate, PyObject *func, PyObject *args)
int
_Py_Check_ArgsIterable(PyThreadState *tstate, PyObject *func, PyObject *args)
{
if (Py_TYPE(args)->tp_iter == NULL && !PySequence_Check(args)) {
/* check_args_iterable() may be called with a live exception:
/* _Py_Check_ArgsIterable() may be called with a live exception:
* clear it to prevent calling _PyObject_FunctionStr() with an
* exception set. */
_PyErr_Clear(tstate);


@ -108,6 +108,7 @@ do { \
/* Do interpreter dispatch accounting for tracing and instrumentation */
#define DISPATCH() \
{ \
assert(frame->stackpointer == NULL); \
NEXTOPARG(); \
PRE_DISPATCH_GOTO(); \
DISPATCH_GOTO(); \

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@ -115,7 +115,7 @@ static void *opcode_targets[256] = {
&&TARGET_UNPACK_EX,
&&TARGET_UNPACK_SEQUENCE,
&&TARGET_YIELD_VALUE,
&&TARGET__DO_CALL_FUNCTION_EX,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,


@ -182,8 +182,10 @@ dummy_func(void) {
res = sym_new_type(ctx, &PyFloat_Type);
}
}
else {
res = sym_new_unknown(ctx);
}
}
op(_BINARY_OP_ADD_INT, (left, right -- res)) {
if (sym_is_const(left) && sym_is_const(right) &&
@ -448,8 +450,10 @@ dummy_func(void) {
top = bottom;
}
op(_SWAP, (bottom, unused[oparg-2], top --
top, unused[oparg-2], bottom)) {
op(_SWAP, (bottom_in, unused[oparg-2], top_in --
top_out, unused[oparg-2], bottom_out)) {
bottom_out = bottom_in;
top_out = top_in;
}
op(_LOAD_ATTR_INSTANCE_VALUE, (offset/1, owner -- attr, null if (oparg & 1))) {
@ -479,10 +483,8 @@ dummy_func(void) {
op(_LOAD_ATTR, (owner -- attr, self_or_null if (oparg & 1))) {
(void)owner;
attr = sym_new_not_null(ctx);
if (oparg & 1) {
self_or_null = sym_new_unknown(ctx);
}
}
op(_LOAD_ATTR_MODULE, (index/1, owner -- attr, null if (oparg & 1))) {
(void)index;
@ -570,7 +572,6 @@ dummy_func(void) {
op(_INIT_CALL_PY_EXACT_ARGS, (callable, self_or_null, args[oparg] -- new_frame: _Py_UOpsAbstractFrame *)) {
int argcount = oparg;
(void)callable;
PyCodeObject *co = NULL;
@ -647,11 +648,10 @@ dummy_func(void) {
}
op(_RETURN_VALUE, (retval -- res)) {
SYNC_SP();
SAVE_STACK();
ctx->frame->stack_pointer = stack_pointer;
frame_pop(ctx);
stack_pointer = ctx->frame->stack_pointer;
res = retval;
/* Stack space handling */
assert(corresponding_check_stack == NULL);
@ -666,6 +666,8 @@ dummy_func(void) {
// might be impossible, but bailing is still safe
ctx->done = true;
}
RELOAD_STACK();
res = retval;
}
op(_RETURN_GENERATOR, ( -- res)) {


@ -93,9 +93,9 @@
}
case _END_SEND: {
_Py_UopsSymbol *value;
value = sym_new_not_null(ctx);
stack_pointer[-2] = value;
_Py_UopsSymbol *val;
val = sym_new_not_null(ctx);
stack_pointer[-2] = val;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
@ -630,7 +630,6 @@
ctx->frame->stack_pointer = stack_pointer;
frame_pop(ctx);
stack_pointer = ctx->frame->stack_pointer;
res = retval;
/* Stack space handling */
assert(corresponding_check_stack == NULL);
assert(co != NULL);
@ -643,6 +642,7 @@
// might be impossible, but bailing is still safe
ctx->done = true;
}
res = retval;
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@ -832,9 +832,7 @@
_Py_UopsSymbol **res;
_Py_UopsSymbol *null = NULL;
res = &stack_pointer[0];
for (int _i = 1; --_i >= 0;) {
res[_i] = sym_new_not_null(ctx);
}
res[0] = sym_new_not_null(ctx);
null = sym_new_null(ctx);
if (oparg & 1) stack_pointer[1] = null;
stack_pointer += 1 + (oparg & 1);
@ -1021,9 +1019,7 @@
owner = stack_pointer[-1];
(void)owner;
attr = sym_new_not_null(ctx);
if (oparg & 1) {
self_or_null = sym_new_unknown(ctx);
}
stack_pointer[-1] = attr;
if (oparg & 1) stack_pointer[0] = self_or_null;
stack_pointer += (oparg & 1);
@ -1114,11 +1110,17 @@
PyModuleObject *mod = (PyModuleObject *)sym_get_const(owner);
assert(PyModule_CheckExact(mod));
PyObject *dict = mod->md_dict;
stack_pointer[-1] = attr;
if (oparg & 1) stack_pointer[0] = null;
stack_pointer += (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
PyObject *res = convert_global_to_const(this_instr, dict);
if (res != NULL) {
this_instr[-1].opcode = _POP_TOP;
attr = sym_new_const(ctx, res);
}
stack_pointer += -(oparg & 1);
assert(WITHIN_STACK_BOUNDS());
}
if (attr == NULL) {
/* No conversion made. We don't know what `attr` is. */
@ -1239,7 +1241,11 @@
res = sym_new_type(ctx, &PyBool_Type);
}
else {
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
res = _Py_uop_sym_new_not_null(ctx);
stack_pointer += 2;
assert(WITHIN_STACK_BOUNDS());
}
stack_pointer[-2] = res;
stack_pointer += -1;
@ -1659,12 +1665,13 @@
/* _MONITOR_CALL is not a viable micro-op for tier 2 */
case _PY_FRAME_GENERAL: {
_Py_UopsSymbol **args;
_Py_UopsSymbol *self_or_null;
_Py_UopsSymbol *callable;
_Py_UOpsAbstractFrame *new_frame;
self_or_null = stack_pointer[-1 - oparg];
callable = stack_pointer[-2 - oparg];
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
(void)(self_or_null);
(void)(callable);
PyCodeObject *co = NULL;
@ -1675,8 +1682,8 @@
break;
}
new_frame = frame_new(ctx, co, 0, NULL, 0);
stack_pointer[-2 - oparg] = (_Py_UopsSymbol *)new_frame;
stack_pointer += -1 - oparg;
stack_pointer[0] = (_Py_UopsSymbol *)new_frame;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
@ -1690,14 +1697,12 @@
}
case _EXPAND_METHOD: {
_Py_UopsSymbol *method;
_Py_UopsSymbol **method;
_Py_UopsSymbol **self;
method = &stack_pointer[-2 - oparg];
self = &stack_pointer[-1 - oparg];
method = sym_new_not_null(ctx);
for (int _i = 1; --_i >= 0;) {
self[_i] = sym_new_not_null(ctx);
}
stack_pointer[-2 - oparg] = method;
method[0] = sym_new_not_null(ctx);
self[0] = sym_new_not_null(ctx);
break;
}
@ -1774,6 +1779,8 @@
(void)callable;
PyCodeObject *co = NULL;
assert((this_instr + 2)->opcode == _PUSH_FRAME);
stack_pointer += -2 - oparg;
assert(WITHIN_STACK_BOUNDS());
co = get_code_with_logging((this_instr + 2));
if (co == NULL) {
ctx->done = true;
@ -1791,8 +1798,8 @@
} else {
new_frame = frame_new(ctx, co, 0, NULL, 0);
}
stack_pointer[-2 - oparg] = (_Py_UopsSymbol *)new_frame;
stack_pointer += -1 - oparg;
stack_pointer[0] = (_Py_UopsSymbol *)new_frame;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
@ -1825,10 +1832,12 @@
if (first_valid_check_stack == NULL) {
first_valid_check_stack = corresponding_check_stack;
}
else if (corresponding_check_stack) {
else {
if (corresponding_check_stack) {
// delete all but the first valid _CHECK_STACK_SPACE
corresponding_check_stack->opcode = _NOP;
}
}
corresponding_check_stack = NULL;
break;
}
@ -2005,6 +2014,24 @@
/* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 */
case _MAYBE_EXPAND_METHOD_KW: {
_Py_UopsSymbol **func;
_Py_UopsSymbol **maybe_self;
_Py_UopsSymbol **args;
_Py_UopsSymbol *kwnames_out;
func = &stack_pointer[-3 - oparg];
maybe_self = &stack_pointer[-2 - oparg];
args = &stack_pointer[-1 - oparg];
func[0] = sym_new_not_null(ctx);
maybe_self[0] = sym_new_not_null(ctx);
for (int _i = oparg; --_i >= 0;) {
args[_i] = sym_new_not_null(ctx);
}
kwnames_out = sym_new_not_null(ctx);
stack_pointer[-1] = kwnames_out;
break;
}
/* _DO_CALL_KW is not a viable micro-op for tier 2 */
case _PY_FRAME_KW: {
@ -2038,17 +2065,12 @@
}
case _EXPAND_METHOD_KW: {
_Py_UopsSymbol *method;
_Py_UopsSymbol **method;
_Py_UopsSymbol **self;
_Py_UopsSymbol *kwnames;
method = &stack_pointer[-3 - oparg];
self = &stack_pointer[-2 - oparg];
method = sym_new_not_null(ctx);
for (int _i = 1; --_i >= 0;) {
self[_i] = sym_new_not_null(ctx);
}
kwnames = sym_new_not_null(ctx);
stack_pointer[-3 - oparg] = method;
stack_pointer[-1] = kwnames;
method[0] = sym_new_not_null(ctx);
self[0] = sym_new_not_null(ctx);
break;
}
@ -2067,7 +2089,17 @@
/* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */
/* __DO_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */
case _MAKE_CALLARGS_A_TUPLE: {
_Py_UopsSymbol *tuple;
_Py_UopsSymbol *kwargs_out = NULL;
tuple = sym_new_not_null(ctx);
kwargs_out = sym_new_not_null(ctx);
stack_pointer[-1 - (oparg & 1)] = tuple;
if (oparg & 1) stack_pointer[-(oparg & 1)] = kwargs_out;
break;
}
/* _DO_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */
case _MAKE_FUNCTION: {
_Py_UopsSymbol *func;
@ -2077,9 +2109,9 @@
}
case _SET_FUNCTION_ATTRIBUTE: {
_Py_UopsSymbol *func_st;
func_st = sym_new_not_null(ctx);
stack_pointer[-2] = func_st;
_Py_UopsSymbol *func_out;
func_out = sym_new_not_null(ctx);
stack_pointer[-2] = func_out;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
@ -2098,14 +2130,14 @@
assert(framesize > 0);
assert(framesize <= curr_space);
curr_space -= framesize;
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
co = get_code(this_instr);
if (co == NULL) {
// might be impossible, but bailing is still safe
ctx->done = true;
}
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
@ -2174,7 +2206,9 @@
res = sym_new_type(ctx, &PyFloat_Type);
}
}
else {
res = sym_new_unknown(ctx);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@ -2182,12 +2216,16 @@
}
case _SWAP: {
_Py_UopsSymbol *top;
_Py_UopsSymbol *bottom;
top = stack_pointer[-1];
bottom = stack_pointer[-2 - (oparg-2)];
stack_pointer[-2 - (oparg-2)] = top;
stack_pointer[-1] = bottom;
_Py_UopsSymbol *top_in;
_Py_UopsSymbol *bottom_in;
_Py_UopsSymbol *top_out;
_Py_UopsSymbol *bottom_out;
top_in = stack_pointer[-1];
bottom_in = stack_pointer[-2 - (oparg-2)];
bottom_out = bottom_in;
top_out = top_in;
stack_pointer[-2 - (oparg-2)] = top_out;
stack_pointer[-1] = bottom_out;
break;
}
@ -2213,7 +2251,11 @@
if (sym_is_const(flag)) {
PyObject *value = sym_get_const(flag);
assert(value != NULL);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
eliminate_pop_guard(this_instr, value != Py_True);
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
}
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@ -2226,7 +2268,11 @@
if (sym_is_const(flag)) {
PyObject *value = sym_get_const(flag);
assert(value != NULL);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
eliminate_pop_guard(this_instr, value != Py_False);
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
}
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@ -2239,14 +2285,22 @@
if (sym_is_const(flag)) {
PyObject *value = sym_get_const(flag);
assert(value != NULL);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
eliminate_pop_guard(this_instr, !Py_IsNone(value));
}
else if (sym_has_type(flag)) {
else {
if (sym_has_type(flag)) {
assert(!sym_matches_type(flag, &_PyNone_Type));
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
eliminate_pop_guard(this_instr, true);
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
}
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
}
break;
}
@ -2256,14 +2310,22 @@
if (sym_is_const(flag)) {
PyObject *value = sym_get_const(flag);
assert(value != NULL);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
eliminate_pop_guard(this_instr, Py_IsNone(value));
}
else if (sym_has_type(flag)) {
else {
if (sym_has_type(flag)) {
assert(!sym_matches_type(flag, &_PyNone_Type));
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
eliminate_pop_guard(this_instr, false);
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
}
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
}
break;
}

View File

@ -1,13 +1,13 @@
from dataclasses import dataclass
from dataclasses import dataclass, field
import itertools
import lexer
import parser
import re
from typing import Optional
@dataclass
class Properties:
escapes: bool
escaping_calls: dict[lexer.Token, tuple[lexer.Token, lexer.Token]]
error_with_pop: bool
error_without_pop: bool
deopts: bool
@ -29,14 +29,21 @@ class Properties:
needs_prev: bool = False
def dump(self, indent: str) -> None:
print(indent, end="")
text = ", ".join([f"{key}: {value}" for (key, value) in self.__dict__.items()])
simple_properties = self.__dict__.copy()
del simple_properties["escaping_calls"]
text = "escaping_calls:\n"
for tkns in self.escaping_calls.values():
text += f"{indent} {tkns}\n"
text += ", ".join([f"{key}: {value}" for (key, value) in simple_properties.items()])
print(indent, text, sep="")
@staticmethod
def from_list(properties: list["Properties"]) -> "Properties":
escaping_calls: dict[lexer.Token, tuple[lexer.Token, lexer.Token]] = {}
for p in properties:
escaping_calls.update(p.escaping_calls)
return Properties(
escapes=any(p.escapes for p in properties),
escaping_calls=escaping_calls,
error_with_pop=any(p.error_with_pop for p in properties),
error_without_pop=any(p.error_without_pop for p in properties),
deopts=any(p.deopts for p in properties),
@ -59,9 +66,12 @@ class Properties:
def infallible(self) -> bool:
return not self.error_with_pop and not self.error_without_pop
@property
def escapes(self) -> bool:
return bool(self.escaping_calls)
SKIP_PROPERTIES = Properties(
escapes=False,
escaping_calls={},
error_with_pop=False,
error_without_pop=False,
deopts=False,
@ -156,6 +166,7 @@ class Uop:
stack: StackEffect
caches: list[CacheEntry]
deferred_refs: dict[lexer.Token, str | None]
output_stores: list[lexer.Token]
body: list[lexer.Token]
properties: Properties
_size: int = -1
@ -322,11 +333,24 @@ def analyze_stack(
]
# Mark variables with matching names at the base of the stack as "peek"
modified = False
for input, output in zip(inputs, outputs):
if input.name == output.name and not modified:
input_names: dict[str, lexer.Token] = { i.name : i.first_token for i in op.inputs if i.name != "unused" }
for input, output in itertools.zip_longest(inputs, outputs):
if output is None:
pass
elif input is None:
if output.name in input_names:
raise analysis_error(
f"Reuse of variable '{output.name}' at different stack location",
input_names[output.name])
elif input.name == output.name:
if not modified:
input.peek = output.peek = True
else:
modified = True
if output.name in input_names:
raise analysis_error(
f"Reuse of variable '{output.name}' at different stack location",
input_names[output.name])
if isinstance(op, parser.InstDef):
output_names = [out.name for out in outputs]
for input in inputs:
@ -354,21 +378,46 @@ def analyze_caches(inputs: list[parser.InputEffect]) -> list[CacheEntry]:
return [CacheEntry(i.name, int(i.size)) for i in caches]
def analyze_deferred_refs(node: parser.InstDef) -> dict[lexer.Token, str | None]:
"""Look for PyStackRef_FromPyObjectNew() calls"""
def find_assignment_target(idx: int) -> list[lexer.Token]:
def find_assignment_target(node: parser.InstDef, idx: int) -> list[lexer.Token]:
"""Find the tokens that make up the left-hand side of an assignment"""
offset = 1
for tkn in reversed(node.block.tokens[: idx - 1]):
if tkn.kind == "SEMI" or tkn.kind == "LBRACE" or tkn.kind == "RBRACE":
return node.block.tokens[idx - offset : idx - 1]
offset = 0
for tkn in reversed(node.block.tokens[: idx]):
if tkn.kind in {"SEMI", "LBRACE", "RBRACE"}:
return node.block.tokens[idx - offset : idx]
offset += 1
return []
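
A self-contained sketch of the same backward scan, on plain strings instead of lexer.Token (the lhs_tokens helper and the sample token list are illustrative only, not part of the generator):

# Hedged sketch of find_assignment_target(): starting just before the "=",
# walk back to the previous ";", "{" or "}" and return the tokens in between
# as the left-hand side of the assignment.
def lhs_tokens(tokens: list[str], eq_idx: int) -> list[str]:
    offset = 0
    for tok in reversed(tokens[:eq_idx]):
        if tok in (";", "{", "}"):
            return tokens[eq_idx - offset : eq_idx]
        offset += 1
    return []

toks = ["{", "res", "=", "PyLong_FromLong", "(", "x", ")", ";", "}"]
assert lhs_tokens(toks, toks.index("=")) == ["res"]
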
def find_stores_outputs(node: parser.InstDef) -> list[lexer.Token]:
res: list[lexer.Token] = []
outnames = { out.name for out in node.outputs }
innames = { out.name for out in node.inputs }
for idx, tkn in enumerate(node.block.tokens):
if tkn.kind == "AND":
name = node.block.tokens[idx+1]
if name.text in outnames:
res.append(name)
if tkn.kind != "EQUALS":
continue
lhs = find_assignment_target(node, idx)
assert lhs
while lhs and lhs[0].kind == "COMMENT":
lhs = lhs[1:]
if len(lhs) != 1 or lhs[0].kind != "IDENTIFIER":
continue
name = lhs[0]
if name.text in innames:
raise analysis_error(f"Cannot assign to input variable '{name.text}'", name)
if name.text in outnames:
res.append(name)
return res
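
A much-simplified, hedged sketch of what find_stores_outputs() treats as a store (plain strings, no lexer machinery; the real code resolves the left-hand side via find_assignment_target() and skips comments):

# A store is a plain assignment to an output name, or taking an output's
# address with "&"; assigning to an input variable is rejected.
def stores(tokens: list[str], outputs: set[str], inputs: set[str]) -> list[str]:
    found: list[str] = []
    for i, tok in enumerate(tokens):
        if tok == "&" and i + 1 < len(tokens) and tokens[i + 1] in outputs:
            found.append(tokens[i + 1])
        elif tok == "=" and i > 0:
            name = tokens[i - 1]
            if name in inputs:
                raise ValueError(f"Cannot assign to input variable '{name}'")
            if name in outputs:
                found.append(name)
    return found

assert stores(["res", "=", "NULL", ";"], {"res"}, {"value"}) == ["res"]
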
def analyze_deferred_refs(node: parser.InstDef) -> dict[lexer.Token, str | None]:
"""Look for PyStackRef_FromPyObjectNew() calls"""
def in_frame_push(idx: int) -> bool:
for tkn in reversed(node.block.tokens[: idx - 1]):
if tkn.kind == "SEMI" or tkn.kind == "LBRACE" or tkn.kind == "RBRACE":
if tkn.kind in {"SEMI", "LBRACE", "RBRACE"}:
return False
if tkn.kind == "IDENTIFIER" and tkn.text == "_PyFrame_PushUnchecked":
return True
@ -386,7 +435,7 @@ def analyze_deferred_refs(node: parser.InstDef) -> dict[lexer.Token, str | None]
continue
raise analysis_error("Expected '=' before PyStackRef_FromPyObjectNew", tkn)
lhs = find_assignment_target(idx)
lhs = find_assignment_target(node, idx - 1)
if len(lhs) == 0:
raise analysis_error(
"PyStackRef_FromPyObjectNew() must be assigned to an output", tkn
@ -406,9 +455,13 @@ def analyze_deferred_refs(node: parser.InstDef) -> dict[lexer.Token, str | None]
)
name = lhs[0].text
if not any(var.name == name for var in node.outputs):
match = (
any(var.name == name for var in node.inputs)
or any(var.name == name for var in node.outputs)
)
if not match:
raise analysis_error(
f"PyStackRef_FromPyObjectNew() must be assigned to an output, not '{name}'",
f"PyStackRef_FromPyObjectNew() must be assigned to an input or output, not '{name}'",
tkn,
)
@ -461,114 +514,182 @@ def has_error_without_pop(op: parser.InstDef) -> bool:
NON_ESCAPING_FUNCTIONS = (
"PyStackRef_FromPyObjectSteal",
"PyCFunction_GET_FLAGS",
"PyCFunction_GET_FUNCTION",
"PyCFunction_GET_SELF",
"PyCell_GetRef",
"PyCell_New",
"PyCell_SwapTakeRef",
"PyExceptionInstance_Class",
"PyException_GetCause",
"PyException_GetContext",
"PyException_GetTraceback",
"PyFloat_AS_DOUBLE",
"PyFloat_FromDouble",
"PyFunction_GET_CODE",
"PyFunction_GET_GLOBALS",
"PyList_GET_ITEM",
"PyList_GET_SIZE",
"PyList_SET_ITEM",
"PyLong_AsLong",
"PyLong_FromLong",
"PyLong_FromSsize_t",
"PySlice_New",
"PyStackRef_AsPyObjectBorrow",
"PyStackRef_AsPyObjectNew",
"PyStackRef_AsPyObjectSteal",
"PyStackRef_CLEAR",
"PyStackRef_CLOSE",
"PyStackRef_DUP",
"PyStackRef_CLEAR",
"PyStackRef_IsNull",
"PyStackRef_TYPE",
"PyStackRef_False",
"PyStackRef_True",
"PyStackRef_None",
"PyStackRef_Is",
"PyStackRef_FromPyObjectNew",
"PyStackRef_AsPyObjectNew",
"PyStackRef_FromPyObjectImmortal",
"Py_INCREF",
"_PyManagedDictPointer_IsValues",
"_PyObject_GetManagedDict",
"_PyObject_ManagedDictPointer",
"_PyObject_InlineValues",
"_PyDictValues_AddToInsertionOrder",
"Py_DECREF",
"Py_XDECREF",
"_Py_DECREF_SPECIALIZED",
"DECREF_INPUTS_AND_REUSE_FLOAT",
"PyStackRef_FromPyObjectNew",
"PyStackRef_FromPyObjectSteal",
"PyStackRef_Is",
"PyStackRef_IsNull",
"PyStackRef_None",
"PyStackRef_TYPE",
"PyStackRef_True",
"PyTuple_GET_ITEM",
"PyTuple_GET_SIZE",
"PyType_HasFeature",
"PyUnicode_Append",
"_PyLong_IsZero",
"PyUnicode_Concat",
"PyUnicode_GET_LENGTH",
"PyUnicode_READ_CHAR",
"Py_ARRAY_LENGTH",
"Py_CLEAR",
"Py_DECREF",
"Py_FatalError",
"Py_INCREF",
"Py_IS_TYPE",
"Py_NewRef",
"Py_REFCNT",
"Py_SIZE",
"Py_TYPE",
"PyList_GET_ITEM",
"PyList_SET_ITEM",
"PyTuple_GET_ITEM",
"PyList_GET_SIZE",
"PyTuple_GET_SIZE",
"Py_ARRAY_LENGTH",
"Py_UNREACHABLE",
"Py_Unicode_GET_LENGTH",
"PyUnicode_READ_CHAR",
"_Py_SINGLETON",
"PyUnicode_GET_LENGTH",
"_PyLong_IsCompact",
"_PyLong_IsNonNegativeCompact",
"Py_XDECREF",
"_PyCode_CODE",
"_PyDictValues_AddToInsertionOrder",
"_PyErr_Occurred",
"_PyEval_FrameClearAndPop",
"_PyFrame_GetCode",
"_PyFrame_IsIncomplete",
"_PyFrame_PushUnchecked",
"_PyFrame_SetStackPointer",
"_PyFrame_StackPush",
"_PyFunction_SetVersion",
"_PyGen_GetGeneratorFromFrame",
"_PyInterpreterState_GET",
"_PyList_AppendTakeRef",
"_PyList_FromStackRefSteal",
"_PyList_ITEMS",
"_PyLong_Add",
"_PyLong_CompactValue",
"_PyLong_DigitCount",
"_Py_NewRef",
"_Py_IsImmortal",
"PyLong_FromLong",
"_Py_STR",
"_PyLong_Add",
"_PyLong_IsCompact",
"_PyLong_IsNonNegativeCompact",
"_PyLong_IsZero",
"_PyLong_Multiply",
"_PyLong_Subtract",
"Py_NewRef",
"_PyList_ITEMS",
"_PyTuple_ITEMS",
"_PyList_AppendTakeRef",
"_Py_atomic_load_uintptr_relaxed",
"_PyFrame_GetCode",
"_PyManagedDictPointer_IsValues",
"_PyObject_GC_IS_TRACKED",
"_PyObject_GC_MAY_BE_TRACKED",
"_PyObject_GC_TRACK",
"_PyObject_GetManagedDict",
"_PyObject_InlineValues",
"_PyObject_ManagedDictPointer",
"_PyThreadState_HasStackSpace",
"_PyUnicode_Equal",
"_PyFrame_SetStackPointer",
"_PyType_HasFeature",
"PyUnicode_Concat",
"PySlice_New",
"_Py_LeaveRecursiveCallPy",
"CALL_STAT_INC",
"STAT_INC",
"maybe_lltrace_resume_frame",
"_PyUnicode_JoinArray",
"_PyEval_FrameClearAndPop",
"_PyFrame_StackPush",
"PyCell_New",
"PyFloat_AS_DOUBLE",
"_PyFrame_PushUnchecked",
"Py_FatalError",
"STACKREFS_TO_PYOBJECTS",
"STACKREFS_TO_PYOBJECTS_CLEANUP",
"CONVERSION_FAILED",
"_PyList_FromStackRefSteal",
"_PyTuple_FromArraySteal",
"_PyTuple_FromStackRefSteal",
"_Py_set_eval_breaker_bit"
"_PyTuple_ITEMS",
"_PyType_HasFeature",
"_PyType_NewManagedObject",
"_PyUnicode_Equal",
"_PyUnicode_JoinArray",
"_Py_CHECK_EMSCRIPTEN_SIGNALS_PERIODICALLY",
"_Py_DECREF_NO_DEALLOC",
"_Py_DECREF_SPECIALIZED",
"_Py_EnterRecursiveCallTstateUnchecked",
"_Py_ID",
"_Py_IsImmortal",
"_Py_IsImmortalLoose",
"_Py_LeaveRecursiveCallPy",
"_Py_LeaveRecursiveCallTstate",
"_Py_NewRef",
"_Py_SINGLETON",
"_Py_STR",
"_Py_atomic_load_uintptr_relaxed",
"_Py_set_eval_breaker_bit",
"advance_backoff_counter",
"assert",
"backoff_counter_triggers",
"initial_temperature_backoff_counter",
"maybe_lltrace_resume_frame",
"restart_backoff_counter",
)
ESCAPING_FUNCTIONS = (
"import_name",
"import_from",
)
def find_stmt_start(node: parser.InstDef, idx: int) -> lexer.Token:
assert idx < len(node.block.tokens)
while True:
tkn = node.block.tokens[idx-1]
if tkn.kind in {"SEMI", "LBRACE", "RBRACE"}:
break
idx -= 1
assert idx > 0
while node.block.tokens[idx].kind == "COMMENT":
idx += 1
return node.block.tokens[idx]
def makes_escaping_api_call(instr: parser.InstDef) -> bool:
if "CALL_INTRINSIC" in instr.name:
return True
if instr.name == "_BINARY_OP":
return True
tkns = iter(instr.tokens)
for tkn in tkns:
if tkn.kind != lexer.IDENTIFIER:
continue
def find_stmt_end(node: parser.InstDef, idx: int) -> lexer.Token:
assert idx < len(node.block.tokens)
while True:
idx += 1
tkn = node.block.tokens[idx]
if tkn.kind == "SEMI":
return node.block.tokens[idx+1]
def check_escaping_calls(instr: parser.InstDef, escapes: dict[lexer.Token, tuple[lexer.Token, lexer.Token]]) -> None:
calls = {escapes[t][0] for t in escapes}
in_if = 0
tkn_iter = iter(instr.block.tokens)
for tkn in tkn_iter:
if tkn.kind == "IF":
next(tkn_iter)
in_if = 1
if tkn.kind == "IDENTIFIER" and tkn.text in ("DEOPT_IF", "ERROR_IF"):
next(tkn_iter)
in_if = 1
elif tkn.kind == "LPAREN" and in_if:
in_if += 1
elif tkn.kind == "RPAREN":
if in_if:
in_if -= 1
elif tkn in calls and in_if:
raise analysis_error(f"Escaping call '{tkn.text} in condition", tkn)
def find_escaping_api_calls(instr: parser.InstDef) -> dict[lexer.Token, tuple[lexer.Token, lexer.Token]]:
result: dict[lexer.Token, tuple[lexer.Token, lexer.Token]] = {}
tokens = instr.block.tokens
for idx, tkn in enumerate(tokens):
try:
next_tkn = next(tkns)
except StopIteration:
return False
next_tkn = tokens[idx+1]
except IndexError:
break
if tkn.kind == "SWITCH":
raise analysis_error(f"switch statements are not supported due to their complex flow control. Sorry.", tkn)
if next_tkn.kind != lexer.LPAREN:
continue
if tkn.text in ESCAPING_FUNCTIONS:
return True
if tkn.text == "tp_vectorcall":
return True
if not tkn.text.startswith("Py") and not tkn.text.startswith("_Py"):
if tkn.kind == lexer.IDENTIFIER:
if tkn.text.upper() == tkn.text:
# simple macro
continue
#if not tkn.text.startswith(("Py", "_Py", "monitor")):
# continue
if tkn.text.startswith(("sym_", "optimize_")):
# Optimize functions
continue
if tkn.text.endswith("Check"):
continue
@ -578,8 +699,18 @@ def makes_escaping_api_call(instr: parser.InstDef) -> bool:
continue
if tkn.text in NON_ESCAPING_FUNCTIONS:
continue
return True
return False
elif tkn.kind == "RPAREN":
prev = tokens[idx-1]
if prev.text.endswith("_t") or prev.text == "*" or prev.text == "int":
#cast
continue
elif tkn.kind != "RBRACKET":
continue
start = find_stmt_start(instr, idx)
end = find_stmt_end(instr, idx)
result[start] = tkn, end
check_escaping_calls(instr, result)
return result
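
For orientation, a minimal sketch of the identifier filter encoded above; only a tiny excerpt of NON_ESCAPING_FUNCTIONS is shown, and the statement-boundary, cast and DEOPT_IF/ERROR_IF handling is omitted:

# Anything that survives the filters is treated as an escaping call, and the
# generator spills the stack around the whole statement containing it.
NON_ESCAPING_EXCERPT = {"PyStackRef_CLOSE", "Py_DECREF", "PyLong_FromLong"}

def may_escape(name: str) -> bool:
    if name.upper() == name:                    # simple macro
        return False
    if name.startswith(("sym_", "optimize_")):  # optimizer helpers
        return False
    if name.endswith("Check"):                  # type-check predicates
        return False
    return name not in NON_ESCAPING_EXCERPT

assert may_escape("PyObject_GetItem")        # unknown C API call: assume it escapes
assert not may_escape("PyStackRef_CLOSE")    # explicitly listed as non-escaping
assert not may_escape("DEOPT_IF")            # all caps: treated as a simple macro
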
EXITS = {
@ -651,6 +782,7 @@ def effect_depends_on_oparg_1(op: parser.InstDef) -> bool:
def compute_properties(op: parser.InstDef) -> Properties:
escaping_calls = find_escaping_api_calls(op)
has_free = (
variable_used(op, "PyCell_New")
or variable_used(op, "PyCell_GetRef")
@ -671,7 +803,7 @@ def compute_properties(op: parser.InstDef) -> Properties:
error_with_pop = has_error_with_pop(op)
error_without_pop = has_error_without_pop(op)
return Properties(
escapes=makes_escaping_api_call(op),
escaping_calls=escaping_calls,
error_with_pop=error_with_pop,
error_without_pop=error_without_pop,
deopts=deopts_if,
@ -706,6 +838,7 @@ def make_uop(
stack=analyze_stack(op),
caches=analyze_caches(inputs),
deferred_refs=analyze_deferred_refs(op),
output_stores=find_stores_outputs(op),
body=op.block.tokens,
properties=compute_properties(op),
)
@ -726,6 +859,7 @@ def make_uop(
stack=analyze_stack(op, bit),
caches=analyze_caches(inputs),
deferred_refs=analyze_deferred_refs(op),
output_stores=find_stores_outputs(op),
body=op.block.tokens,
properties=properties,
)
@ -749,6 +883,7 @@ def make_uop(
stack=analyze_stack(op),
caches=analyze_caches(inputs),
deferred_refs=analyze_deferred_refs(op),
output_stores=find_stores_outputs(op),
body=op.block.tokens,
properties=properties,
)

View File

@ -18,8 +18,9 @@ class CWriter:
def set_position(self, tkn: Token) -> None:
if self.last_token is not None:
if self.last_token.line < tkn.line:
if self.last_token.end_line < tkn.line:
self.out.write("\n")
if self.last_token.line < tkn.line:
if self.line_directives:
self.out.write(f'#line {tkn.line} "{tkn.filename}"\n')
self.out.write(" " * self.indents[-1])
@ -91,6 +92,8 @@ class CWriter:
self.maybe_dedent(tkn.text)
self.set_position(tkn)
self.emit_text(tkn.text)
if tkn.kind == "CMACRO":
self.newline = True
self.maybe_indent(tkn.text)
def emit_str(self, txt: str) -> None:

View File

@ -9,10 +9,39 @@ from analyzer import (
analysis_error,
)
from cwriter import CWriter
from typing import Callable, Mapping, TextIO, Iterator
from typing import Callable, Mapping, TextIO, Iterator, Iterable
from lexer import Token
from stack import Stack
from stack import Stack, Local, Storage, StackError
# Set this to true for voluminous output showing state of stack and locals
PRINT_STACKS = False
class TokenIterator:
look_ahead: Token | None
iterator: Iterator[Token]
def __init__(self, tkns: Iterable[Token]):
self.iterator = iter(tkns)
self.look_ahead = None
def __iter__(self) -> "TokenIterator":
return self
def __next__(self) -> Token:
if self.look_ahead is None:
return next(self.iterator)
else:
res = self.look_ahead
self.look_ahead = None
return res
def peek(self) -> Token | None:
if self.look_ahead is None:
for tkn in self.iterator:
self.look_ahead = tkn
break
return self.look_ahead
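
A self-contained sketch of the one-token look-ahead behaviour of TokenIterator, using plain strings instead of lexer.Token (PeekIterator is illustrative only):

class PeekIterator:
    def __init__(self, items):
        self._iterator = iter(items)
        self._look_ahead = None
    def __iter__(self):
        return self
    def __next__(self):
        if self._look_ahead is None:
            return next(self._iterator)
        res, self._look_ahead = self._look_ahead, None
        return res
    def peek(self):
        if self._look_ahead is None:
            self._look_ahead = next(self._iterator, None)
        return self._look_ahead

toks = PeekIterator(["DEOPT_IF", "(", "true", ")", ";"])
assert toks.peek() == "DEOPT_IF"   # peek() does not consume
assert next(toks) == "DEOPT_IF"    # the peeked token is returned first
assert next(toks) == "("
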
ROOT = Path(__file__).parent.parent.parent
DEFAULT_INPUT = (ROOT / "Python/bytecodes.c").absolute().as_posix()
@ -47,22 +76,28 @@ def write_header(
)
def emit_to(out: CWriter, tkn_iter: Iterator[Token], end: str) -> None:
def emit_to(out: CWriter, tkn_iter: TokenIterator, end: str) -> Token:
parens = 0
for tkn in tkn_iter:
if tkn.kind == end and parens == 0:
return
return tkn
if tkn.kind == "LPAREN":
parens += 1
if tkn.kind == "RPAREN":
parens -= 1
out.emit(tkn)
raise analysis_error(f"Expecting {end}. Reached end of file", tkn)
ReplacementFunctionType = Callable[
[Token, Iterator[Token], Uop, Stack, Instruction | None], None
[Token, TokenIterator, Uop, Storage, Instruction | None], bool
]
def always_true(tkn: Token | None) -> bool:
if tkn is None:
return False
return tkn.text in {"true", "1"}
class Emitter:
out: CWriter
@ -75,21 +110,41 @@ class Emitter:
"ERROR_IF": self.error_if,
"ERROR_NO_POP": self.error_no_pop,
"DECREF_INPUTS": self.decref_inputs,
"DEAD": self.kill,
"INPUTS_DEAD": self.kill_inputs,
"SYNC_SP": self.sync_sp,
"PyStackRef_FromPyObjectNew": self.py_stack_ref_from_py_object_new,
"SAVE_STACK": self.save_stack,
"RELOAD_STACK": self.reload_stack,
"PyStackRef_CLOSE": self.stackref_close,
"PyStackRef_AsPyObjectSteal": self.stackref_steal,
"DISPATCH": self.dispatch
}
self.out = out
def dispatch(
self,
tkn: Token,
tkn_iter: TokenIterator,
uop: Uop,
storage: Storage,
inst: Instruction | None,
) -> bool:
self.emit(tkn)
return False
def deopt_if(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
unused: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
self.out.emit_at("DEOPT_IF", tkn)
self.out.emit(next(tkn_iter))
lparen = next(tkn_iter)
self.emit(lparen)
assert lparen.kind == "LPAREN"
first_tkn = tkn_iter.peek()
emit_to(self.out, tkn_iter, "RPAREN")
next(tkn_iter) # Semi colon
self.out.emit(", ")
@ -97,25 +152,30 @@ class Emitter:
assert inst.family is not None
self.out.emit(inst.family.name)
self.out.emit(");\n")
return not always_true(first_tkn)
exit_if = deopt_if
def error_if(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
stack: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
self.out.emit_at("if ", tkn)
self.out.emit(next(tkn_iter))
lparen = next(tkn_iter)
self.emit(lparen)
assert lparen.kind == "LPAREN"
first_tkn = tkn_iter.peek()
emit_to(self.out, tkn_iter, "COMMA")
label = next(tkn_iter).text
next(tkn_iter) # RPAREN
next(tkn_iter) # Semi colon
self.out.emit(") ")
c_offset = stack.peek_offset()
storage.clear_inputs("at ERROR_IF")
c_offset = storage.stack.peek_offset()
try:
offset = -int(c_offset)
except ValueError:
@ -130,33 +190,35 @@ class Emitter:
self.out.emit(";\n")
else:
self.out.emit("{\n")
stack.flush_locally(self.out)
storage.copy().flush(self.out)
self.out.emit("goto ")
self.out.emit(label)
self.out.emit(";\n")
self.out.emit("}\n")
return not always_true(first_tkn)
def error_no_pop(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
stack: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
next(tkn_iter) # LPAREN
next(tkn_iter) # RPAREN
next(tkn_iter) # Semi colon
self.out.emit_at("goto error;", tkn)
return False
def decref_inputs(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
stack: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
next(tkn_iter)
next(tkn_iter)
next(tkn_iter)
@ -178,59 +240,278 @@ class Emitter:
self.out.emit(f"PyStackRef_XCLOSE({var.name});\n")
else:
self.out.emit(f"PyStackRef_CLOSE({var.name});\n")
for input in storage.inputs:
input.defined = False
return True
def kill_inputs(
self,
tkn: Token,
tkn_iter: TokenIterator,
uop: Uop,
storage: Storage,
inst: Instruction | None,
) -> bool:
next(tkn_iter)
next(tkn_iter)
next(tkn_iter)
for var in storage.inputs:
var.defined = False
return True
def kill(
self,
tkn: Token,
tkn_iter: TokenIterator,
uop: Uop,
storage: Storage,
inst: Instruction | None,
) -> bool:
next(tkn_iter)
name_tkn = next(tkn_iter)
name = name_tkn.text
next(tkn_iter)
next(tkn_iter)
for var in storage.inputs:
if var.name == name:
var.defined = False
break
else:
raise analysis_error(f"'{name}' is not a live input-only variable", name_tkn)
return True
def stackref_close(
self,
tkn: Token,
tkn_iter: TokenIterator,
uop: Uop,
storage: Storage,
inst: Instruction | None,
) -> bool:
self.out.emit(tkn)
tkn = next(tkn_iter)
assert tkn.kind == "LPAREN"
self.out.emit(tkn)
name = next(tkn_iter)
self.out.emit(name)
if name.kind == "IDENTIFIER":
for var in storage.inputs:
if var.name == name.text:
var.defined = False
rparen = emit_to(self.out, tkn_iter, "RPAREN")
self.emit(rparen)
return True
stackref_steal = stackref_close
def sync_sp(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
stack: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
next(tkn_iter)
next(tkn_iter)
next(tkn_iter)
stack.flush(self.out)
storage.clear_inputs("when syncing stack")
storage.flush(self.out)
self._print_storage(storage)
return True
def py_stack_ref_from_py_object_new(
def emit_save(self, storage: Storage) -> None:
storage.save(self.out)
self._print_storage(storage)
def save_stack(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
stack: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
target = uop.deferred_refs[tkn]
if target is None:
# An assignment we don't handle, such as to a pointer or array.
self.out.emit(tkn)
return
) -> bool:
next(tkn_iter)
next(tkn_iter)
next(tkn_iter)
self.emit_save(storage)
return True
self.out.emit(tkn)
emit_to(self.out, tkn_iter, "SEMI")
self.out.emit(";\n")
def emit_reload(self, storage: Storage) -> None:
storage.reload(self.out)
self._print_storage(storage)
def reload_stack(
self,
tkn: Token,
tkn_iter: TokenIterator,
uop: Uop,
storage: Storage,
inst: Instruction | None,
) -> bool:
next(tkn_iter)
next(tkn_iter)
next(tkn_iter)
self.emit_reload(storage)
return True
def _print_storage(self, storage: Storage) -> None:
if PRINT_STACKS:
self.out.start_line()
self.emit(storage.as_comment())
self.out.start_line()
def _emit_if(
self,
tkn_iter: TokenIterator,
uop: Uop,
storage: Storage,
inst: Instruction | None,
) -> tuple[bool, Token, Storage]:
"""Returns (reachable?, closing '}', stack)."""
tkn = next(tkn_iter)
assert tkn.kind == "LPAREN"
self.out.emit(tkn)
rparen = emit_to(self.out, tkn_iter, "RPAREN")
self.emit(rparen)
if_storage = storage.copy()
reachable, rbrace, if_storage = self._emit_block(tkn_iter, uop, if_storage, inst, True)
try:
maybe_else = tkn_iter.peek()
if maybe_else and maybe_else.kind == "ELSE":
self._print_storage(storage)
self.emit(rbrace)
self.emit(next(tkn_iter))
maybe_if = tkn_iter.peek()
if maybe_if and maybe_if.kind == "IF":
#Emit extra braces around the if to get scoping right
self.emit(" {\n")
self.emit(next(tkn_iter))
else_reachable, rbrace, else_storage = self._emit_if(tkn_iter, uop, storage, inst)
self.out.start_line()
self.emit("}\n")
else:
else_reachable, rbrace, else_storage = self._emit_block(tkn_iter, uop, storage, inst, True)
if not reachable:
# Discard the if storage
reachable = else_reachable
storage = else_storage
elif not else_reachable:
# Discard the else storage
storage = if_storage
reachable = True
else:
if PRINT_STACKS:
self.emit("/* Merge */\n")
else_storage.merge(if_storage, self.out)
storage = else_storage
self._print_storage(storage)
else:
if reachable:
if PRINT_STACKS:
self.emit("/* Merge */\n")
if_storage.merge(storage, self.out)
storage = if_storage
self._print_storage(storage)
else:
# Discard the if storage
reachable = True
except StackError as ex:
self._print_storage(if_storage)
raise analysis_error(ex.args[0], rbrace) # from None
return reachable, rbrace, storage
def _emit_block(
self,
tkn_iter: TokenIterator,
uop: Uop,
storage: Storage,
inst: Instruction | None,
emit_first_brace: bool
) -> tuple[bool, Token, Storage]:
""" Returns (reachable?, closing '}', stack)."""
braces = 1
out_stores = set(uop.output_stores)
tkn = next(tkn_iter)
reload: Token | None = None
try:
reachable = True
line : int = -1
if tkn.kind != "LBRACE":
raise analysis_error(f"PEP 7: expected '{{', found: {tkn.text}", tkn)
escaping_calls = uop.properties.escaping_calls
if emit_first_brace:
self.emit(tkn)
self._print_storage(storage)
for tkn in tkn_iter:
if PRINT_STACKS and tkn.line != line:
self.out.start_line()
self.emit(storage.as_comment())
self.out.start_line()
line = tkn.line
if tkn in escaping_calls:
if tkn != reload:
self.emit_save(storage)
_, reload = escaping_calls[tkn]
elif tkn == reload:
self.emit_reload(storage)
if tkn.kind == "LBRACE":
self.out.emit(tkn)
braces += 1
elif tkn.kind == "RBRACE":
self._print_storage(storage)
braces -= 1
if braces == 0:
return reachable, tkn, storage
self.out.emit(tkn)
elif tkn.kind == "GOTO":
reachable = False
self.out.emit(tkn)
elif tkn.kind == "IDENTIFIER":
if tkn.text in self._replacers:
if not self._replacers[tkn.text](tkn, tkn_iter, uop, storage, inst):
reachable = False
else:
if tkn in out_stores:
for out in storage.outputs:
if out.name == tkn.text:
out.defined = True
out.in_memory = False
break
if tkn.text.startswith("DISPATCH"):
self._print_storage(storage)
reachable = False
self.out.emit(tkn)
elif tkn.kind == "IF":
self.out.emit(tkn)
if_reachable, rbrace, storage = self._emit_if(tkn_iter, uop, storage, inst)
if reachable:
reachable = if_reachable
self.out.emit(rbrace)
else:
self.out.emit(tkn)
except StackError as ex:
raise analysis_error(ex.args[0], tkn) from None
raise analysis_error("Expecting closing brace. Reached end of file", tkn)
# Flush the assignment to the stack. Note that we don't flush the
# stack pointer here, and instead are currently relying on initializing
# unused portions of the stack to NULL.
stack.flush_single_var(self.out, target, uop.stack.outputs)
def emit_tokens(
self,
uop: Uop,
stack: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
tkns = uop.body[1:-1]
if not tkns:
return
tkn_iter = iter(tkns)
) -> Storage:
tkn_iter = TokenIterator(uop.body)
self.out.start_line()
for tkn in tkn_iter:
if tkn.kind == "IDENTIFIER" and tkn.text in self._replacers:
self._replacers[tkn.text](tkn, tkn_iter, uop, stack, inst)
else:
self.out.emit(tkn)
_, rbrace, storage = self._emit_block(tkn_iter, uop, storage, inst, False)
try:
self._print_storage(storage)
storage.push_outputs()
self._print_storage(storage)
except StackError as ex:
raise analysis_error(ex.args[0], rbrace)
return storage
def emit(self, txt: str | Token) -> None:
self.out.emit(txt)

View File

@ -79,7 +79,7 @@ for op in operators:
opmap = {pattern.replace("\\", "") or "\\": op for op, pattern in operators.items()}
# Macros
macro = r"# *(ifdef|ifndef|undef|define|error|endif|if|else|include|#)"
macro = r"#.*\n"
CMACRO = "CMACRO"
id_re = r"[a-zA-Z_][0-9a-zA-Z_]*"
@ -333,6 +333,9 @@ def tokenize(src: str, line: int = 1, filename: str = "") -> Iterator[Token]:
line += newlines
else:
begin = line, start - linestart
if kind == CMACRO:
linestart = end
line += 1
if kind != "\n":
yield Token(
filename, kind, text, begin, (line, start - linestart + len(text))
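
A standalone check of the new macro pattern (only the macro regex comes from this diff; the rest is illustrative):

# The pattern swallows a whole preprocessor line, newline included, so a C
# macro arrives as a single CMACRO token and the tokenizer bumps the line
# counter once per macro.
import re

macro = r"#.*\n"
src = "#define SPAM 1\nint x;\n"
m = re.match(macro, src)
assert m is not None
assert m.group() == "#define SPAM 1\n"
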

View File

@ -18,11 +18,12 @@ from generators_common import (
ROOT,
write_header,
Emitter,
TokenIterator,
)
from cwriter import CWriter
from typing import TextIO, Iterator
from lexer import Token
from stack import Local, Stack, StackError
from stack import Local, Stack, StackError, Storage
DEFAULT_OUTPUT = ROOT / "Python/optimizer_cases.c.h"
DEFAULT_ABSTRACT_INPUT = (ROOT / "Python/optimizer_bytecodes.c").absolute().as_posix()
@ -45,7 +46,7 @@ def declare_variables(uop: Uop, out: CWriter, skip_inputs: bool) -> None:
variables = {"unused"}
if not skip_inputs:
for var in reversed(uop.stack.inputs):
if var.name not in variables:
if var.used and var.name not in variables:
variables.add(var.name)
if var.condition:
out.emit(f"{type_name(var)}{var.name} = NULL;\n")
@ -65,7 +66,7 @@ def declare_variables(uop: Uop, out: CWriter, skip_inputs: bool) -> None:
def decref_inputs(
out: CWriter,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
stack: Stack,
inst: Instruction | None,
@ -76,10 +77,24 @@ def decref_inputs(
out.emit_at("", tkn)
def emit_default(out: CWriter, uop: Uop) -> None:
for i, var in enumerate(uop.stack.outputs):
def emit_default(out: CWriter, uop: Uop, stack: Stack) -> None:
for var in reversed(uop.stack.inputs):
stack.pop(var)
top_offset = stack.top_offset.copy()
for var in uop.stack.outputs:
if var.is_array() and not var.peek and not var.name == "unused":
c_offset = top_offset.to_c()
out.emit(f"{var.name} = &stack_pointer[{c_offset}];\n")
top_offset.push(var)
for var in uop.stack.outputs:
local = Local.undefined(var)
stack.push(local)
if var.name != "unused" and not var.peek:
local.defined = True
if var.is_array():
if var.size == "1":
out.emit(f"{var.name}[0] = sym_new_not_null(ctx);\n")
else:
out.emit(f"for (int _i = {var.size}; --_i >= 0;) {{\n")
out.emit(f"{var.name}[_i] = sym_new_not_null(ctx);\n")
out.emit("}\n")
@ -90,6 +105,11 @@ def emit_default(out: CWriter, uop: Uop) -> None:
class OptimizerEmitter(Emitter):
def emit_save(self, storage: Storage) -> None:
storage.flush(self.out)
def emit_reload(self, storage: Storage) -> None:
pass
@ -102,22 +122,18 @@ def write_uop(
skip_inputs: bool,
) -> None:
locals: dict[str, Local] = {}
try:
prototype = override if override else uop
is_override = override is not None
try:
out.start_line()
for var in reversed(prototype.stack.inputs):
code, local = stack.pop(var, extract_bits=True)
if not skip_inputs:
if override:
code_list, storage = Storage.for_uop(stack, prototype, extract_bits=False)
for code in code_list:
out.emit(code)
if local.defined:
locals[local.name] = local
out.emit(stack.define_output_arrays(prototype.stack.outputs))
if debug:
args = []
for var in prototype.stack.inputs:
if not var.peek or is_override:
args.append(var.name)
for input in prototype.stack.inputs:
if not input.peek or override:
args.append(input.name)
out.emit(f'DEBUG_PRINTF({", ".join(args)});\n')
if override:
for cache in uop.caches:
@ -130,20 +146,18 @@ def write_uop(
out.emit(f"{type}{cache.name} = ({cast})this_instr->operand;\n")
if override:
emitter = OptimizerEmitter(out)
emitter.emit_tokens(override, stack, None)
else:
emit_default(out, uop)
for var in prototype.stack.outputs:
if var.name in locals:
local = locals[var.name]
else:
local = Local.local(var)
stack.push(local)
# No reference management of inputs needed.
for var in storage.inputs: # type: ignore[possibly-undefined]
var.defined = False
storage = emitter.emit_tokens(override, storage, None)
out.start_line()
stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=True)
storage.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=False)
else:
emit_default(out, uop, stack)
out.start_line()
stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=False)
except StackError as ex:
raise analysis_error(ex.args[0], uop.body[0])
raise analysis_error(ex.args[0], prototype.body[0]) # from None
SKIPS = ("_EXTENDED_ARG",)

View File

@ -46,20 +46,41 @@ class Local:
in_memory: bool
defined: bool
def __repr__(self) -> str:
return f"Local('{self.item.name}', mem={self.in_memory}, defined={self.defined}, array={self.is_array()})"
def compact_str(self) -> str:
mtag = "M" if self.in_memory else ""
dtag = "D" if self.defined else ""
atag = "A" if self.is_array() else ""
return f"'{self.item.name}'{mtag}{dtag}{atag}"
@staticmethod
def unused(defn: StackItem) -> "Local":
return Local(defn, False, defn.is_array(), False)
@staticmethod
def local(defn: StackItem) -> "Local":
def undefined(defn: StackItem) -> "Local":
array = defn.is_array()
return Local(defn, not array, array, True)
return Local(defn, not array, array, False)
@staticmethod
def redefinition(var: StackItem, prev: "Local") -> "Local":
assert var.is_array() == prev.is_array()
return Local(var, prev.cached, prev.in_memory, True)
@staticmethod
def from_memory(defn: StackItem) -> "Local":
return Local(defn, True, True, True)
def copy(self) -> "Local":
return Local(
self.item,
self.cached,
self.in_memory,
self.defined
)
@property
def size(self) -> str:
return self.item.size
@ -75,6 +96,16 @@ class Local:
def is_array(self) -> bool:
return self.item.is_array()
def __eq__(self, other: object) -> bool:
if not isinstance(other, Local):
return NotImplemented
return (
self.item is other.item
and self.cached is other.cached
and self.in_memory is other.in_memory
and self.defined is other.defined
)
@dataclass
class StackOffset:
@ -156,10 +187,34 @@ class StackOffset:
res = "-" + res[3:]
return res
def as_int(self) -> int | None:
self.simplify()
int_offset = 0
for item in self.popped:
try:
int_offset -= int(item)
except ValueError:
return None
for item in self.pushed:
try:
int_offset += int(item)
except ValueError:
return None
return int_offset
def clear(self) -> None:
self.popped = []
self.pushed = []
def __bool__(self) -> bool:
self.simplify()
return bool(self.popped) or bool(self.pushed)
def __eq__(self, other: object) -> bool:
if not isinstance(other, StackOffset):
return NotImplemented
return self.to_c() == other.to_c()
class StackError(Exception):
pass
@ -174,7 +229,7 @@ class Stack:
self.variables: list[Local] = []
self.defined: set[str] = set()
def pop(self, var: StackItem, extract_bits: bool = False) -> tuple[str, Local]:
def pop(self, var: StackItem, extract_bits: bool = True) -> tuple[str, Local]:
self.top_offset.pop(var)
indirect = "&" if var.is_array() else ""
if self.variables:
@ -192,7 +247,7 @@ class Stack:
if var.name in UNUSED:
if popped.name not in UNUSED and popped.name in self.defined:
raise StackError(
f"Value is declared unused, but is already cached by prior operation"
f"Value is declared unused, but is already cached by prior operation as '{popped.name}'"
)
return "", popped
if not var.used:
@ -208,6 +263,7 @@ class Stack:
defn = f"{var.name} = &stack_pointer[{self.top_offset.to_c()}];\n"
else:
defn = f"{var.name} = stack_pointer[{self.top_offset.to_c()}];\n"
popped.in_memory = True
return defn, Local.redefinition(var, popped)
self.base_offset.pop(var)
@ -215,7 +271,7 @@ class Stack:
return "", Local.unused(var)
self.defined.add(var.name)
cast = f"({var.type})" if (not indirect and var.type) else ""
bits = ".bits" if cast and not extract_bits else ""
bits = ".bits" if cast and extract_bits else ""
assign = f"{var.name} = {cast}{indirect}stack_pointer[{self.base_offset.to_c()}]{bits};"
if var.condition:
if var.condition == "1":
@ -226,27 +282,14 @@ class Stack:
assign = f"if ({var.condition}) {{ {assign} }}\n"
else:
assign = f"{assign}\n"
in_memory = var.is_array() or var.peek
return assign, Local(var, not var.is_array(), in_memory, True)
return assign, Local.from_memory(var)
def push(self, var: Local) -> None:
assert(var not in self.variables)
self.variables.append(var)
self.top_offset.push(var.item)
if var.item.used:
self.defined.add(var.name)
var.defined = True
def define_output_arrays(self, outputs: list[StackItem]) -> str:
res = []
top_offset = self.top_offset.copy()
for var in outputs:
if var.is_array() and var.used and not var.peek:
c_offset = top_offset.to_c()
top_offset.push(var)
res.append(f"{var.name} = &stack_pointer[{c_offset}];\n")
else:
top_offset.push(var)
return "\n".join(res)
@staticmethod
def _do_emit(
@ -254,102 +297,92 @@ class Stack:
var: StackItem,
base_offset: StackOffset,
cast_type: str = "uintptr_t",
extract_bits: bool = False,
extract_bits: bool = True,
) -> None:
cast = f"({cast_type})" if var.type else ""
bits = ".bits" if cast and not extract_bits else ""
bits = ".bits" if cast and extract_bits else ""
if var.condition == "0":
return
if var.condition and var.condition != "1":
out.emit(f"if ({var.condition}) ")
out.emit(f"stack_pointer[{base_offset.to_c()}]{bits} = {cast}{var.name};\n")
@staticmethod
def _do_flush(
out: CWriter,
variables: list[Local],
base_offset: StackOffset,
top_offset: StackOffset,
cast_type: str = "uintptr_t",
extract_bits: bool = False,
) -> None:
out.start_line()
for var in variables:
if (
var.cached
and not var.in_memory
and not var.item.peek
and not var.name in UNUSED
):
Stack._do_emit(out, var.item, base_offset, cast_type, extract_bits)
base_offset.push(var.item)
if base_offset.to_c() != top_offset.to_c():
print("base", base_offset, "top", top_offset)
assert False
number = base_offset.to_c()
def _adjust_stack_pointer(self, out: CWriter, number: str) -> None:
if number != "0":
out.start_line()
out.emit(f"stack_pointer += {number};\n")
out.emit("assert(WITHIN_STACK_BOUNDS());\n")
out.start_line()
def flush_locally(
self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False
) -> None:
self._do_flush(
out,
self.variables[:],
self.base_offset.copy(),
self.top_offset.copy(),
cast_type,
extract_bits,
)
def flush(
self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False
self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = True
) -> None:
self._do_flush(
out,
self.variables,
self.base_offset,
self.top_offset,
cast_type,
extract_bits,
)
self.variables = []
self.base_offset.clear()
self.top_offset.clear()
def flush_single_var(
self,
out: CWriter,
var_name: str,
outputs: list[StackItem],
cast_type: str = "uintptr_t",
extract_bits: bool = False,
) -> None:
assert any(var.name == var_name for var in outputs)
base_offset = self.base_offset.copy()
top_offset = self.top_offset.copy()
out.start_line()
var_offset = self.base_offset.copy()
for var in self.variables:
base_offset.push(var.item)
for output in outputs:
if any(output == v.item for v in self.variables):
# The variable is already on the stack, such as a peeked value
# in the tier1 generator
continue
if output.name == var_name:
Stack._do_emit(out, output, base_offset, cast_type, extract_bits)
base_offset.push(output)
top_offset.push(output)
if base_offset.to_c() != top_offset.to_c():
print("base", base_offset, "top", top_offset)
assert False
if (
var.defined and
not var.in_memory
):
Stack._do_emit(out, var.item, var_offset, cast_type, extract_bits)
var.in_memory = True
var_offset.push(var.item)
number = self.top_offset.to_c()
self._adjust_stack_pointer(out, number)
self.base_offset -= self.top_offset
self.top_offset.clear()
out.start_line()
def is_flushed(self) -> bool:
return not self.variables and not self.base_offset and not self.top_offset
def peek_offset(self) -> str:
return self.top_offset.to_c()
def as_comment(self) -> str:
return f"/* Variables: {[v.name for v in self.variables]}. Base offset: {self.base_offset.to_c()}. Top offset: {self.top_offset.to_c()} */"
variables = ", ".join([v.compact_str() for v in self.variables])
return (
f"/* Variables: {variables}. base: {self.base_offset.to_c()}. top: {self.top_offset.to_c()} */"
)
def copy(self) -> "Stack":
other = Stack()
other.top_offset = self.top_offset.copy()
other.base_offset = self.base_offset.copy()
other.variables = [var.copy() for var in self.variables]
other.defined = set(self.defined)
return other
def __eq__(self, other: object) -> bool:
if not isinstance(other, Stack):
return NotImplemented
return (
self.top_offset == other.top_offset
and self.base_offset == other.base_offset
and self.variables == other.variables
)
def align(self, other: "Stack", out: CWriter) -> None:
if len(self.variables) != len(other.variables):
raise StackError("Cannot align stacks: differing variables")
if self.top_offset == other.top_offset:
return
diff = self.top_offset - other.top_offset
try:
self.top_offset -= diff
self.base_offset -= diff
self._adjust_stack_pointer(out, diff.to_c())
except ValueError:
raise StackError("Cannot align stacks: cannot adjust stack pointer")
def merge(self, other: "Stack", out: CWriter) -> None:
if len(self.variables) != len(other.variables):
raise StackError("Cannot merge stacks: differing variables")
for self_var, other_var in zip(self.variables, other.variables):
if self_var.name != other_var.name:
raise StackError(f"Mismatched variables on stack: {self_var.name} and {other_var.name}")
self_var.defined = self_var.defined and other_var.defined
self_var.in_memory = self_var.in_memory and other_var.in_memory
self.align(other, out)
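
The flag-combining rule in the loop above can be summarised with plain booleans (a minimal sketch, no Stack or Local machinery):

# After an if/else, a cached value is only treated as defined (or as already
# written to memory) if that holds on both branches.
def merge_flags(a: tuple[bool, bool], b: tuple[bool, bool]) -> tuple[bool, bool]:
    (a_defined, a_in_memory), (b_defined, b_in_memory) = a, b
    return a_defined and b_defined, a_in_memory and b_in_memory

assert merge_flags((True, False), (True, True)) == (True, False)
assert merge_flags((True, True), (False, True)) == (False, True)
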
def get_stack_effect(inst: Instruction | PseudoInstruction) -> Stack:
@ -377,3 +410,213 @@ def get_stack_effect(inst: Instruction | PseudoInstruction) -> Stack:
local = Local.unused(var)
stack.push(local)
return stack
@dataclass
class Storage:
stack: Stack
inputs: list[Local]
outputs: list[Local]
peeks: list[Local]
spilled: int = 0
@staticmethod
def needs_defining(var: Local) -> bool:
return (
not var.defined and
not var.is_array() and
var.name != "unused"
)
@staticmethod
def is_live(var: Local) -> bool:
return (
var.defined and
var.name != "unused"
)
def first_input_not_cleared(self) -> str:
for input in self.inputs:
if input.defined:
return input.name
return ""
def clear_inputs(self, reason:str) -> None:
while self.inputs:
tos = self.inputs.pop()
if self.is_live(tos) and not tos.is_array():
raise StackError(
f"Input '{tos.name}' is still live {reason}"
)
self.stack.pop(tos.item)
def clear_dead_inputs(self) -> None:
live = ""
while self.inputs:
tos = self.inputs[-1]
if self.is_live(tos):
live = tos.name
break
self.inputs.pop()
self.stack.pop(tos.item)
for var in self.inputs:
if not var.defined and not var.is_array() and var.name != "unused":
raise StackError(
f"Input '{var.name}' is not live, but '{live}' is"
)
def _push_defined_outputs(self) -> None:
defined_output = ""
for output in self.outputs:
if output.defined and not output.in_memory:
defined_output = output.name
if not defined_output:
return
self.clear_inputs(f"when output '{defined_output}' is defined")
undefined = ""
for out in self.outputs:
if out.defined:
if undefined:
f"Locals not defined in stack order. "
f"Expected '{undefined}' to be defined before '{out.name}'"
else:
undefined = out.name
while self.outputs and not self.needs_defining(self.outputs[0]):
out = self.outputs.pop(0)
self.stack.push(out)
def locals_cached(self) -> bool:
for out in self.outputs:
if out.defined:
return True
return False
def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = True) -> None:
self.clear_dead_inputs()
self._push_defined_outputs()
self.stack.flush(out, cast_type, extract_bits)
def save(self, out: CWriter) -> None:
assert self.spilled >= 0
if self.spilled == 0:
self.flush(out)
out.start_line()
out.emit("_PyFrame_SetStackPointer(frame, stack_pointer);\n")
self.spilled += 1
def reload(self, out: CWriter) -> None:
if self.spilled == 0:
raise StackError("Cannot reload stack as it hasn't been saved")
assert self.spilled > 0
self.spilled -= 1
if self.spilled == 0:
out.start_line()
out.emit("stack_pointer = _PyFrame_GetStackPointer(frame);\n")
@staticmethod
def for_uop(stack: Stack, uop: Uop, extract_bits: bool = True) -> tuple[list[str], "Storage"]:
code_list: list[str] = []
inputs: list[Local] = []
peeks: list[Local] = []
for input in reversed(uop.stack.inputs):
code, local = stack.pop(input, extract_bits)
code_list.append(code)
if input.peek:
peeks.append(local)
else:
inputs.append(local)
inputs.reverse()
peeks.reverse()
for peek in peeks:
stack.push(peek)
top_offset = stack.top_offset.copy()
for output in uop.stack.outputs:
if output.is_array() and output.used and not output.peek:
c_offset = top_offset.to_c()
top_offset.push(output)
code_list.append(f"{output.name} = &stack_pointer[{c_offset}];\n")
else:
top_offset.push(output)
for var in inputs:
stack.push(var)
outputs = [ Local.undefined(var) for var in uop.stack.outputs if not var.peek ]
return code_list, Storage(stack, inputs, outputs, peeks)
@staticmethod
def copy_list(arg: list[Local]) -> list[Local]:
return [ l.copy() for l in arg ]
def copy(self) -> "Storage":
new_stack = self.stack.copy()
variables = { var.name: var for var in new_stack.variables }
inputs = [ variables[var.name] for var in self.inputs]
assert [v.name for v in inputs] == [v.name for v in self.inputs], (inputs, self.inputs)
return Storage(
new_stack, inputs,
self.copy_list(self.outputs), self.copy_list(self.peeks)
)
def sanity_check(self) -> None:
names: set[str] = set()
for var in self.inputs:
if var.name in names:
raise StackError(f"Duplicate name {var.name}")
names.add(var.name)
names = set()
for var in self.outputs:
if var.name in names:
raise StackError(f"Duplicate name {var.name}")
names.add(var.name)
names = set()
for var in self.stack.variables:
if var.name in names:
raise StackError(f"Duplicate name {var.name}")
names.add(var.name)
def is_flushed(self) -> bool:
for var in self.outputs:
if var.defined and not var.in_memory:
return False
return self.stack.is_flushed()
def merge(self, other: "Storage", out: CWriter) -> None:
self.sanity_check()
if len(self.inputs) != len(other.inputs):
self.clear_dead_inputs()
other.clear_dead_inputs()
if len(self.inputs) != len(other.inputs):
diff = self.inputs[-1] if len(self.inputs) > len(other.inputs) else other.inputs[-1]
raise StackError(f"Unmergeable inputs. Differing state of '{diff.name}'")
for var, other_var in zip(self.inputs, other.inputs):
if var.defined != other_var.defined:
raise StackError(f"'{var.name}' is cleared on some paths, but not all")
if len(self.outputs) != len(other.outputs):
self._push_defined_outputs()
other._push_defined_outputs()
if len(self.outputs) != len(other.outputs):
var = self.outputs[0] if len(self.outputs) > len(other.outputs) else other.outputs[0]
raise StackError(f"'{var.name}' is set on some paths, but not all")
self.stack.merge(other.stack, out)
self.sanity_check()
def push_outputs(self) -> None:
if self.spilled:
raise StackError(f"Unbalanced stack spills")
self.clear_inputs("at the end of the micro-op")
if self.inputs:
raise StackError(f"Input variable '{self.inputs[-1].name}' is still live")
self._push_defined_outputs()
if self.outputs:
for out in self.outputs:
if self.needs_defining(out):
raise StackError(f"Output variable '{self.outputs[0].name}' is not defined")
self.stack.push(out)
self.outputs = []
def as_comment(self) -> str:
stack_comment = self.stack.as_comment()
next_line = "\n "
inputs = ", ".join([var.compact_str() for var in self.inputs])
outputs = ", ".join([var.compact_str() for var in self.outputs])
peeks = ", ".join([var.name for var in self.peeks])
return f"{stack_comment[:-2]}{next_line}inputs: {inputs}{next_line}outputs: {outputs}{next_line}peeks: {peeks} */"

View File

@ -22,10 +22,11 @@ from generators_common import (
write_header,
type_and_null,
Emitter,
TokenIterator,
)
from cwriter import CWriter
from typing import TextIO
from stack import Local, Stack, StackError, get_stack_effect
from stack import Local, Stack, StackError, get_stack_effect, Storage
DEFAULT_OUTPUT = ROOT / "Python/generated_cases.c.h"
@ -47,7 +48,7 @@ def declare_variables(inst: Instruction, out: CWriter) -> None:
try:
stack = get_stack_effect(inst)
except StackError as ex:
raise analysis_error(ex.args[0], inst.where)
raise analysis_error(ex.args[0], inst.where) from None
required = set(stack.defined)
required.discard("unused")
for part in inst.parts:
@ -70,46 +71,26 @@ def write_uop(
stack: Stack,
inst: Instruction,
braces: bool,
) -> int:
) -> tuple[int, Stack]:
# out.emit(stack.as_comment() + "\n")
if isinstance(uop, Skip):
entries = "entries" if uop.size > 1 else "entry"
emitter.emit(f"/* Skip {uop.size} cache {entries} */\n")
return offset + uop.size
return (offset + uop.size), stack
if isinstance(uop, Flush):
emitter.emit(f"// flush\n")
stack.flush(emitter.out)
return offset
return offset, stack
try:
locals: dict[str, Local] = {}
emitter.out.start_line()
if braces:
emitter.out.emit(f"// {uop.name}\n")
peeks: list[Local] = []
for var in reversed(uop.stack.inputs):
code, local = stack.pop(var)
emitter.emit(code)
if var.peek:
peeks.append(local)
if local.defined:
locals[local.name] = local
# Push back the peeks, so that they remain on the logical
# stack, but their values are cached.
while peeks:
stack.push(peeks.pop())
if braces:
emitter.emit("{\n")
emitter.out.emit(stack.define_output_arrays(uop.stack.outputs))
outputs: list[Local] = []
for var in uop.stack.outputs:
if not var.peek:
if var.name in locals:
local = locals[var.name]
elif var.name == "unused":
local = Local.unused(var)
else:
local = Local.local(var)
outputs.append(local)
code_list, storage = Storage.for_uop(stack, uop)
emitter._print_storage(storage)
for code in code_list:
emitter.emit(code)
for cache in uop.caches:
if cache.name != "unused":
@ -125,17 +106,13 @@ def write_uop(
if inst.family is None:
emitter.emit(f"(void){cache.name};\n")
offset += cache.size
emitter.emit_tokens(uop, stack, inst)
for output in outputs:
if output.name in uop.deferred_refs.values():
# We've already spilled this when emitting tokens
output.cached = False
stack.push(output)
storage = emitter.emit_tokens(uop, storage, inst)
if braces:
emitter.out.start_line()
emitter.emit("}\n")
# emitter.emit(stack.as_comment() + "\n")
return offset
return offset, storage.stack
except StackError as ex:
raise analysis_error(ex.args[0], uop.body[0])
@ -197,10 +174,11 @@ def generate_tier1(
for part in inst.parts:
# Only emit braces if more than one uop
insert_braces = len([p for p in inst.parts if isinstance(p, Uop)]) > 1
offset = write_uop(part, emitter, offset, stack, inst, insert_braces)
offset, stack = write_uop(part, emitter, offset, stack, inst, insert_braces)
out.start_line()
if not inst.parts[-1].properties.always_exits:
stack.flush(out)
if not inst.parts[-1].properties.always_exits:
out.emit("DISPATCH();\n")
out.start_line()
out.emit("}")

View File

@ -20,11 +20,13 @@ from generators_common import (
write_header,
type_and_null,
Emitter,
TokenIterator,
always_true,
)
from cwriter import CWriter
from typing import TextIO, Iterator
from lexer import Token
from stack import Local, Stack, StackError, get_stack_effect
from stack import Local, Stack, StackError, Storage
DEFAULT_OUTPUT = ROOT / "Python/executor_cases.c.h"
@ -32,7 +34,7 @@ DEFAULT_OUTPUT = ROOT / "Python/executor_cases.c.h"
def declare_variable(
var: StackItem, uop: Uop, required: set[str], out: CWriter
) -> None:
if var.name not in required:
if not var.used or var.name not in required:
return
required.remove(var.name)
type, null = type_and_null(var)
@ -52,7 +54,7 @@ def declare_variables(uop: Uop, out: CWriter) -> None:
for var in reversed(uop.stack.inputs):
stack.pop(var)
for var in uop.stack.outputs:
stack.push(Local.unused(var))
stack.push(Local.undefined(var))
required = set(stack.defined)
required.discard("unused")
for var in reversed(uop.stack.inputs):
@ -69,88 +71,103 @@ class Tier2Emitter(Emitter):
def error_if(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
stack: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
self.out.emit_at("if ", tkn)
self.emit(next(tkn_iter))
lparen = next(tkn_iter)
self.emit(lparen)
assert lparen.kind == "LPAREN"
first_tkn = next(tkn_iter)
self.out.emit(first_tkn)
emit_to(self.out, tkn_iter, "COMMA")
label = next(tkn_iter).text
next(tkn_iter) # RPAREN
next(tkn_iter) # Semi colon
self.emit(") JUMP_TO_ERROR();\n")
return not always_true(first_tkn)
def error_no_pop(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
stack: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
next(tkn_iter) # LPAREN
next(tkn_iter) # RPAREN
next(tkn_iter) # Semi colon
self.out.emit_at("JUMP_TO_ERROR();", tkn)
return False
def deopt_if(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
unused: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
self.out.emit_at("if ", tkn)
self.emit(next(tkn_iter))
lparen = next(tkn_iter)
self.emit(lparen)
assert lparen.kind == "LPAREN"
first_tkn = tkn_iter.peek()
emit_to(self.out, tkn_iter, "RPAREN")
next(tkn_iter) # Semi colon
self.emit(") {\n")
self.emit("UOP_STAT_INC(uopcode, miss);\n")
self.emit("JUMP_TO_JUMP_TARGET();\n")
self.emit("}\n")
return not always_true(first_tkn)
def exit_if( # type: ignore[override]
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
unused: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
self.out.emit_at("if ", tkn)
self.emit(next(tkn_iter))
lparen = next(tkn_iter)
self.emit(lparen)
first_tkn = tkn_iter.peek()
emit_to(self.out, tkn_iter, "RPAREN")
next(tkn_iter) # Semi colon
self.emit(") {\n")
self.emit("UOP_STAT_INC(uopcode, miss);\n")
self.emit("JUMP_TO_JUMP_TARGET();\n")
self.emit("}\n")
return not always_true(first_tkn)
def oparg(
self,
tkn: Token,
tkn_iter: Iterator[Token],
tkn_iter: TokenIterator,
uop: Uop,
unused: Stack,
storage: Storage,
inst: Instruction | None,
) -> None:
) -> bool:
if not uop.name.endswith("_0") and not uop.name.endswith("_1"):
self.emit(tkn)
return
return True
amp = next(tkn_iter)
if amp.text != "&":
self.emit(tkn)
self.emit(amp)
return
return True
one = next(tkn_iter)
assert one.text == "1"
self.out.emit_at(uop.name[-1], tkn)
return True
def write_uop(uop: Uop, emitter: Emitter, stack: Stack) -> None:
def write_uop(uop: Uop, emitter: Emitter, stack: Stack) -> Stack:
locals: dict[str, Local] = {}
try:
emitter.out.start_line()
@ -160,19 +177,9 @@ def write_uop(uop: Uop, emitter: Emitter, stack: Stack) -> None:
elif uop.properties.const_oparg >= 0:
emitter.emit(f"oparg = {uop.properties.const_oparg};\n")
emitter.emit(f"assert(oparg == CURRENT_OPARG());\n")
for var in reversed(uop.stack.inputs):
code, local = stack.pop(var)
code_list, storage = Storage.for_uop(stack, uop)
for code in code_list:
emitter.emit(code)
if local.defined:
locals[local.name] = local
emitter.emit(stack.define_output_arrays(uop.stack.outputs))
outputs: list[Local] = []
for var in uop.stack.outputs:
if var.name in locals:
local = locals[var.name]
else:
local = Local.local(var)
outputs.append(local)
for cache in uop.caches:
if cache.name != "unused":
if cache.size == 4:
@ -181,15 +188,10 @@ def write_uop(uop: Uop, emitter: Emitter, stack: Stack) -> None:
type = f"uint{cache.size*16}_t "
cast = f"uint{cache.size*16}_t"
emitter.emit(f"{type}{cache.name} = ({cast})CURRENT_OPERAND();\n")
emitter.emit_tokens(uop, stack, None)
for output in outputs:
if output.name in uop.deferred_refs.values():
# We've already spilled this when emitting tokens
output.cached = False
stack.push(output)
storage = emitter.emit_tokens(uop, storage, None)
except StackError as ex:
raise analysis_error(ex.args[0], uop.body[0]) from None
return storage.stack
SKIPS = ("_EXTENDED_ARG",)
@ -226,7 +228,7 @@ def generate_tier2(
out.emit(f"case {uop.name}: {{\n")
declare_variables(uop, out)
stack = Stack()
write_uop(uop, emitter, stack)
stack = write_uop(uop, emitter, stack)
out.start_line()
if not uop.properties.always_exits:
stack.flush(out)