mirror of https://github.com/python/cpython
Merge deoptimization blocks in interpreter (GH-32155)
commit 04acfa94bb
parent 75eee1d57e
Include/internal/pycore_code.h
@@ -110,6 +110,8 @@ _PyCode_Warmup(PyCodeObject *code)
     }
 }
 
+extern uint8_t _PyOpcode_Adaptive[256];
+
 extern Py_ssize_t _Py_QuickenedCount;
 
 // Borrowed references to common callables:

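Exporting the table from pycore_code.h lets the interpreter's merged miss handler look up the adaptive form of whatever instruction just deoptimized, instead of baking that name into a macro. A standalone sketch of the export-and-lookup shape, with invented opcode numbers (only the _PyOpcode_Adaptive, pycore_code.h and specialize.c names come from the commit):

/* Sketch (not CPython code): exporting a lookup table from one translation
 * unit so another can consult it.  In CPython the declaration lives in
 * pycore_code.h and the definition in specialize.c; the shape below mirrors
 * that, but the opcode numbers are invented. */
#include <stdint.h>
#include <stdio.h>

/* --- what the header provides (declaration only) --- */
extern const uint8_t adaptive_table[256];

/* --- what the defining .c file provides --- */
#define LOAD_X          10   /* invented opcode numbers */
#define LOAD_X_ADAPTIVE 20

const uint8_t adaptive_table[256] = {
    [LOAD_X] = LOAD_X_ADAPTIVE,   /* entries left at zero mean "no adaptive form" */
};

/* --- what a consumer (e.g. the interpreter loop) can now do --- */
int main(void)
{
    int opcode = LOAD_X;
    uint8_t adaptive = adaptive_table[opcode];
    printf("opcode %d -> adaptive form %d\n", opcode, adaptive);
    return 0;
}
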
Python/ceval.c
@@ -1431,7 +1431,7 @@ eval_frame_handle_pending(PyThreadState *tstate)
 #define JUMP_TO_INSTRUCTION(op) goto PREDICT_ID(op)
 
 
-#define DEOPT_IF(cond, instname) if (cond) { goto instname ## _miss; }
+#define DEOPT_IF(cond, instname) if (cond) { goto miss; }
 
 
 #define GLOBALS() frame->f_globals

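The old DEOPT_IF pasted the instruction name into a per-family label (goto LOAD_ATTR_miss, CALL_miss, and so on); the new one sends every guard failure to a single shared miss label. A toy, self-contained illustration of that guard-and-fall-back pattern, with all names and the guard condition invented:

/* Toy sketch (not CPython): a specialized fast path guards its assumption
 * with DEOPT_IF and falls back through one shared label on failure. */
#include <stdio.h>

#define DEOPT_IF(cond) if (cond) { goto miss; }

/* "Specialized" addition that assumes both operands are small. */
static long add_small_specialized(long a, long b)
{
    DEOPT_IF(a > 1000 || b > 1000);   /* guard failed: deoptimize */
    return a + b;                      /* fast path */
miss:
    printf("miss: using generic addition\n");
    return a + b;                      /* generic fallback */
}

int main(void)
{
    printf("%ld\n", add_small_specialized(2, 3));
    printf("%ld\n", add_small_specialized(5000, 3));
    return 0;
}
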
@@ -2551,18 +2551,18 @@ handle_eval_breaker:
             }
             Py_DECREF(v);
             if (gen_status == PYGEN_ERROR) {
-                assert (retval == NULL);
+                assert(retval == NULL);
                 goto error;
             }
             if (gen_status == PYGEN_RETURN) {
-                assert (retval != NULL);
+                assert(retval != NULL);
                 Py_DECREF(receiver);
                 SET_TOP(retval);
                 JUMPBY(oparg);
                 DISPATCH();
             }
-            assert (gen_status == PYGEN_NEXT);
-            assert (retval != NULL);
+            assert(gen_status == PYGEN_NEXT);
+            assert(retval != NULL);
             PUSH(retval);
             DISPATCH();
         }

@@ -4595,7 +4595,6 @@ handle_eval_breaker:
         }
 
         TARGET(CALL) {
-            PREDICTED(CALL);
             int is_meth;
         call_function:
             is_meth = is_method(stack_pointer, oparg);

@@ -5524,34 +5523,25 @@ handle_eval_breaker:
 
 /* Specialization misses */
 
-#define MISS_WITH_INLINE_CACHE(opname) \
-opname ## _miss: \
-    { \
-        STAT_INC(opcode, miss); \
-        STAT_INC(opname, miss); \
-        /* The counter is always the first cache entry: */ \
-        _Py_CODEUNIT *counter = (_Py_CODEUNIT *)next_instr; \
-        *counter -= 1; \
-        if (*counter == 0) { \
-            _Py_SET_OPCODE(next_instr[-1], opname ## _ADAPTIVE); \
-            STAT_INC(opname, deopt); \
-            *counter = ADAPTIVE_CACHE_BACKOFF; \
-        } \
-        JUMP_TO_INSTRUCTION(opname); \
-    }
+miss:
+    {
+        STAT_INC(opcode, miss);
+        opcode = _PyOpcode_Deopt[opcode];
+        STAT_INC(opcode, miss);
+        /* The counter is always the first cache entry: */
+        _Py_CODEUNIT *counter = (_Py_CODEUNIT *)next_instr;
+        *counter -= 1;
+        if (*counter == 0) {
+            int adaptive_opcode = _PyOpcode_Adaptive[opcode];
+            assert(adaptive_opcode);
+            _Py_SET_OPCODE(next_instr[-1], adaptive_opcode);
+            STAT_INC(opcode, deopt);
+            *counter = ADAPTIVE_CACHE_BACKOFF;
+        }
+        next_instr--;
+        DISPATCH_GOTO();
+    }
 
-MISS_WITH_INLINE_CACHE(LOAD_ATTR)
-MISS_WITH_INLINE_CACHE(STORE_ATTR)
-MISS_WITH_INLINE_CACHE(LOAD_GLOBAL)
-MISS_WITH_INLINE_CACHE(LOAD_METHOD)
-MISS_WITH_INLINE_CACHE(PRECALL)
-MISS_WITH_INLINE_CACHE(CALL)
-MISS_WITH_INLINE_CACHE(BINARY_OP)
-MISS_WITH_INLINE_CACHE(COMPARE_OP)
-MISS_WITH_INLINE_CACHE(BINARY_SUBSCR)
-MISS_WITH_INLINE_CACHE(UNPACK_SEQUENCE)
-MISS_WITH_INLINE_CACHE(STORE_SUBSCR)
-
 binary_subscr_dict_error:
         {
             PyObject *sub = POP();

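With one shared block, the handler can no longer rely on a macro argument to know which instruction family missed, so it recovers that at run time: _PyOpcode_Deopt maps the specialized opcode back to its base form, _PyOpcode_Adaptive maps the base form to its adaptive variant, and next_instr--; DISPATCH_GOTO(); re-executes the current instruction rather than jumping back to the instruction's PREDICT_ID label via JUMP_TO_INSTRUCTION. A self-contained toy interpreter that mimics the same counter/backoff scheme; every opcode, table, and constant in it is invented for the sketch:

/* Toy interpreter (not CPython) illustrating a single shared deoptimization
 * block: on a guard failure the handler decrements a per-instruction counter,
 * rewrites the instruction back to its adaptive form once the counter hits
 * zero, and then re-executes the same instruction. */
#include <stdint.h>
#include <stdio.h>

enum { OP_HALT = 0, OP_INC = 1, OP_INC_ADAPTIVE = 2, OP_INC_SMALL = 3 };

#define BACKOFF 2   /* stand-in for ADAPTIVE_CACHE_BACKOFF */

/* Specialized form -> generic form (stand-in for _PyOpcode_Deopt). */
static const uint8_t deopt_table[4] = {
    [OP_INC_SMALL] = OP_INC,
};
/* Generic form -> adaptive form (stand-in for _PyOpcode_Adaptive). */
static const uint8_t adaptive_table[4] = {
    [OP_INC] = OP_INC_ADAPTIVE,
};

typedef struct { uint8_t opcode; uint8_t counter; } instr;

static long run(instr *code, long acc)
{
    instr *ip = code;
    for (;;) {
        uint8_t opcode = ip->opcode;
        switch (opcode) {
        case OP_HALT:
            return acc;
        case OP_INC:            /* generic: always works */
        case OP_INC_ADAPTIVE:   /* adaptive form: a real interpreter would try to respecialize here */
            acc += 1;
            ip++;
            break;
        case OP_INC_SMALL:      /* specialized: only valid for small acc */
            if (acc >= 100) {
                goto miss;      /* shared deoptimization block */
            }
            acc += 1;
            ip++;
            break;
        }
        continue;
miss:
        {
            if (--ip->counter == 0) {
                /* Give up on the specialization: map the specialized opcode
                 * back to its generic form, then to its adaptive form. */
                uint8_t generic = deopt_table[opcode];
                ip->opcode = adaptive_table[generic];
                ip->counter = BACKOFF;
                printf("deopt: instruction %ld rewritten to adaptive form\n",
                       (long)(ip - code));
            }
            /* Re-execute the same instruction; the generic path handles it.
             * In CPython this is next_instr--; DISPATCH_GOTO(); */
        }
    }
}

int main(void)
{
    instr code[] = {
        { OP_INC_SMALL, BACKOFF },
        { OP_HALT, 0 },
    };
    printf("%ld\n", run(code, 1));     /* small value: fast path taken */
    printf("%ld\n", run(code, 500));   /* guard fails: deopt, then re-run */
    return 0;
}
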
@@ -6717,7 +6707,7 @@ call_trace(Py_tracefunc func, PyObject *obj,
     int old_what = tstate->tracing_what;
     tstate->tracing_what = what;
     PyThreadState_EnterTracing(tstate);
-    assert (frame->f_lasti >= 0);
+    assert(frame->f_lasti >= 0);
     initialize_trace_info(&tstate->trace_info, frame);
     f->f_lineno = _PyCode_CheckLineNumber(frame->f_lasti*sizeof(_Py_CODEUNIT), &tstate->trace_info.bounds);
     result = func(obj, f, what, arg);

Python/specialize.c
@@ -17,7 +17,7 @@
 
 /* Map from opcode to adaptive opcode.
   Values of zero are ignored. */
-static uint8_t adaptive_opcodes[256] = {
+uint8_t _PyOpcode_Adaptive[256] = {
     [LOAD_ATTR] = LOAD_ATTR_ADAPTIVE,
     [LOAD_GLOBAL] = LOAD_GLOBAL_ADAPTIVE,
     [LOAD_METHOD] = LOAD_METHOD_ADAPTIVE,

@@ -143,7 +143,7 @@ print_spec_stats(FILE *out, OpcodeStats *stats)
      * even though we don't specialize them yet. */
     fprintf(out, "opcode[%d].specializable : 1\n", FOR_ITER);
     for (int i = 0; i < 256; i++) {
-        if (adaptive_opcodes[i]) {
+        if (_PyOpcode_Adaptive[i]) {
             fprintf(out, "opcode[%d].specializable : 1\n", i);
         }
         PRINT_STAT(i, specialization.success);

@@ -259,7 +259,7 @@ _PyCode_Quicken(PyCodeObject *code)
     _Py_CODEUNIT *instructions = _PyCode_CODE(code);
     for (int i = 0; i < Py_SIZE(code); i++) {
         int opcode = _Py_OPCODE(instructions[i]);
-        uint8_t adaptive_opcode = adaptive_opcodes[opcode];
+        uint8_t adaptive_opcode = _PyOpcode_Adaptive[opcode];
         if (adaptive_opcode) {
             _Py_SET_OPCODE(instructions[i], adaptive_opcode);
             // Make sure the adaptive counter is zero:

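After the rename, the quickening pass and the deoptimization path consult the same exported table. A minimal sketch of such a quickening pass over a toy instruction array (the two-byte instruction layout and the opcode numbers are invented; only the table-driven rewrite mirrors the commit):

/* Sketch (not CPython): a quickening pass that swaps each instruction for its
 * adaptive form, using the same table the deoptimization path consults. */
#include <stdint.h>
#include <stdio.h>

enum { OP_NOP = 0, OP_LOAD = 1, OP_LOAD_ADAPTIVE = 2 };

typedef struct { uint8_t opcode; uint8_t counter; } instr;

static const uint8_t adaptive_table[3] = {
    [OP_LOAD] = OP_LOAD_ADAPTIVE,   /* zero entries mean "not specializable" */
};

static void quicken(instr *code, int len)
{
    for (int i = 0; i < len; i++) {
        uint8_t adaptive = adaptive_table[code[i].opcode];
        if (adaptive) {
            code[i].opcode = adaptive;
            code[i].counter = 0;    /* make sure the adaptive counter is zero */
        }
    }
}

int main(void)
{
    instr code[] = { { OP_LOAD, 7 }, { OP_NOP, 0 }, { OP_LOAD, 7 } };
    quicken(code, 3);
    for (int i = 0; i < 3; i++) {
        printf("instr %d: opcode=%d counter=%d\n",
               i, code[i].opcode, code[i].counter);
    }
    return 0;
}
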