Mirror of https://github.com/python/cpython.git, synced 2024-11-21 12:59:38 +01:00
* Add support for 'prev_instr' to code generator and refactor some INSTRUMENTED instructions
parent d9efa45d74
commit afb0aa6ed2
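Reading of the change (not an authoritative summary): the cases generator gains a needs_prev property for any instruction whose body uses prev_instr, and for those instructions it emits _Py_CODEUNIT *prev_instr = frame->instr_ptr; before the usual this_instr prologue line. That lets INSTRUMENTED_LINE become an ordinary inst() in bytecodes.c instead of a hand-written case in ceval.c, while INSTRUMENTED_RETURN_VALUE, INSTRUMENTED_RETURN_CONST and INSTRUMENTED_YIELD_VALUE are rebuilt as macros that prepend _RETURN_VALUE_EVENT / _YIELD_VALUE_EVENT micro-ops to the plain RETURN_VALUE / YIELD_VALUE parts. A minimal sketch of the "does this body mention prev_instr?" test, using made-up helpers rather than the real generator API (the real code uses variable_used(op, "prev_instr"), as seen near the end of this diff):

# Toy sketch: detect that an instruction body references "prev_instr".
# body_uses() is hypothetical; the generator's real check is variable_used().
import re

def body_uses(body: str, name: str) -> bool:
    return re.search(rf"\b{re.escape(name)}\b", body) is not None

INSTRUMENTED_LINE_BODY = '''
    original_opcode = _Py_call_instrumentation_line(
        tstate, frame, this_instr, prev_instr);
'''

print(body_uses(INSTRUMENTED_LINE_BODY, "prev_instr"))  # True -> needs_prev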
Include/internal/pycore_opcode_metadata.h (generated): 9 changed lines
@@ -231,6 +231,8 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
            return 0;
        case INSTRUMENTED_JUMP_FORWARD:
            return 0;
        case INSTRUMENTED_LINE:
            return 0;
        case INSTRUMENTED_LOAD_SUPER_ATTR:
            return 3;
        case INSTRUMENTED_POP_JUMP_IF_FALSE:
@@ -676,6 +678,8 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
            return 0;
        case INSTRUMENTED_JUMP_FORWARD:
            return 0;
        case INSTRUMENTED_LINE:
            return 0;
        case INSTRUMENTED_LOAD_SUPER_ATTR:
            return 1 + (oparg & 1);
        case INSTRUMENTED_POP_JUMP_IF_FALSE:
@@ -689,9 +693,9 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
        case INSTRUMENTED_RESUME:
            return 0;
        case INSTRUMENTED_RETURN_CONST:
            return 0;
            return 1;
        case INSTRUMENTED_RETURN_VALUE:
            return 0;
            return 1;
        case INSTRUMENTED_YIELD_VALUE:
            return 1;
        case INTERPRETER_EXIT:
@@ -1083,6 +1087,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[264] = {
    [INSTRUMENTED_INSTRUCTION] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
    [INSTRUMENTED_JUMP_BACKWARD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG },
    [INSTRUMENTED_JUMP_FORWARD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG },
    [INSTRUMENTED_LINE] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG },
    [INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },
    [INSTRUMENTED_POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },
    [INSTRUMENTED_POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },

Include/internal/pycore_uop_ids.h (generated): 4 changed lines
@@ -145,15 +145,13 @@ extern "C" {
#define _INSTRUMENTED_INSTRUCTION INSTRUMENTED_INSTRUCTION
#define _INSTRUMENTED_JUMP_BACKWARD INSTRUMENTED_JUMP_BACKWARD
#define _INSTRUMENTED_JUMP_FORWARD INSTRUMENTED_JUMP_FORWARD
#define _INSTRUMENTED_LINE INSTRUMENTED_LINE
#define _INSTRUMENTED_LOAD_SUPER_ATTR INSTRUMENTED_LOAD_SUPER_ATTR
#define _INSTRUMENTED_POP_JUMP_IF_FALSE INSTRUMENTED_POP_JUMP_IF_FALSE
#define _INSTRUMENTED_POP_JUMP_IF_NONE INSTRUMENTED_POP_JUMP_IF_NONE
#define _INSTRUMENTED_POP_JUMP_IF_NOT_NONE INSTRUMENTED_POP_JUMP_IF_NOT_NONE
#define _INSTRUMENTED_POP_JUMP_IF_TRUE INSTRUMENTED_POP_JUMP_IF_TRUE
#define _INSTRUMENTED_RESUME INSTRUMENTED_RESUME
#define _INSTRUMENTED_RETURN_CONST INSTRUMENTED_RETURN_CONST
#define _INSTRUMENTED_RETURN_VALUE INSTRUMENTED_RETURN_VALUE
#define _INSTRUMENTED_YIELD_VALUE INSTRUMENTED_YIELD_VALUE
#define _INTERNAL_INCREMENT_OPT_COUNTER 381
#define _IS_NONE 382
#define _IS_OP IS_OP

Include/opcode_ids.h (generated): 30 changed lines
@@ -204,21 +204,21 @@ extern "C" {
#define INSTRUMENTED_RESUME 236
#define INSTRUMENTED_END_FOR 237
#define INSTRUMENTED_END_SEND 238
#define INSTRUMENTED_RETURN_VALUE 239
#define INSTRUMENTED_RETURN_CONST 240
#define INSTRUMENTED_YIELD_VALUE 241
#define INSTRUMENTED_LOAD_SUPER_ATTR 242
#define INSTRUMENTED_FOR_ITER 243
#define INSTRUMENTED_CALL 244
#define INSTRUMENTED_CALL_KW 245
#define INSTRUMENTED_CALL_FUNCTION_EX 246
#define INSTRUMENTED_INSTRUCTION 247
#define INSTRUMENTED_JUMP_FORWARD 248
#define INSTRUMENTED_JUMP_BACKWARD 249
#define INSTRUMENTED_POP_JUMP_IF_TRUE 250
#define INSTRUMENTED_POP_JUMP_IF_FALSE 251
#define INSTRUMENTED_POP_JUMP_IF_NONE 252
#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 253
#define INSTRUMENTED_LOAD_SUPER_ATTR 239
#define INSTRUMENTED_FOR_ITER 240
#define INSTRUMENTED_CALL 241
#define INSTRUMENTED_CALL_KW 242
#define INSTRUMENTED_CALL_FUNCTION_EX 243
#define INSTRUMENTED_INSTRUCTION 244
#define INSTRUMENTED_JUMP_FORWARD 245
#define INSTRUMENTED_JUMP_BACKWARD 246
#define INSTRUMENTED_POP_JUMP_IF_TRUE 247
#define INSTRUMENTED_POP_JUMP_IF_FALSE 248
#define INSTRUMENTED_POP_JUMP_IF_NONE 249
#define INSTRUMENTED_POP_JUMP_IF_NOT_NONE 250
#define INSTRUMENTED_RETURN_VALUE 251
#define INSTRUMENTED_RETURN_CONST 252
#define INSTRUMENTED_YIELD_VALUE 253
#define INSTRUMENTED_LINE 254
#define JUMP 256
#define JUMP_NO_INTERRUPT 257

Lib/_opcode_metadata.py (generated): 30 changed lines
@@ -308,21 +308,21 @@ opmap = {
    'INSTRUMENTED_RESUME': 236,
    'INSTRUMENTED_END_FOR': 237,
    'INSTRUMENTED_END_SEND': 238,
    'INSTRUMENTED_RETURN_VALUE': 239,
    'INSTRUMENTED_RETURN_CONST': 240,
    'INSTRUMENTED_YIELD_VALUE': 241,
    'INSTRUMENTED_LOAD_SUPER_ATTR': 242,
    'INSTRUMENTED_FOR_ITER': 243,
    'INSTRUMENTED_CALL': 244,
    'INSTRUMENTED_CALL_KW': 245,
    'INSTRUMENTED_CALL_FUNCTION_EX': 246,
    'INSTRUMENTED_INSTRUCTION': 247,
    'INSTRUMENTED_JUMP_FORWARD': 248,
    'INSTRUMENTED_JUMP_BACKWARD': 249,
    'INSTRUMENTED_POP_JUMP_IF_TRUE': 250,
    'INSTRUMENTED_POP_JUMP_IF_FALSE': 251,
    'INSTRUMENTED_POP_JUMP_IF_NONE': 252,
    'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 253,
    'INSTRUMENTED_LOAD_SUPER_ATTR': 239,
    'INSTRUMENTED_FOR_ITER': 240,
    'INSTRUMENTED_CALL': 241,
    'INSTRUMENTED_CALL_KW': 242,
    'INSTRUMENTED_CALL_FUNCTION_EX': 243,
    'INSTRUMENTED_INSTRUCTION': 244,
    'INSTRUMENTED_JUMP_FORWARD': 245,
    'INSTRUMENTED_JUMP_BACKWARD': 246,
    'INSTRUMENTED_POP_JUMP_IF_TRUE': 247,
    'INSTRUMENTED_POP_JUMP_IF_FALSE': 248,
    'INSTRUMENTED_POP_JUMP_IF_NONE': 249,
    'INSTRUMENTED_POP_JUMP_IF_NOT_NONE': 250,
    'INSTRUMENTED_RETURN_VALUE': 251,
    'INSTRUMENTED_RETURN_CONST': 252,
    'INSTRUMENTED_YIELD_VALUE': 253,
    'JUMP': 256,
    'JUMP_NO_INTERRUPT': 257,
    'LOAD_CLOSURE': 258,

@@ -2857,7 +2857,7 @@ output.append(4)
        output.append(1)
        1 / 0

    @jump_test(3, 2, [2, 5], event='return')
    @jump_test(3, 2, [2, 2, 5], event='return')
    def test_jump_from_yield(output):
        def gen():
            output.append(2)
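The expected trace changes from [2, 5] to [2, 2, 5], which reads as: with the refactored INSTRUMENTED_YIELD_VALUE, a jump requested from the yield's 'return' event is applied inside the generator frame, so line 2 (output.append(2)) runs a second time before the final 5. For reference, a self-contained example of the same frame.f_lineno jump mechanism driven from an ordinary 'line' event; the line-number arithmetic below is specific to this toy function, and the test above exercises the newer jump-out-of-a-yield case:

import sys

def target():
    out = []
    out.append(1)   # co_firstlineno + 2: the jump lands back here
    out.append(2)   # co_firstlineno + 3: the jump is requested when this line is reached
    return out

JUMP_FROM = target.__code__.co_firstlineno + 3
JUMP_TO = target.__code__.co_firstlineno + 2
jumped = False

def trace(frame, event, arg):
    global jumped
    if event == "line" and not jumped and frame.f_lineno == JUMP_FROM:
        jumped = True
        frame.f_lineno = JUMP_TO   # jump back one line, as jump_test does
    return trace

sys.settrace(trace)
result = target()
sys.settrace(None)
print(result)   # [1, 1, 2]: the jumped-to line executed twice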

@@ -945,48 +945,25 @@ dummy_func(
            LLTRACE_RESUME_FRAME();
        }

        inst(INSTRUMENTED_RETURN_VALUE, (retval --)) {
        tier1 op(_RETURN_VALUE_EVENT, (val -- val)) {
            int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_RETURN,
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(retval));
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
            if (err) ERROR_NO_POP();
            STACK_SHRINK(1);
            assert(EMPTY());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_LeaveRecursiveCallPy(tstate);
            assert(frame != &entry_frame);
            // GH-99729: We need to unlink the frame *before* clearing it:
            _PyInterpreterFrame *dying = frame;
            frame = tstate->current_frame = dying->previous;
            _PyEval_FrameClearAndPop(tstate, dying);
            _PyFrame_StackPush(frame, retval);
            LOAD_IP(frame->return_offset);
            goto resume_frame;
        }

        macro(INSTRUMENTED_RETURN_VALUE) =
            _RETURN_VALUE_EVENT +
            RETURN_VALUE;

        macro(RETURN_CONST) =
            LOAD_CONST +
            RETURN_VALUE;

        inst(INSTRUMENTED_RETURN_CONST, (--)) {
            PyObject *retval = GETITEM(FRAME_CO_CONSTS, oparg);
            int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_RETURN,
                    frame, this_instr, retval);
            if (err) ERROR_NO_POP();
            Py_INCREF(retval);
            assert(EMPTY());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_LeaveRecursiveCallPy(tstate);
            assert(frame != &entry_frame);
            // GH-99729: We need to unlink the frame *before* clearing it:
            _PyInterpreterFrame *dying = frame;
            frame = tstate->current_frame = dying->previous;
            _PyEval_FrameClearAndPop(tstate, dying);
            _PyFrame_StackPush(frame, PyStackRef_FromPyObjectSteal(retval));
            LOAD_IP(frame->return_offset);
            goto resume_frame;
        }
        macro(INSTRUMENTED_RETURN_CONST) =
            LOAD_CONST +
            _RETURN_VALUE_EVENT +
            RETURN_VALUE;

        inst(GET_AITER, (obj -- iter)) {
            unaryfunc getter = NULL;
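The hunk above removes the hand-rolled INSTRUMENTED_RETURN_VALUE and INSTRUMENTED_RETURN_CONST bodies and rebuilds them as macros: an event micro-op (_RETURN_VALUE_EVENT) in front of the ordinary LOAD_CONST / RETURN_VALUE parts, so the frame-unwinding code exists only once. Because the macros now end in RETURN_VALUE, the regenerated metadata earlier in this diff reports one pushed value for them (the return 0; to return 1; changes in pycore_opcode_metadata.h). A toy sketch of composing a macro's net stack effect from its parts, with invented names rather than the generator's real data model:

from dataclasses import dataclass

@dataclass
class MicroOp:
    name: str
    popped: int
    pushed: int

def macro_stack_effect(parts):
    # Net (popped, pushed) for parts executed back to back on one stack.
    depth = 0      # running net stack movement
    lowest = 0     # deepest excursion below the starting stack level
    for part in parts:
        depth -= part.popped
        lowest = min(lowest, depth)
        depth += part.pushed
    return -lowest, depth - lowest

EVENT = MicroOp("_RETURN_VALUE_EVENT", popped=1, pushed=1)   # (val -- val)
RETURN_VALUE = MicroOp("RETURN_VALUE", popped=1, pushed=1)   # (retval -- res)
LOAD_CONST = MicroOp("LOAD_CONST", popped=0, pushed=1)       # (-- value)

print(macro_stack_effect([EVENT, RETURN_VALUE]))              # (1, 1): INSTRUMENTED_RETURN_VALUE
print(macro_stack_effect([LOAD_CONST, EVENT, RETURN_VALUE]))  # (0, 1): INSTRUMENTED_RETURN_CONST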
@@ -1183,31 +1160,6 @@ dummy_func(
            _SEND_GEN_FRAME +
            _PUSH_FRAME;

        inst(INSTRUMENTED_YIELD_VALUE, (retval -- unused)) {
            assert(frame != &entry_frame);
            frame->instr_ptr = next_instr;
            PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame);
            assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
            assert(oparg == 0 || oparg == 1);
            gen->gi_frame_state = FRAME_SUSPENDED + oparg;
            _PyFrame_SetStackPointer(frame, stack_pointer - 1);
            int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_YIELD,
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(retval));
            if (err) ERROR_NO_POP();
            tstate->exc_info = gen->gi_exc_state.previous_item;
            gen->gi_exc_state.previous_item = NULL;
            _Py_LeaveRecursiveCallPy(tstate);
            _PyInterpreterFrame *gen_frame = frame;
            frame = tstate->current_frame = frame->previous;
            gen_frame->previous = NULL;
            _PyFrame_StackPush(frame, retval);
            /* We don't know which of these is relevant here, so keep them equal */
            assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
            LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
            goto resume_frame;
        }

        inst(YIELD_VALUE, (retval -- value)) {
            // NOTE: It's important that YIELD_VALUE never raises an exception!
            // The compiler treats any exception raised here as a failed close()
@@ -1244,6 +1196,23 @@ dummy_func(
            LLTRACE_RESUME_FRAME();
        }

        tier1 op(_YIELD_VALUE_EVENT, (val -- val)) {
            SAVE_SP();
            int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_YIELD,
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
            LOAD_SP();
            if (err) ERROR_NO_POP();
            if (frame->instr_ptr != this_instr) {
                next_instr = frame->instr_ptr;
                DISPATCH();
            }
        }

        macro(INSTRUMENTED_YIELD_VALUE) =
            _YIELD_VALUE_EVENT +
            YIELD_VALUE;

        inst(POP_EXCEPT, (exc_value -- )) {
            _PyErr_StackItem *exc_info = tstate->exc_info;
            Py_XSETREF(exc_info->exc_value,
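_YIELD_VALUE_EVENT brackets the instrumentation call with SAVE_SP()/LOAD_SP() because the callback may run arbitrary Python against the same frame, and it re-dispatches when the callback moved frame->instr_ptr (a debugger-style jump). The PY_YIELD events it raises are the ones exposed to Python through sys.monitoring; a minimal, self-contained observer (requires Python 3.12+):

import sys

TOOL = sys.monitoring.DEBUGGER_ID  # using the predefined debugger slot for the demo

def on_yield(code, instruction_offset, retval):
    print(f"PY_YIELD in {code.co_name} at offset {instruction_offset}: {retval!r}")

sys.monitoring.use_tool_id(TOOL, "yield-logger")
sys.monitoring.register_callback(TOOL, sys.monitoring.events.PY_YIELD, on_yield)
sys.monitoring.set_events(TOOL, sys.monitoring.events.PY_YIELD)

def gen():
    yield 1
    yield 2

list(gen())   # prints one line per yield

sys.monitoring.set_events(TOOL, 0)
sys.monitoring.free_tool_id(TOOL)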
@@ -4450,6 +4419,36 @@ dummy_func(
            assert(oparg >= 2);
        }

        inst(INSTRUMENTED_LINE, ( -- )) {
            int original_opcode = 0;
            if (tstate->tracing) {
                PyCodeObject *code = _PyFrame_GetCode(frame);
                original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyCode_CODE(code))].original_opcode;
                next_instr = this_instr;
            } else {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                original_opcode = _Py_call_instrumentation_line(
                        tstate, frame, this_instr, prev_instr);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (original_opcode < 0) {
                    next_instr = this_instr+1;
                    goto error;
                }
                next_instr = frame->instr_ptr;
                if (next_instr != this_instr) {
                    DISPATCH();
                }
            }
            if (_PyOpcode_Caches[original_opcode]) {
                _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
                /* Prevent the underlying instruction from specializing
                 * and overwriting the instrumentation. */
                PAUSE_ADAPTIVE_COUNTER(cache->counter);
            }
            opcode = original_opcode;
            DISPATCH_GOTO();
        }

        inst(INSTRUMENTED_INSTRUCTION, ( -- )) {
            int next_opcode = _Py_call_instrumentation_instruction(
                tstate, frame, this_instr);
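INSTRUMENTED_LINE can now be an ordinary inst() here because its body names prev_instr; the generator declares it from frame->instr_ptr before the prologue overwrites that field, so _Py_call_instrumentation_line still receives both the current and the previous instruction. From Python these surface as sys.monitoring LINE events, and returning sys.monitoring.DISABLE from the callback disables further LINE events at that particular code location. A minimal example (Python 3.12+; note that top-level lines after set_events() are reported too):

import sys

TOOL = sys.monitoring.COVERAGE_ID

def on_line(code, line_number):
    print(f"LINE {line_number} in {code.co_name}")
    return sys.monitoring.DISABLE   # stop reporting this particular location

sys.monitoring.use_tool_id(TOOL, "line-logger")
sys.monitoring.register_callback(TOOL, sys.monitoring.events.LINE, on_line)
sys.monitoring.set_events(TOOL, sys.monitoring.events.LINE)

def f(n):
    total = 0
    for i in range(n):
        total += i      # reported once, then disabled for this location
    return total

f(3)

sys.monitoring.set_events(TOOL, 0)
sys.monitoring.free_tool_id(TOOL)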
@@ -835,46 +835,6 @@ resume_frame:

#include "generated_cases.c.h"

    /* INSTRUMENTED_LINE has to be here, rather than in bytecodes.c,
     * because it needs to capture frame->instr_ptr before it is updated,
     * as happens in the standard instruction prologue.
     */
    #if USE_COMPUTED_GOTOS
    TARGET_INSTRUMENTED_LINE:
    #else
    case INSTRUMENTED_LINE:
    #endif
    {
        _Py_CODEUNIT *prev = frame->instr_ptr;
        _Py_CODEUNIT *here = frame->instr_ptr = next_instr;
        int original_opcode = 0;
        if (tstate->tracing) {
            PyCodeObject *code = _PyFrame_GetCode(frame);
            original_opcode = code->_co_monitoring->lines[(int)(here - _PyCode_CODE(code))].original_opcode;
        } else {
            _PyFrame_SetStackPointer(frame, stack_pointer);
            original_opcode = _Py_call_instrumentation_line(
                tstate, frame, here, prev);
            stack_pointer = _PyFrame_GetStackPointer(frame);
            if (original_opcode < 0) {
                next_instr = here+1;
                goto error;
            }
            next_instr = frame->instr_ptr;
            if (next_instr != here) {
                DISPATCH();
            }
        }
        if (_PyOpcode_Caches[original_opcode]) {
            _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
            /* Prevent the underlying instruction from specializing
             * and overwriting the instrumentation. */
            PAUSE_ADAPTIVE_COUNTER(cache->counter);
        }
        opcode = original_opcode;
        DISPATCH_GOTO();
    }


#if USE_COMPUTED_GOTOS
    _unknown_opcode:
@@ -402,7 +402,10 @@ static inline void _Py_LeaveRecursiveCallPy(PyThreadState *tstate) {
/* There's no STORE_IP(), it's inlined by the code generator. */

#define LOAD_SP() \
stack_pointer = _PyFrame_GetStackPointer(frame);
stack_pointer = _PyFrame_GetStackPointer(frame)

#define SAVE_SP() \
_PyFrame_SetStackPointer(frame, stack_pointer)

/* Tier-switching macros. */
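LOAD_SP() loses its trailing semicolon (so call sites can write LOAD_SP(); like a normal statement) and gains a SAVE_SP() counterpart; the event micro-ops above use the pair to publish the cached stack pointer into the frame before calling instrumentation and to re-read it afterwards. A toy model of that save/reload discipline, with invented names (an analogy, not CPython's API):

class Frame:
    def __init__(self) -> None:
        self.stack: list[object] = []
        self.sp = 0                       # authoritative depth, like _PyFrame_{Get,Set}StackPointer

def push(frame: Frame, value: object) -> None:
    frame.stack.append(value)
    frame.sp += 1

class ToyInterp:
    def __init__(self, frame: Frame) -> None:
        self.frame = frame
        self.sp = frame.sp                # fast local copy ("stack_pointer")

    def save_sp(self) -> None:            # like SAVE_SP()
        self.frame.sp = self.sp

    def load_sp(self) -> None:            # like LOAD_SP()
        self.sp = self.frame.sp

    def call_instrumentation(self, callback) -> None:
        self.save_sp()                    # the callback sees a consistent frame
        callback(self.frame)              # may run arbitrary Python, push values, etc.
        self.load_sp()                    # pick up whatever the callback did

frame = Frame()
interp = ToyInterp(frame)
interp.call_instrumentation(lambda f: push(f, "pushed by callback"))
print(interp.sp)  # 1: the cached pointer now reflects the callback's push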

Python/executor_cases.c.h (generated): 8 changed lines
@@ -1153,10 +1153,6 @@
            break;
        }

        /* _INSTRUMENTED_RETURN_VALUE is not a viable micro-op for tier 2 because it is instrumented */

        /* _INSTRUMENTED_RETURN_CONST is not a viable micro-op for tier 2 because it is instrumented */

        case _GET_AITER: {
            _PyStackRef obj;
            _PyStackRef iter;
@@ -1304,8 +1300,6 @@
            break;
        }

        /* _INSTRUMENTED_YIELD_VALUE is not a viable micro-op for tier 2 because it is instrumented */

        case _YIELD_VALUE: {
            _PyStackRef retval;
            _PyStackRef value;
@@ -4913,6 +4907,8 @@
            break;
        }

        /* _INSTRUMENTED_LINE is not a viable micro-op for tier 2 because it is instrumented */

        /* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 because it is instrumented */

        /* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 because it is instrumented */

Python/generated_cases.c.h (generated): 222 changed lines
@@ -3657,6 +3657,41 @@
            DISPATCH();
        }

        TARGET(INSTRUMENTED_LINE) {
            _Py_CODEUNIT *prev_instr = frame->instr_ptr;
            _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_LINE);
            int original_opcode = 0;
            if (tstate->tracing) {
                PyCodeObject *code = _PyFrame_GetCode(frame);
                original_opcode = code->_co_monitoring->lines[(int)(this_instr - _PyCode_CODE(code))].original_opcode;
                next_instr = this_instr;
            } else {
                _PyFrame_SetStackPointer(frame, stack_pointer);
                original_opcode = _Py_call_instrumentation_line(
                    tstate, frame, this_instr, prev_instr);
                stack_pointer = _PyFrame_GetStackPointer(frame);
                if (original_opcode < 0) {
                    next_instr = this_instr+1;
                    goto error;
                }
                next_instr = frame->instr_ptr;
                if (next_instr != this_instr) {
                    DISPATCH();
                }
            }
            if (_PyOpcode_Caches[original_opcode]) {
                _PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
                /* Prevent the underlying instruction from specializing
                 * and overwriting the instrumentation. */
                PAUSE_ADAPTIVE_COUNTER(cache->counter);
            }
            opcode = original_opcode;
            DISPATCH_GOTO();
        }

        TARGET(INSTRUMENTED_LOAD_SUPER_ATTR) {
            _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
            (void)this_instr;
@@ -3785,23 +3820,44 @@
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_RETURN_CONST);
            PyObject *retval = GETITEM(FRAME_CO_CONSTS, oparg);
            int err = _Py_call_instrumentation_arg(
                tstate, PY_MONITORING_EVENT_PY_RETURN,
                frame, this_instr, retval);
            if (err) goto error;
            Py_INCREF(retval);
            assert(EMPTY());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_LeaveRecursiveCallPy(tstate);
            assert(frame != &entry_frame);
            // GH-99729: We need to unlink the frame *before* clearing it:
            _PyInterpreterFrame *dying = frame;
            frame = tstate->current_frame = dying->previous;
            _PyEval_FrameClearAndPop(tstate, dying);
            _PyFrame_StackPush(frame, PyStackRef_FromPyObjectSteal(retval));
            LOAD_IP(frame->return_offset);
            goto resume_frame;
            _PyStackRef value;
            _PyStackRef val;
            _PyStackRef retval;
            _PyStackRef res;
            // _LOAD_CONST
            {
                value = PyStackRef_FromPyObjectNew(GETITEM(FRAME_CO_CONSTS, oparg));
            }
            // _RETURN_VALUE_EVENT
            val = value;
            {
                int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_RETURN,
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
                if (err) goto error;
            }
            // _RETURN_VALUE
            retval = val;
            {
                #if TIER_ONE
                assert(frame != &entry_frame);
                #endif
                _PyFrame_SetStackPointer(frame, stack_pointer);
                assert(EMPTY());
                _Py_LeaveRecursiveCallPy(tstate);
                // GH-99729: We need to unlink the frame *before* clearing it:
                _PyInterpreterFrame *dying = frame;
                frame = tstate->current_frame = dying->previous;
                _PyEval_FrameClearAndPop(tstate, dying);
                LOAD_SP();
                LOAD_IP(frame->return_offset);
                res = retval;
                LLTRACE_RESUME_FRAME();
            }
            stack_pointer[0] = res;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }

        TARGET(INSTRUMENTED_RETURN_VALUE) {
@@ -3809,24 +3865,41 @@
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_RETURN_VALUE);
            _PyStackRef val;
            _PyStackRef retval;
            retval = stack_pointer[-1];
            int err = _Py_call_instrumentation_arg(
                tstate, PY_MONITORING_EVENT_PY_RETURN,
                frame, this_instr, PyStackRef_AsPyObjectBorrow(retval));
            if (err) goto error;
            STACK_SHRINK(1);
            assert(EMPTY());
            _PyFrame_SetStackPointer(frame, stack_pointer);
            _Py_LeaveRecursiveCallPy(tstate);
            assert(frame != &entry_frame);
            // GH-99729: We need to unlink the frame *before* clearing it:
            _PyInterpreterFrame *dying = frame;
            frame = tstate->current_frame = dying->previous;
            _PyEval_FrameClearAndPop(tstate, dying);
            _PyFrame_StackPush(frame, retval);
            LOAD_IP(frame->return_offset);
            goto resume_frame;
            _PyStackRef res;
            // _RETURN_VALUE_EVENT
            val = stack_pointer[-1];
            {
                int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_RETURN,
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
                if (err) goto error;
            }
            // _RETURN_VALUE
            retval = val;
            {
                #if TIER_ONE
                assert(frame != &entry_frame);
                #endif
                stack_pointer += -1;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                assert(EMPTY());
                _Py_LeaveRecursiveCallPy(tstate);
                // GH-99729: We need to unlink the frame *before* clearing it:
                _PyInterpreterFrame *dying = frame;
                frame = tstate->current_frame = dying->previous;
                _PyEval_FrameClearAndPop(tstate, dying);
                LOAD_SP();
                LOAD_IP(frame->return_offset);
                res = retval;
                LLTRACE_RESUME_FRAME();
            }
            stack_pointer[0] = res;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }

        TARGET(INSTRUMENTED_YIELD_VALUE) {
@@ -3834,30 +3907,65 @@
            (void)this_instr;
            next_instr += 1;
            INSTRUCTION_STATS(INSTRUMENTED_YIELD_VALUE);
            _PyStackRef val;
            _PyStackRef retval;
            retval = stack_pointer[-1];
            assert(frame != &entry_frame);
            frame->instr_ptr = next_instr;
            PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame);
            assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
            assert(oparg == 0 || oparg == 1);
            gen->gi_frame_state = FRAME_SUSPENDED + oparg;
            _PyFrame_SetStackPointer(frame, stack_pointer - 1);
            int err = _Py_call_instrumentation_arg(
                tstate, PY_MONITORING_EVENT_PY_YIELD,
                frame, this_instr, PyStackRef_AsPyObjectBorrow(retval));
            if (err) goto error;
            tstate->exc_info = gen->gi_exc_state.previous_item;
            gen->gi_exc_state.previous_item = NULL;
            _Py_LeaveRecursiveCallPy(tstate);
            _PyInterpreterFrame *gen_frame = frame;
            frame = tstate->current_frame = frame->previous;
            gen_frame->previous = NULL;
            _PyFrame_StackPush(frame, retval);
            /* We don't know which of these is relevant here, so keep them equal */
            assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
            LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
            goto resume_frame;
            _PyStackRef value;
            // _YIELD_VALUE_EVENT
            val = stack_pointer[-1];
            {
                SAVE_SP();
                int err = _Py_call_instrumentation_arg(
                    tstate, PY_MONITORING_EVENT_PY_YIELD,
                    frame, this_instr, PyStackRef_AsPyObjectBorrow(val));
                LOAD_SP();
                if (err) goto error;
                if (frame->instr_ptr != this_instr) {
                    next_instr = frame->instr_ptr;
                    DISPATCH();
                }
            }
            // _YIELD_VALUE
            retval = val;
            {
                // NOTE: It's important that YIELD_VALUE never raises an exception!
                // The compiler treats any exception raised here as a failed close()
                // or throw() call.
                #if TIER_ONE
                assert(frame != &entry_frame);
                #endif
                frame->instr_ptr++;
                PyGenObject *gen = _PyGen_GetGeneratorFromFrame(frame);
                assert(FRAME_SUSPENDED_YIELD_FROM == FRAME_SUSPENDED + 1);
                assert(oparg == 0 || oparg == 1);
                gen->gi_frame_state = FRAME_SUSPENDED + oparg;
                stack_pointer += -1;
                assert(WITHIN_STACK_BOUNDS());
                _PyFrame_SetStackPointer(frame, stack_pointer);
                tstate->exc_info = gen->gi_exc_state.previous_item;
                gen->gi_exc_state.previous_item = NULL;
                _Py_LeaveRecursiveCallPy(tstate);
                _PyInterpreterFrame *gen_frame = frame;
                frame = tstate->current_frame = frame->previous;
                gen_frame->previous = NULL;
                /* We don't know which of these is relevant here, so keep them equal */
                assert(INLINE_CACHE_ENTRIES_SEND == INLINE_CACHE_ENTRIES_FOR_ITER);
                #if TIER_ONE
                assert(frame->instr_ptr->op.code == INSTRUMENTED_LINE ||
                       frame->instr_ptr->op.code == INSTRUMENTED_INSTRUCTION ||
                       _PyOpcode_Deopt[frame->instr_ptr->op.code] == SEND ||
                       _PyOpcode_Deopt[frame->instr_ptr->op.code] == FOR_ITER ||
                       _PyOpcode_Deopt[frame->instr_ptr->op.code] == INTERPRETER_EXIT ||
                       _PyOpcode_Deopt[frame->instr_ptr->op.code] == ENTER_EXECUTOR);
                #endif
                LOAD_IP(1 + INLINE_CACHE_ENTRIES_SEND);
                LOAD_SP();
                value = retval;
                LLTRACE_RESUME_FRAME();
            }
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            DISPATCH();
        }

        TARGET(INTERPRETER_EXIT) {

Python/opcode_targets.h (generated): 6 changed lines
@@ -238,9 +238,6 @@ static void *opcode_targets[256] = {
    &&TARGET_INSTRUMENTED_RESUME,
    &&TARGET_INSTRUMENTED_END_FOR,
    &&TARGET_INSTRUMENTED_END_SEND,
    &&TARGET_INSTRUMENTED_RETURN_VALUE,
    &&TARGET_INSTRUMENTED_RETURN_CONST,
    &&TARGET_INSTRUMENTED_YIELD_VALUE,
    &&TARGET_INSTRUMENTED_LOAD_SUPER_ATTR,
    &&TARGET_INSTRUMENTED_FOR_ITER,
    &&TARGET_INSTRUMENTED_CALL,
@@ -253,6 +250,9 @@ static void *opcode_targets[256] = {
    &&TARGET_INSTRUMENTED_POP_JUMP_IF_FALSE,
    &&TARGET_INSTRUMENTED_POP_JUMP_IF_NONE,
    &&TARGET_INSTRUMENTED_POP_JUMP_IF_NOT_NONE,
    &&TARGET_INSTRUMENTED_RETURN_VALUE,
    &&TARGET_INSTRUMENTED_RETURN_CONST,
    &&TARGET_INSTRUMENTED_YIELD_VALUE,
    &&TARGET_INSTRUMENTED_LINE,
    &&_unknown_opcode,
};

Python/optimizer_cases.c.h (generated): 8 changed lines
@@ -621,10 +621,6 @@
            break;
        }

        /* _INSTRUMENTED_RETURN_VALUE is not a viable micro-op for tier 2 */

        /* _INSTRUMENTED_RETURN_CONST is not a viable micro-op for tier 2 */

        case _GET_AITER: {
            _Py_UopsSymbol *iter;
            iter = sym_new_not_null(ctx);
@@ -656,8 +652,6 @@
            break;
        }

        /* _INSTRUMENTED_YIELD_VALUE is not a viable micro-op for tier 2 */

        case _YIELD_VALUE: {
            _Py_UopsSymbol *res;
            res = sym_new_unknown(ctx);
@@ -2056,6 +2050,8 @@
            break;
        }

        /* _INSTRUMENTED_LINE is not a viable micro-op for tier 2 */

        /* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 */

        /* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 */

@@ -27,6 +27,7 @@ class Properties:
    tier: int | None = None
    oparg_and_1: bool = False
    const_oparg: int = -1
    needs_prev: bool = False

    def dump(self, indent: str) -> None:
        print(indent, end="")
@@ -53,6 +54,7 @@ class Properties:
            has_free=any(p.has_free for p in properties),
            side_exit=any(p.side_exit for p in properties),
            pure=all(p.pure for p in properties),
            needs_prev=any(p.needs_prev for p in properties),
        )

    @property
@@ -618,6 +620,7 @@ def compute_properties(op: parser.InstDef) -> Properties:
        has_free=has_free,
        pure="pure" in op.annotations,
        tier=tier_variable(op),
        needs_prev=variable_used(op, "prev_instr"),
    )
@@ -797,12 +800,6 @@ def assign_opcodes(

    instrumented = [name for name in instructions if name.startswith("INSTRUMENTED")]

    # Special case: this instruction is implemented in ceval.c
    # rather than bytecodes.c, so we need to add it explicitly
    # here (at least until we add something to bytecodes.c to
    # declare external instructions).
    instrumented.append("INSTRUMENTED_LINE")

    specialized: set[str] = set()
    no_arg: list[str] = []
    has_arg: list[str] = []
@@ -151,7 +151,6 @@ def generate_deopt_table(analysis: Analysis, out: CWriter) -> None:
        if inst.family is not None:
            deopt = inst.family.name
        deopts.append((inst.name, deopt))
    deopts.append(("INSTRUMENTED_LINE", "INSTRUMENTED_LINE"))
    for name, deopt in sorted(deopts):
        out.emit(f"[{name}] = {deopt},\n")
    out.emit("};\n\n")
@@ -179,7 +178,6 @@ def generate_name_table(analysis: Analysis, out: CWriter) -> None:
    out.emit("#ifdef NEED_OPCODE_METADATA\n")
    out.emit(f"const char *_PyOpcode_OpName[{table_size}] = {{\n")
    names = list(analysis.instructions) + list(analysis.pseudos)
    names.append("INSTRUMENTED_LINE")
    for name in sorted(names):
        out.emit(f'[{name}] = "{name}",\n')
    out.emit("};\n")
@@ -148,6 +148,8 @@ def generate_tier1(
        out.emit("\n")
        out.emit(f"TARGET({name}) {{\n")
        unused_guard = "(void)this_instr;\n" if inst.family is None else ""
        if inst.properties.needs_prev:
            out.emit(f"_Py_CODEUNIT *prev_instr = frame->instr_ptr;\n")
        if needs_this and not inst.is_target:
            out.emit(f"_Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;\n")
            out.emit(unused_guard)
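This is the generator-side change named in the commit title: when an instruction's Properties carry needs_prev (set above via variable_used(op, "prev_instr")), generate_tier1 emits the prev_instr declaration before this_instr reassigns frame->instr_ptr. A stripped-down, hypothetical re-implementation of that ordering (toy emitter, not the real CWriter API):

# Toy emitter (invented names) showing the ordering that matters: prev_instr
# must capture frame->instr_ptr *before* this_instr reassigns it.
def emit_prologue(name: str, needs_prev: bool, needs_this: bool) -> str:
    lines = [f"TARGET({name}) {{"]
    if needs_prev:
        lines.append("    _Py_CODEUNIT *prev_instr = frame->instr_ptr;")
    if needs_this:
        lines.append("    _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;")
        lines.append("    (void)this_instr;")
    lines.append("    next_instr += 1;")
    return "\n".join(lines)

print(emit_prologue("INSTRUMENTED_LINE", needs_prev=True, needs_this=True))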