GH-111485: Increment next_instr consistently at the start of the instruction. (GH-111486)

Mark Shannon, 2023-10-31 10:09:54 +00:00 (committed by GitHub)
parent e3353c498d
commit d27acd4461
11 changed files with 1079 additions and 521 deletions
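
In short: the case generator used to rely on TARGET() to run frame->instr_ptr = next_instr++ and then bump next_instr past the inline cache at the end of each instruction body; it now emits a uniform preamble that saves the instruction pointer and advances next_instr past the instruction word plus all cache entries up front, keeping a this_instr pointer for anything that needs the current instruction (cache reads, instrumentation, de-optimization). A condensed before/after for the OP1 case from the generator tests below (a sketch for orientation, not an exact hunk):

    /* before: caches addressed from next_instr, which sat on the first cache entry */
    TARGET(OP1) {                       /* TARGET() ran frame->instr_ptr = next_instr++ */
        /* stack setup elided */
        uint16_t counter = read_u16(&next_instr[0].cache);
        op1(left, right);
        next_instr += 1;                /* skip the inline cache at the end */
        DISPATCH();
    }

    /* after: preamble emitted explicitly, caches addressed from this_instr */
    TARGET(OP1) {
        _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
        next_instr += 2;                /* instruction word + 1 cache entry */
        INSTRUCTION_STATS(OP1);
        /* stack setup elided */
        uint16_t counter = read_u16(&this_instr[1].cache);
        op1(left, right);
        DISPATCH();                     /* next_instr is already correct */
    }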


@ -1356,8 +1356,8 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = {
[_GUARD_BOTH_UNICODE] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG },
[_BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG },
[BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_DEOPT_FLAG | HAS_ERROR_FLAG },
[_BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IX, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG },
[BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IX, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG },
[_BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG },
[BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG },
[BINARY_SUBSCR] = { true, INSTR_FMT_IXC, HAS_ERROR_FLAG },
[BINARY_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG },
[STORE_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG },
@ -1434,7 +1434,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = {
[DICT_UPDATE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[DICT_MERGE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[MAP_ADD] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG },
[INSTRUMENTED_LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG },
[LOAD_SUPER_ATTR] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG },
[LOAD_SUPER_METHOD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG },
[LOAD_ZERO_SUPER_METHOD] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_NAME_FLAG | HAS_ERROR_FLAG },
@ -1495,7 +1495,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = {
[GET_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG },
[GET_YIELD_FROM_ITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG },
[FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ERROR_FLAG },
[INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[INSTRUMENTED_FOR_ITER] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[_ITER_CHECK_LIST] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG },
[_ITER_JUMP_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_JUMP_FLAG },
[_IS_ITER_EXHAUSTED_LIST] = { true, INSTR_FMT_IX, 0 },
@ -1533,7 +1533,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = {
[_CHECK_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IX, HAS_DEOPT_FLAG },
[_LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC000, HAS_ARG_FLAG },
[LOAD_ATTR_METHOD_LAZY_DICT] = { true, INSTR_FMT_IBC00000000, HAS_ARG_FLAG | HAS_DEOPT_FLAG },
[INSTRUMENTED_CALL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[INSTRUMENTED_CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_ERROR_FLAG },
[CALL] = { true, INSTR_FMT_IBC00, HAS_ARG_FLAG | HAS_EVAL_BREAK_FLAG | HAS_ERROR_FLAG },
[_CHECK_CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_DEOPT_FLAG },
[_INIT_CALL_BOUND_METHOD_EXACT_ARGS] = { true, INSTR_FMT_IB, HAS_ARG_FLAG },
@ -2047,6 +2047,7 @@ extern const uint8_t _PyOpcode_Caches[256];
#ifdef NEED_OPCODE_METADATA
const uint8_t _PyOpcode_Caches[256] = {
[TO_BOOL] = 3,
[BINARY_OP_INPLACE_ADD_UNICODE] = 1,
[BINARY_SUBSCR] = 1,
[STORE_SUBSCR] = 1,
[SEND] = 1,


@ -138,6 +138,9 @@ def test_inst_no_args(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
spam();
DISPATCH();
}
@ -152,6 +155,9 @@ def test_inst_one_pop(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *value;
value = stack_pointer[-1];
spam();
@ -169,6 +175,9 @@ def test_inst_one_push(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *res;
spam();
STACK_GROW(1);
@ -186,6 +195,9 @@ def test_inst_one_push_one_pop(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *value;
PyObject *res;
value = stack_pointer[-1];
@ -204,6 +216,9 @@ def test_binary_op(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *right;
PyObject *left;
PyObject *res;
@ -225,6 +240,9 @@ def test_overlap(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *right;
PyObject *left;
PyObject *result;
@ -249,6 +267,9 @@ def test_predictions_and_eval_breaker(self):
"""
output = """
TARGET(OP1) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP1);
PREDICTED(OP1);
static_assert(INLINE_CACHE_ENTRIES_OP1 == 0, "incorrect cache size");
PyObject *arg;
@ -259,6 +280,9 @@ def test_predictions_and_eval_breaker(self):
}
TARGET(OP3) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP3);
PyObject *arg;
PyObject *res;
arg = stack_pointer[-1];
@ -278,6 +302,9 @@ def test_error_if_plain(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
if (cond) goto label;
DISPATCH();
}
@ -292,6 +319,9 @@ def test_error_if_plain_with_comment(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
if (cond) goto label;
DISPATCH();
}
@ -306,6 +336,9 @@ def test_error_if_pop(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *right;
PyObject *left;
PyObject *res;
@ -326,12 +359,14 @@ def test_cache_effect(self):
"""
output = """
TARGET(OP) {
_Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
next_instr += 4;
INSTRUCTION_STATS(OP);
PyObject *value;
value = stack_pointer[-1];
uint16_t counter = read_u16(&next_instr[0].cache);
uint32_t extra = read_u32(&next_instr[1].cache);
uint16_t counter = read_u16(&this_instr[1].cache);
uint32_t extra = read_u32(&this_instr[2].cache);
STACK_SHRINK(1);
next_instr += 3;
DISPATCH();
}
"""
@ -345,6 +380,9 @@ def test_suppress_dispatch(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
goto somewhere;
}
"""
@ -366,18 +404,24 @@ def test_macro_instruction(self):
"""
output = """
TARGET(OP1) {
_Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
next_instr += 2;
INSTRUCTION_STATS(OP1);
PyObject *right;
PyObject *left;
right = stack_pointer[-1];
left = stack_pointer[-2];
uint16_t counter = read_u16(&next_instr[0].cache);
uint16_t counter = read_u16(&this_instr[1].cache);
op1(left, right);
next_instr += 1;
DISPATCH();
}
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 6;
INSTRUCTION_STATS(OP);
PREDICTED(OP);
_Py_CODEUNIT *this_instr = next_instr - 6;
static_assert(INLINE_CACHE_ENTRIES_OP == 5, "incorrect cache size");
PyObject *right;
PyObject *left;
@ -387,22 +431,24 @@ def test_macro_instruction(self):
right = stack_pointer[-1];
left = stack_pointer[-2];
{
uint16_t counter = read_u16(&next_instr[0].cache);
uint16_t counter = read_u16(&this_instr[1].cache);
op1(left, right);
}
// OP2
arg2 = stack_pointer[-3];
{
uint32_t extra = read_u32(&next_instr[3].cache);
uint32_t extra = read_u32(&this_instr[4].cache);
res = op2(arg2, left, right);
}
STACK_SHRINK(2);
stack_pointer[-1] = res;
next_instr += 5;
DISPATCH();
}
TARGET(OP3) {
frame->instr_ptr = next_instr;
next_instr += 6;
INSTRUCTION_STATS(OP3);
PyObject *right;
PyObject *left;
PyObject *arg2;
@ -413,7 +459,6 @@ def test_macro_instruction(self):
res = op3(arg2, left, right);
STACK_SHRINK(2);
stack_pointer[-1] = res;
next_instr += 5;
DISPATCH();
}
"""
@ -427,6 +472,9 @@ def test_array_input(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *above;
PyObject **values;
PyObject *below;
@ -449,6 +497,9 @@ def test_array_output(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *below;
PyObject **values;
PyObject *above;
@ -470,6 +521,9 @@ def test_array_input_output(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject **values;
PyObject *above;
values = stack_pointer - oparg;
@ -489,6 +543,9 @@ def test_array_error_if(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject **values;
PyObject *extra;
values = stack_pointer - oparg;
@ -509,6 +566,9 @@ def test_cond_effect(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
PyObject *cc;
PyObject *input = NULL;
PyObject *aa;
@ -541,6 +601,9 @@ def test_macro_cond_effect(self):
"""
output = """
TARGET(M) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(M);
PyObject *right;
PyObject *middle;
PyObject *left;
@ -580,6 +643,9 @@ def test_macro_push_push(self):
"""
output = """
TARGET(M) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(M);
PyObject *val1;
PyObject *val2;
// A
@ -609,6 +675,9 @@ def test_override_inst(self):
"""
output = """
TARGET(OP) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(OP);
ham();
DISPATCH();
}
@ -627,6 +696,9 @@ def test_override_op(self):
"""
output = """
TARGET(M) {
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(M);
ham();
DISPATCH();
}


@ -1378,15 +1378,16 @@ def func():
x = 4
else:
x = 6
7
self.check_events(func, recorders = JUMP_AND_BRANCH_RECORDERS, expected = [
('branch', 'func', 2, 2),
('branch', 'func', 3, 4),
('branch', 'func', 3, 6),
('jump', 'func', 6, 2),
('branch', 'func', 2, 2),
('branch', 'func', 3, 3),
('branch', 'func', 3, 4),
('jump', 'func', 4, 2),
('branch', 'func', 2, 2)])
('branch', 'func', 2, 7)])
self.check_events(func, recorders = JUMP_BRANCH_AND_LINE_RECORDERS, expected = [
('line', 'get_events', 10),
@ -1394,17 +1395,18 @@ def func():
('line', 'func', 2),
('branch', 'func', 2, 2),
('line', 'func', 3),
('branch', 'func', 3, 4),
('branch', 'func', 3, 6),
('line', 'func', 6),
('jump', 'func', 6, 2),
('line', 'func', 2),
('branch', 'func', 2, 2),
('line', 'func', 3),
('branch', 'func', 3, 3),
('branch', 'func', 3, 4),
('line', 'func', 4),
('jump', 'func', 4, 2),
('line', 'func', 2),
('branch', 'func', 2, 2),
('branch', 'func', 2, 7),
('line', 'func', 7),
('line', 'get_events', 11)])
def test_except_star(self):
@ -1434,7 +1436,7 @@ def func():
('line', 'meth', 1),
('jump', 'func', 5, 5),
('jump', 'func', 5, '[offset=114]'),
('branch', 'func', '[offset=120]', '[offset=122]'),
('branch', 'func', '[offset=120]', '[offset=124]'),
('line', 'get_events', 11)])
self.check_events(func, recorders = FLOW_AND_LINE_RECORDERS, expected = [
@ -1450,7 +1452,7 @@ def func():
('return', None),
('jump', 'func', 5, 5),
('jump', 'func', 5, '[offset=114]'),
('branch', 'func', '[offset=120]', '[offset=122]'),
('branch', 'func', '[offset=120]', '[offset=124]'),
('return', None),
('line', 'get_events', 11)])


@ -80,6 +80,7 @@ dummy_func(
pop_1_error:
// Dummy locals.
PyObject *dummy;
_Py_CODEUNIT *this_instr;
PyObject *attr;
PyObject *attrs;
PyObject *bottom;
@ -144,13 +145,13 @@ dummy_func(
if (code_version != global_version) {
int err = _Py_Instrument(_PyFrame_GetCode(frame), tstate->interp);
ERROR_IF(err, error);
next_instr--;
next_instr = this_instr;
}
else {
if (oparg < RESUME_AFTER_YIELD_FROM) {
CHECK_EVAL_BREAKER();
}
next_instr[-1].op.code = RESUME_CHECK;
this_instr->op.code = RESUME_CHECK;
}
}
@ -172,7 +173,7 @@ dummy_func(
if (_Py_Instrument(_PyFrame_GetCode(frame), tstate->interp)) {
goto error;
}
next_instr--;
next_instr = this_instr;
}
else {
if (oparg < 2) {
@ -180,12 +181,12 @@ dummy_func(
}
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = _Py_call_instrumentation(
tstate, oparg > 0, frame, next_instr-1);
tstate, oparg > 0, frame, this_instr);
stack_pointer = _PyFrame_GetStackPointer(frame);
ERROR_IF(err, error);
if (frame->instr_ptr != next_instr-1) {
if (frame->instr_ptr != this_instr) {
/* Instrumentation has jumped */
next_instr = frame->instr_ptr;
next_instr = this_instr;
DISPATCH();
}
}
@ -261,11 +262,12 @@ dummy_func(
macro(END_FOR) = POP_TOP + POP_TOP;
inst(INSTRUMENTED_END_FOR, (receiver, value --)) {
TIER_ONE_ONLY
/* Need to create a fake StopIteration error here,
* to conform to PEP 380 */
if (PyGen_Check(receiver)) {
PyErr_SetObject(PyExc_StopIteration, value);
if (monitor_stop_iteration(tstate, frame, next_instr-1)) {
if (monitor_stop_iteration(tstate, frame, this_instr)) {
goto error;
}
PyErr_SetRaisedException(NULL);
@ -278,9 +280,10 @@ dummy_func(
}
inst(INSTRUMENTED_END_SEND, (receiver, value -- value)) {
TIER_ONE_ONLY
if (PyGen_Check(receiver) || PyCoro_CheckExact(receiver)) {
PyErr_SetObject(PyExc_StopIteration, value);
if (monitor_stop_iteration(tstate, frame, next_instr-1)) {
if (monitor_stop_iteration(tstate, frame, this_instr)) {
goto error;
}
PyErr_SetRaisedException(NULL);
@ -310,14 +313,13 @@ dummy_func(
inst(TO_BOOL, (unused/1, unused/2, value -- res)) {
#if ENABLE_SPECIALIZATION
_PyToBoolCache *cache = (_PyToBoolCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_ToBool(value, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(TO_BOOL, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
int err = PyObject_IsTrue(value);
DECREF_INPUTS();
@ -491,10 +493,9 @@ dummy_func(
// So the inputs are the same as for all BINARY_OP
// specializations, but there is no output.
// At the end we just skip over the STORE_FAST.
op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
_Py_CODEUNIT true_next = next_instr[INLINE_CACHE_ENTRIES_BINARY_OP];
assert(true_next.op.code == STORE_FAST);
PyObject **target_local = &GETLOCAL(true_next.op.arg);
op(_BINARY_OP_INPLACE_ADD_UNICODE, (unused/1, left, right --)) {
assert(next_instr->op.code == STORE_FAST);
PyObject **target_local = &GETLOCAL(next_instr->op.arg);
DEOPT_IF(*target_local != left);
STAT_INC(BINARY_OP, hit);
/* Handle `left = left + right` or `left += right` for str.
@ -514,7 +515,8 @@ dummy_func(
_Py_DECREF_SPECIALIZED(right, _PyUnicode_ExactDealloc);
ERROR_IF(*target_local == NULL, error);
// The STORE_FAST is already done.
SKIP_OVER(INLINE_CACHE_ENTRIES_BINARY_OP + 1);
assert(next_instr->op.code == STORE_FAST);
SKIP_OVER(1);
}
macro(BINARY_OP_INPLACE_ADD_UNICODE) =
@ -530,14 +532,13 @@ dummy_func(
inst(BINARY_SUBSCR, (unused/1, container, sub -- res)) {
#if ENABLE_SPECIALIZATION
_PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_BinarySubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(BINARY_SUBSCR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
res = PyObject_GetItem(container, sub);
DECREF_INPUTS();
@ -656,8 +657,7 @@ dummy_func(
STACK_SHRINK(2);
new_frame->localsplus[0] = container;
new_frame->localsplus[1] = sub;
SKIP_OVER(INLINE_CACHE_ENTRIES_BINARY_SUBSCR);
assert(1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR == next_instr - this_instr);
frame->return_offset = 1 + INLINE_CACHE_ENTRIES_BINARY_SUBSCR;
DISPATCH_INLINED(new_frame);
}
@ -679,14 +679,13 @@ dummy_func(
inst(STORE_SUBSCR, (unused/1, v, container, sub -- )) {
#if ENABLE_SPECIALIZATION
_PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
@ -754,7 +753,7 @@ dummy_func(
case 0:
if (do_raise(tstate, exc, cause)) {
assert(oparg == 0);
monitor_reraise(tstate, frame, next_instr-1);
monitor_reraise(tstate, frame, this_instr);
goto exception_unwind;
}
break;
@ -808,7 +807,7 @@ dummy_func(
inst(INSTRUMENTED_RETURN_VALUE, (retval --)) {
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
frame, next_instr-1, retval);
frame, this_instr, retval);
if (err) goto error;
STACK_SHRINK(1);
assert(EMPTY());
@ -832,7 +831,7 @@ dummy_func(
PyObject *retval = GETITEM(FRAME_CO_CONSTS, oparg);
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_RETURN,
frame, next_instr-1, retval);
frame, this_instr, retval);
if (err) goto error;
Py_INCREF(retval);
assert(EMPTY());
@ -958,14 +957,13 @@ dummy_func(
inst(SEND, (unused/1, receiver, v -- receiver, retval)) {
#if ENABLE_SPECIALIZATION
_PySendCache *cache = (_PySendCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_Send(receiver, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(SEND, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
assert(frame != &entry_frame);
if ((tstate->interp->eval_frame == NULL) &&
@ -979,8 +977,7 @@ dummy_func(
gen->gi_frame_state = FRAME_EXECUTING;
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
SKIP_OVER(INLINE_CACHE_ENTRIES_SEND);
assert(1 + INLINE_CACHE_ENTRIES_SEND == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_SEND == next_instr - this_instr);
frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_SEND + oparg);
DISPATCH_INLINED(gen_frame);
}
@ -993,7 +990,7 @@ dummy_func(
if (retval == NULL) {
if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)
) {
monitor_raise(tstate, frame, next_instr-1);
monitor_raise(tstate, frame, this_instr);
}
if (_PyGen_FetchStopIterationValue(&retval) == 0) {
assert(retval != NULL);
@ -1018,8 +1015,7 @@ dummy_func(
gen->gi_frame_state = FRAME_EXECUTING;
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
SKIP_OVER(INLINE_CACHE_ENTRIES_SEND);
assert(1 + INLINE_CACHE_ENTRIES_SEND == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_SEND == next_instr - this_instr);
frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_SEND + oparg);
DISPATCH_INLINED(gen_frame);
}
@ -1033,7 +1029,7 @@ dummy_func(
_PyFrame_SetStackPointer(frame, stack_pointer - 1);
int err = _Py_call_instrumentation_arg(
tstate, PY_MONITORING_EVENT_PY_YIELD,
frame, next_instr-1, retval);
frame, this_instr, retval);
if (err) goto error;
tstate->exc_info = gen->gi_exc_state.previous_item;
gen->gi_exc_state.previous_item = NULL;
@ -1093,7 +1089,7 @@ dummy_func(
assert(exc && PyExceptionInstance_Check(exc));
Py_INCREF(exc);
_PyErr_SetRaisedException(tstate, exc);
monitor_reraise(tstate, frame, next_instr-1);
monitor_reraise(tstate, frame, this_instr);
goto exception_unwind;
}
@ -1105,7 +1101,7 @@ dummy_func(
else {
Py_INCREF(exc);
_PyErr_SetRaisedException(tstate, exc);
monitor_reraise(tstate, frame, next_instr-1);
monitor_reraise(tstate, frame, this_instr);
goto exception_unwind;
}
}
@ -1120,7 +1116,7 @@ dummy_func(
}
else {
_PyErr_SetRaisedException(tstate, Py_NewRef(exc_value));
monitor_reraise(tstate, frame, next_instr-1);
monitor_reraise(tstate, frame, this_instr);
goto exception_unwind;
}
}
@ -1184,14 +1180,13 @@ dummy_func(
inst(UNPACK_SEQUENCE, (unused/1, seq -- unused[oparg])) {
#if ENABLE_SPECIALIZATION
_PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_UnpackSequence(seq, next_instr, oparg);
DISPATCH_SAME_OPARG();
}
STAT_INC(UNPACK_SEQUENCE, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
PyObject **top = stack_pointer + oparg - 1;
int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
@ -1247,15 +1242,14 @@ dummy_func(
inst(STORE_ATTR, (unused/1, unused/3, v, owner --)) {
#if ENABLE_SPECIALIZATION
_PyAttrCache *cache = (_PyAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
next_instr--;
next_instr = this_instr;
_Py_Specialize_StoreAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_ATTR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
int err = PyObject_SetAttr(owner, name, v);
@ -1369,15 +1363,14 @@ dummy_func(
inst(LOAD_GLOBAL, (unused/1, unused/1, unused/1, unused/1 -- res, null if (oparg & 1))) {
#if ENABLE_SPECIALIZATION
_PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
next_instr--;
next_instr = this_instr;
_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(LOAD_GLOBAL, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
if (PyDict_CheckExact(GLOBALS())
@ -1692,11 +1685,10 @@ dummy_func(
ERROR_IF(_PyDict_SetItem_Take2((PyDictObject *)dict, key, value) != 0, error);
}
inst(INSTRUMENTED_LOAD_SUPER_ATTR, (unused/9, unused, unused, unused -- unused, unused if (oparg & 1))) {
_PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr;
inst(INSTRUMENTED_LOAD_SUPER_ATTR, (unused/1, unused, unused, unused -- unused, unused if (oparg & 1))) {
// cancel out the decrement that will happen in LOAD_SUPER_ATTR; we
// don't want to specialize instrumented instructions
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
INCREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
GO_TO_INSTRUCTION(LOAD_SUPER_ATTR);
}
@ -1706,24 +1698,24 @@ dummy_func(
};
inst(LOAD_SUPER_ATTR, (unused/1, global_super, class, self -- attr, null if (oparg & 1))) {
TIER_ONE_ONLY
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 2);
int load_method = oparg & 1;
#if ENABLE_SPECIALIZATION
_PySuperAttrCache *cache = (_PySuperAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
int load_method = oparg & 1;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_LoadSuperAttr(global_super, class, next_instr, load_method);
DISPATCH_SAME_OPARG();
}
STAT_INC(LOAD_SUPER_ATTR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
if (opcode == INSTRUMENTED_LOAD_SUPER_ATTR) {
PyObject *arg = oparg & 2 ? class : &_PyInstrumentation_MISSING;
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_CALL,
frame, next_instr-1, global_super, arg);
frame, this_instr, global_super, arg);
ERROR_IF(err, error);
}
@ -1736,12 +1728,12 @@ dummy_func(
if (super == NULL) {
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, next_instr-1, global_super, arg);
frame, this_instr, global_super, arg);
}
else {
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, next_instr-1, global_super, arg);
frame, this_instr, global_super, arg);
if (err < 0) {
Py_CLEAR(super);
}
@ -1819,15 +1811,14 @@ dummy_func(
inst(LOAD_ATTR, (unused/9, owner -- attr, self_or_null if (oparg & 1))) {
#if ENABLE_SPECIALIZATION
_PyAttrCache *cache = (_PyAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
next_instr--;
next_instr = this_instr;
_Py_Specialize_LoadAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(LOAD_ATTR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
if (oparg & 1) {
@ -2018,8 +2009,7 @@ dummy_func(
// Manipulate stack directly because we exit with DISPATCH_INLINED().
STACK_SHRINK(1);
new_frame->localsplus[0] = owner;
SKIP_OVER(INLINE_CACHE_ENTRIES_LOAD_ATTR);
assert(1 + INLINE_CACHE_ENTRIES_LOAD_ATTR == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_LOAD_ATTR == next_instr - this_instr);
frame->return_offset = 1 + INLINE_CACHE_ENTRIES_LOAD_ATTR;
DISPATCH_INLINED(new_frame);
}
@ -2046,8 +2036,7 @@ dummy_func(
STACK_SHRINK(1);
new_frame->localsplus[0] = owner;
new_frame->localsplus[1] = Py_NewRef(name);
SKIP_OVER(INLINE_CACHE_ENTRIES_LOAD_ATTR);
assert(1 + INLINE_CACHE_ENTRIES_LOAD_ATTR == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_LOAD_ATTR == next_instr - this_instr);
frame->return_offset = 1 + INLINE_CACHE_ENTRIES_LOAD_ATTR;
DISPATCH_INLINED(new_frame);
}
@ -2142,14 +2131,13 @@ dummy_func(
inst(COMPARE_OP, (unused/1, left, right -- res)) {
#if ENABLE_SPECIALIZATION
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
DISPATCH_SAME_OPARG();
}
STAT_INC(COMPARE_OP, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
assert((oparg >> 5) <= Py_GE);
res = PyObject_RichCompare(left, right, oparg >> 5);
@ -2277,24 +2265,23 @@ dummy_func(
inst(JUMP_BACKWARD, (--)) {
CHECK_EVAL_BREAKER();
_Py_CODEUNIT *here = next_instr - 1;
assert(oparg <= INSTR_OFFSET());
JUMPBY(1-oparg);
#if ENABLE_SPECIALIZATION
here[1].cache += (1 << OPTIMIZER_BITS_IN_COUNTER);
if (here[1].cache > tstate->interp->optimizer_backedge_threshold &&
this_instr[1].cache += (1 << OPTIMIZER_BITS_IN_COUNTER);
if (this_instr[1].cache > tstate->interp->optimizer_backedge_threshold &&
// Double-check that the opcode isn't instrumented or something:
here->op.code == JUMP_BACKWARD)
this_instr->op.code == JUMP_BACKWARD)
{
OPT_STAT_INC(attempts);
int optimized = _PyOptimizer_BackEdge(frame, here, next_instr, stack_pointer);
int optimized = _PyOptimizer_BackEdge(frame, this_instr, next_instr, stack_pointer);
ERROR_IF(optimized < 0, error);
if (optimized) {
// Rewind and enter the executor:
assert(here->op.code == ENTER_EXECUTOR);
next_instr = here;
assert(this_instr->op.code == ENTER_EXECUTOR);
next_instr = this_instr;
}
here[1].cache &= ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1);
this_instr[1].cache &= ((1 << OPTIMIZER_BITS_IN_COUNTER) - 1);
}
#endif /* ENABLE_SPECIALIZATION */
}
@ -2331,7 +2318,7 @@ dummy_func(
assert(PyBool_Check(cond));
int flag = Py_IsFalse(cond);
#if ENABLE_SPECIALIZATION
next_instr->cache = (next_instr->cache << 1) | flag;
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
#endif
JUMPBY(oparg * flag);
}
@ -2340,7 +2327,7 @@ dummy_func(
assert(PyBool_Check(cond));
int flag = Py_IsTrue(cond);
#if ENABLE_SPECIALIZATION
next_instr->cache = (next_instr->cache << 1) | flag;
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
#endif
JUMPBY(oparg * flag);
}
@ -2456,14 +2443,13 @@ dummy_func(
inst(FOR_ITER, (unused/1, iter -- iter, next)) {
#if ENABLE_SPECIALIZATION
_PyForIterCache *cache = (_PyForIterCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_ForIter(iter, next_instr, oparg);
DISPATCH_SAME_OPARG();
}
STAT_INC(FOR_ITER, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
/* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
next = (*Py_TYPE(iter)->tp_iternext)(iter);
@ -2472,15 +2458,14 @@ dummy_func(
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
}
monitor_raise(tstate, frame, next_instr-1);
monitor_raise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR ||
next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR);
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
Py_DECREF(iter);
STACK_SHRINK(1);
SKIP_OVER(INLINE_CACHE_ENTRIES_FOR_ITER);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
@ -2488,32 +2473,31 @@ dummy_func(
// Common case: no jump, leave it to the code generator
}
inst(INSTRUMENTED_FOR_ITER, ( -- )) {
_Py_CODEUNIT *here = frame->instr_ptr;
inst(INSTRUMENTED_FOR_ITER, (unused/1 -- )) {
_Py_CODEUNIT *target;
PyObject *iter = TOP();
PyObject *next = (*Py_TYPE(iter)->tp_iternext)(iter);
if (next != NULL) {
PUSH(next);
target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER;
target = next_instr;
}
else {
if (_PyErr_Occurred(tstate)) {
if (!_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
goto error;
}
monitor_raise(tstate, frame, here);
monitor_raise(tstate, frame, this_instr);
_PyErr_Clear(tstate);
}
/* iterator ended normally */
assert(next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == END_FOR ||
next_instr[INLINE_CACHE_ENTRIES_FOR_ITER + oparg].op.code == INSTRUMENTED_END_FOR);
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
STACK_SHRINK(1);
Py_DECREF(iter);
/* Skip END_FOR */
target = next_instr + INLINE_CACHE_ENTRIES_FOR_ITER + oparg + 1;
target = next_instr + oparg + 1;
}
INSTRUMENTED_JUMP(here, target, PY_MONITORING_EVENT_BRANCH);
INSTRUMENTED_JUMP(this_instr, target, PY_MONITORING_EVENT_BRANCH);
}
op(_ITER_CHECK_LIST, (iter -- iter)) {
@ -2532,7 +2516,6 @@ dummy_func(
}
Py_DECREF(iter);
STACK_SHRINK(1);
SKIP_OVER(INLINE_CACHE_ENTRIES_FOR_ITER);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
@ -2588,7 +2571,6 @@ dummy_func(
}
Py_DECREF(iter);
STACK_SHRINK(1);
SKIP_OVER(INLINE_CACHE_ENTRIES_FOR_ITER);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
@ -2640,7 +2622,6 @@ dummy_func(
if (r->len <= 0) {
STACK_SHRINK(1);
Py_DECREF(r);
SKIP_OVER(INLINE_CACHE_ENTRIES_FOR_ITER);
// Jump over END_FOR instruction.
JUMPBY(oparg + 1);
DISPATCH();
@ -2682,10 +2663,9 @@ dummy_func(
gen->gi_frame_state = FRAME_EXECUTING;
gen->gi_exc_state.previous_item = tstate->exc_info;
tstate->exc_info = &gen->gi_exc_state;
SKIP_OVER(INLINE_CACHE_ENTRIES_FOR_ITER);
assert(next_instr[oparg].op.code == END_FOR ||
next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
assert(1 + INLINE_CACHE_ENTRIES_FOR_ITER == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_FOR_ITER == next_instr - this_instr);
frame->return_offset = (uint16_t)(1 + INLINE_CACHE_ENTRIES_FOR_ITER + oparg);
DISPATCH_INLINED(gen_frame);
}
@ -2912,7 +2892,7 @@ dummy_func(
unused/2 +
_LOAD_ATTR_METHOD_LAZY_DICT;
inst(INSTRUMENTED_CALL, ( -- )) {
inst(INSTRUMENTED_CALL, (unused/3 -- )) {
int is_meth = PEEK(oparg + 1) != NULL;
int total_args = oparg + is_meth;
PyObject *function = PEEK(oparg + 2);
@ -2920,10 +2900,9 @@ dummy_func(
&_PyInstrumentation_MISSING : PEEK(total_args);
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_CALL,
frame, next_instr-1, function, arg);
frame, this_instr, function, arg);
ERROR_IF(err, error);
_PyCallCache *cache = (_PyCallCache *)next_instr;
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
INCREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
GO_TO_INSTRUCTION(CALL);
}
@ -2959,14 +2938,13 @@ dummy_func(
total_args++;
}
#if ENABLE_SPECIALIZATION
_PyCallCache *cache = (_PyCallCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_Call(callable, next_instr, total_args);
DISPATCH_SAME_OPARG();
}
STAT_INC(CALL, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
if (self_or_null == NULL && Py_TYPE(callable) == &PyMethod_Type) {
args--;
@ -2996,8 +2974,7 @@ dummy_func(
if (new_frame == NULL) {
goto error;
}
SKIP_OVER(INLINE_CACHE_ENTRIES_CALL);
assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - this_instr);
frame->return_offset = 1 + INLINE_CACHE_ENTRIES_CALL;
DISPATCH_INLINED(new_frame);
}
@ -3012,12 +2989,12 @@ dummy_func(
if (res == NULL) {
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, next_instr-1, callable, arg);
frame, this_instr, callable, arg);
}
else {
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, next_instr-1, callable, arg);
frame, this_instr, callable, arg);
if (err < 0) {
Py_CLEAR(res);
}
@ -3151,8 +3128,7 @@ dummy_func(
}
// Manipulate stack and cache directly since we leave using DISPATCH_INLINED().
STACK_SHRINK(oparg + 2);
SKIP_OVER(INLINE_CACHE_ENTRIES_CALL);
assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - this_instr);
frame->return_offset = 1 + INLINE_CACHE_ENTRIES_CALL;
DISPATCH_INLINED(new_frame);
}
@ -3200,7 +3176,7 @@ dummy_func(
* 2. Pushes a shim frame to the frame stack (to cleanup after ``__init__``)
* 3. Pushes the frame for ``__init__`` to the frame stack
* */
_PyCallCache *cache = (_PyCallCache *)next_instr;
_PyCallCache *cache = (_PyCallCache *)&this_instr[1];
DEOPT_IF(null != NULL);
DEOPT_IF(!PyType_Check(callable));
PyTypeObject *tp = (PyTypeObject *)callable;
@ -3229,8 +3205,7 @@ dummy_func(
for (int i = 0; i < oparg; i++) {
init_frame->localsplus[i+1] = args[i];
}
SKIP_OVER(INLINE_CACHE_ENTRIES_CALL);
assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - frame->instr_ptr);
assert(1 + INLINE_CACHE_ENTRIES_CALL == next_instr - this_instr);
frame->return_offset = 1 + INLINE_CACHE_ENTRIES_CALL;
STACK_SHRINK(oparg+2);
_PyFrame_SetStackPointer(frame, stack_pointer);
@ -3426,9 +3401,9 @@ dummy_func(
Py_DECREF(self);
Py_DECREF(callable);
STACK_SHRINK(3);
// CALL + POP_TOP
SKIP_OVER(INLINE_CACHE_ENTRIES_CALL + 1);
assert(next_instr[-1].op.code == POP_TOP);
// Skip POP_TOP
assert(next_instr->op.code == POP_TOP);
SKIP_OVER(1);
DISPATCH();
}
@ -3558,7 +3533,7 @@ dummy_func(
: PEEK(total_args + 1);
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_CALL,
frame, next_instr - 1, function, arg);
frame, this_instr, function, arg);
ERROR_IF(err, error);
GO_TO_INSTRUCTION(CALL_KW);
}
@ -3600,7 +3575,7 @@ dummy_func(
if (new_frame == NULL) {
goto error;
}
assert(next_instr - frame->instr_ptr == 1);
assert(next_instr - this_instr == 1);
frame->return_offset = 1;
DISPATCH_INLINED(new_frame);
}
@ -3615,12 +3590,12 @@ dummy_func(
if (res == NULL) {
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, next_instr-1, callable, arg);
frame, this_instr, callable, arg);
}
else {
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, next_instr-1, callable, arg);
frame, this_instr, callable, arg);
if (err < 0) {
Py_CLEAR(res);
}
@ -3663,18 +3638,18 @@ dummy_func(
PyTuple_GET_ITEM(callargs, 0) : Py_None;
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_CALL,
frame, next_instr-1, func, arg);
frame, this_instr, func, arg);
if (err) goto error;
result = PyObject_Call(func, callargs, kwargs);
if (result == NULL) {
_Py_call_instrumentation_exc2(
tstate, PY_MONITORING_EVENT_C_RAISE,
frame, next_instr-1, func, arg);
frame, this_instr, func, arg);
}
else {
int err = _Py_call_instrumentation_2args(
tstate, PY_MONITORING_EVENT_C_RETURN,
frame, next_instr-1, func, arg);
frame, this_instr, func, arg);
if (err < 0) {
Py_CLEAR(result);
}
@ -3697,7 +3672,7 @@ dummy_func(
if (new_frame == NULL) {
goto error;
}
assert(next_instr - frame->instr_ptr == 1);
assert(next_instr - this_instr == 1);
frame->return_offset = 1;
DISPATCH_INLINED(new_frame);
}
@ -3818,14 +3793,13 @@ dummy_func(
inst(BINARY_OP, (unused/1, lhs, rhs -- res)) {
#if ENABLE_SPECIALIZATION
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
DISPATCH_SAME_OPARG();
}
STAT_INC(BINARY_OP, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
assert(NB_ADD <= oparg);
assert(oparg <= NB_INPLACE_XOR);
@ -3842,12 +3816,11 @@ dummy_func(
inst(INSTRUMENTED_INSTRUCTION, ( -- )) {
int next_opcode = _Py_call_instrumentation_instruction(
tstate, frame, next_instr-1);
tstate, frame, this_instr);
ERROR_IF(next_opcode < 0, error);
next_instr--;
next_instr = this_instr;
if (_PyOpcode_Caches[next_opcode]) {
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)(next_instr+1);
INCREMENT_ADAPTIVE_COUNTER(cache->counter);
INCREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
}
assert(next_opcode > 0 && next_opcode < 256);
opcode = next_opcode;
@ -3855,43 +3828,38 @@ dummy_func(
}
inst(INSTRUMENTED_JUMP_FORWARD, ( -- )) {
_Py_CODEUNIT *here = frame->instr_ptr;
INSTRUMENTED_JUMP(here, next_instr + oparg, PY_MONITORING_EVENT_JUMP);
INSTRUMENTED_JUMP(this_instr, next_instr + oparg, PY_MONITORING_EVENT_JUMP);
}
inst(INSTRUMENTED_JUMP_BACKWARD, ( -- )) {
_Py_CODEUNIT *here = frame->instr_ptr;
CHECK_EVAL_BREAKER();
INSTRUMENTED_JUMP(here, next_instr + 1 - oparg, PY_MONITORING_EVENT_JUMP);
INSTRUMENTED_JUMP(this_instr, next_instr + 1 - oparg, PY_MONITORING_EVENT_JUMP);
}
inst(INSTRUMENTED_POP_JUMP_IF_TRUE, (unused/1 -- )) {
PyObject *cond = POP();
assert(PyBool_Check(cond));
_Py_CODEUNIT *here = frame->instr_ptr;
int flag = Py_IsTrue(cond);
int offset = flag * oparg;
#if ENABLE_SPECIALIZATION
next_instr->cache = (next_instr->cache << 1) | flag;
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
#endif
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_FALSE, (unused/1 -- )) {
PyObject *cond = POP();
assert(PyBool_Check(cond));
_Py_CODEUNIT *here = frame->instr_ptr;
int flag = Py_IsFalse(cond);
int offset = flag * oparg;
#if ENABLE_SPECIALIZATION
next_instr->cache = (next_instr->cache << 1) | flag;
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
#endif
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_NONE, (unused/1 -- )) {
PyObject *value = POP();
_Py_CODEUNIT *here = frame->instr_ptr;
int flag = Py_IsNone(value);
int offset;
if (flag) {
@ -3902,14 +3870,13 @@ dummy_func(
offset = 0;
}
#if ENABLE_SPECIALIZATION
next_instr->cache = (next_instr->cache << 1) | flag;
this_instr[1].cache = (this_instr[1].cache << 1) | flag;
#endif
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(INSTRUMENTED_POP_JUMP_IF_NOT_NONE, (unused/1 -- )) {
PyObject *value = POP();
_Py_CODEUNIT *here = frame->instr_ptr;
int offset;
int nflag = Py_IsNone(value);
if (nflag) {
@ -3920,9 +3887,9 @@ dummy_func(
offset = oparg;
}
#if ENABLE_SPECIALIZATION
next_instr->cache = (next_instr->cache << 1) | !nflag;
this_instr[1].cache = (this_instr[1].cache << 1) | !nflag;
#endif
INSTRUMENTED_JUMP(here, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
INSTRUMENTED_JUMP(this_instr, next_instr + offset, PY_MONITORING_EVENT_BRANCH);
}
inst(EXTENDED_ARG, ( -- )) {
@ -3969,7 +3936,7 @@ dummy_func(
op(_SAVE_RETURN_OFFSET, (--)) {
#if TIER_ONE
frame->return_offset = (uint16_t)(next_instr - frame->instr_ptr);
frame->return_offset = (uint16_t)(next_instr - this_instr);
#endif
#if TIER_TWO
frame->return_offset = oparg;
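
A pattern worth calling out across the bytecode hunks above: in every adaptive-specialization "deferred" path, next_instr-- becomes next_instr = this_instr. Previously next_instr sat one code unit past the opcode (on the first cache entry), so decrementing it landed back on the opcode before DISPATCH_SAME_OPARG; now next_instr has already been advanced past the inline cache as well, so the rewind has to go through this_instr. Condensed from the BINARY_SUBSCR hunk (the explanatory comments are mine):

    #if ENABLE_SPECIALIZATION
    if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
        next_instr = this_instr;        /* rewind onto the opcode, over the caches too */
        _Py_Specialize_BinarySubscr(container, sub, next_instr);
        DISPATCH_SAME_OPARG();          /* re-dispatch the (possibly rewritten) opcode */
    }
    STAT_INC(BINARY_SUBSCR, deferred);
    DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
    #endif /* ENABLE_SPECIALIZATION */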


@ -867,7 +867,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int
monitor_raise(tstate, frame, next_instr-1);
exception_unwind:
{
/* We can't use frame->f_lasti here, as RERAISE may have set it */
/* We can't use frame->instr_ptr here, as RERAISE may have set it */
int offset = INSTR_OFFSET()-1;
int level, handler, lasti;
if (get_exception_handler(_PyFrame_GetCode(frame), offset, &level, &handler, &lasti) == 0) {


@ -60,25 +60,21 @@
#endif
#ifdef Py_STATS
#define INSTRUCTION_START(op) \
#define INSTRUCTION_STATS(op) \
do { \
frame->instr_ptr = next_instr++; \
OPCODE_EXE_INC(op); \
if (_Py_stats) _Py_stats->opcode_stats[lastopcode].pair_count[op]++; \
lastopcode = op; \
} while (0)
#else
#define INSTRUCTION_START(op) \
do { \
frame->instr_ptr = next_instr++; \
} while(0)
#define INSTRUCTION_STATS(op) ((void)0)
#endif
#if USE_COMPUTED_GOTOS
# define TARGET(op) TARGET_##op: INSTRUCTION_START(op);
# define TARGET(op) TARGET_##op:
# define DISPATCH_GOTO() goto *opcode_targets[opcode]
#else
# define TARGET(op) case op: TARGET_##op: INSTRUCTION_START(op);
# define TARGET(op) case op: TARGET_##op:
# define DISPATCH_GOTO() goto dispatch_opcode
#endif
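
This hunk is the other half of the move: INSTRUCTION_START, which both recorded frame->instr_ptr and incremented next_instr, is gone; INSTRUCTION_STATS keeps only the Py_STATS bookkeeping, and TARGET() reduces to the label (plus a case label without computed gotos). Roughly, with computed gotos and Py_STATS off, a simple one-word instruction now expands like this (an illustrative expansion, not actual generated output):

    TARGET_POP_TOP:                     /* TARGET(POP_TOP) is just the label now */
    {
        frame->instr_ptr = next_instr;  /* emitted by the case generator, not by TARGET() */
        next_instr += 1;                /* POP_TOP is one code unit, no inline cache */
        ((void)0);                      /* INSTRUCTION_STATS(POP_TOP) without Py_STATS */
        /* instruction body */
        DISPATCH();
    }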


@ -119,14 +119,13 @@
PyObject *res;
value = stack_pointer[-1];
#if ENABLE_SPECIALIZATION
_PyToBoolCache *cache = (_PyToBoolCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_ToBool(value, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(TO_BOOL, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
int err = PyObject_IsTrue(value);
Py_DECREF(value);
@ -380,14 +379,13 @@
sub = stack_pointer[-1];
container = stack_pointer[-2];
#if ENABLE_SPECIALIZATION
_PyBinarySubscrCache *cache = (_PyBinarySubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_BinarySubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(BINARY_SUBSCR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
res = PyObject_GetItem(container, sub);
Py_DECREF(container);
@ -574,14 +572,13 @@
container = stack_pointer[-2];
v = stack_pointer[-3];
#if ENABLE_SPECIALIZATION
_PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
@ -900,14 +897,13 @@
PyObject *seq;
seq = stack_pointer[-1];
#if ENABLE_SPECIALIZATION
_PyUnpackSequenceCache *cache = (_PyUnpackSequenceCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_UnpackSequence(seq, next_instr, oparg);
DISPATCH_SAME_OPARG();
}
STAT_INC(UNPACK_SEQUENCE, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
PyObject **top = stack_pointer + oparg - 1;
int res = _PyEval_UnpackIterable(tstate, seq, oparg, -1, top);
@ -989,15 +985,14 @@
owner = stack_pointer[-1];
v = stack_pointer[-2];
#if ENABLE_SPECIALIZATION
_PyAttrCache *cache = (_PyAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
next_instr--;
next_instr = this_instr;
_Py_Specialize_StoreAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_ATTR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
int err = PyObject_SetAttr(owner, name, v);
@ -1133,15 +1128,14 @@
PyObject *res;
PyObject *null = NULL;
#if ENABLE_SPECIALIZATION
_PyLoadGlobalCache *cache = (_PyLoadGlobalCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
next_instr--;
next_instr = this_instr;
_Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(LOAD_GLOBAL, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
if (PyDict_CheckExact(GLOBALS())
@ -1624,15 +1618,14 @@
PyObject *self_or_null = NULL;
owner = stack_pointer[-1];
#if ENABLE_SPECIALIZATION
_PyAttrCache *cache = (_PyAttrCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg>>1);
next_instr--;
next_instr = this_instr;
_Py_Specialize_LoadAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(LOAD_ATTR, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
if (oparg & 1) {
@ -1887,14 +1880,13 @@
right = stack_pointer[-1];
left = stack_pointer[-2];
#if ENABLE_SPECIALIZATION
_PyCompareOpCache *cache = (_PyCompareOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_CompareOp(left, right, next_instr, oparg);
DISPATCH_SAME_OPARG();
}
STAT_INC(COMPARE_OP, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
assert((oparg >> 5) <= Py_GE);
res = PyObject_RichCompare(left, right, oparg >> 5);
@ -3209,14 +3201,13 @@
rhs = stack_pointer[-1];
lhs = stack_pointer[-2];
#if ENABLE_SPECIALIZATION
_PyBinaryOpCache *cache = (_PyBinaryOpCache *)next_instr;
if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
if (ADAPTIVE_COUNTER_IS_ZERO(this_instr[1].cache)) {
next_instr = this_instr;
_Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY);
DISPATCH_SAME_OPARG();
}
STAT_INC(BINARY_OP, deferred);
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
DECREMENT_ADAPTIVE_COUNTER(this_instr[1].cache);
#endif /* ENABLE_SPECIALIZATION */
assert(NB_ADD <= oparg);
assert(oparg <= NB_INPLACE_XOR);
@ -3275,7 +3266,7 @@
case _SAVE_RETURN_OFFSET: {
#if TIER_ONE
frame->return_offset = (uint16_t)(next_instr - frame->instr_ptr);
frame->return_offset = (uint16_t)(next_instr - this_instr);
#endif
#if TIER_TWO
frame->return_offset = oparg;

File diff suppressed because it is too large.


@ -432,7 +432,6 @@ def report_non_viable_uops(self, jsonfile: str) -> None:
"LOAD_FAST_LOAD_FAST",
"LOAD_CONST_LOAD_FAST",
"STORE_FAST_STORE_FAST",
"_BINARY_OP_INPLACE_ADD_UNICODE",
"POP_JUMP_IF_TRUE",
"POP_JUMP_IF_FALSE",
"_ITER_JUMP_LIST",


@ -61,6 +61,7 @@ class Instruction:
# Computed by constructor
always_exits: str # If the block always exits, its last line; else ""
has_deopt: bool
needs_this_instr: bool
cache_offset: int
cache_effects: list[parsing.CacheEffect]
input_effects: list[StackEffect]
@ -87,6 +88,7 @@ def __init__(self, inst: parsing.InstDef):
effect for effect in inst.inputs if isinstance(effect, parsing.CacheEffect)
]
self.cache_offset = sum(c.size for c in self.cache_effects)
self.needs_this_instr = variable_used(self.inst, "this_instr") or any(c.name != UNUSED for c in self.cache_effects)
self.input_effects = [
effect for effect in inst.inputs if isinstance(effect, StackEffect)
]
@ -164,7 +166,8 @@ def write_body(
func = f"read_u{bits}"
if tier == TIER_ONE:
out.emit(
f"{typ}{ceffect.name} = {func}(&next_instr[{active.offset}].cache);"
f"{typ}{ceffect.name} = "
f"{func}(&this_instr[{active.offset + 1}].cache);"
)
else:
out.emit(f"{typ}{ceffect.name} = ({typ.strip()})operand;")


@ -365,8 +365,17 @@ def write_macro_instr(mac: MacroInstruction, out: Formatter) -> None:
]
out.emit("")
with out.block(f"TARGET({mac.name})"):
needs_this = any(part.instr.needs_this_instr for part in parts)
if needs_this and not mac.predicted:
out.emit(f"_Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;")
else:
out.emit(f"frame->instr_ptr = next_instr;")
out.emit(f"next_instr += {mac.cache_offset+1};")
out.emit(f"INSTRUCTION_STATS({mac.name});")
if mac.predicted:
out.emit(f"PREDICTED({mac.name});")
if needs_this:
out.emit(f"_Py_CODEUNIT *this_instr = next_instr - {mac.cache_offset+1};")
out.static_assert_family_size(mac.name, mac.family, mac.cache_offset)
try:
next_instr_is_set = write_components(
@ -375,8 +384,6 @@ def write_macro_instr(mac: MacroInstruction, out: Formatter) -> None:
except AssertionError as err:
raise AssertionError(f"Error writing macro {mac.name}") from err
if not parts[-1].instr.always_exits:
if not next_instr_is_set and mac.cache_offset:
out.emit(f"next_instr += {mac.cache_offset};")
if parts[-1].instr.check_eval_breaker:
out.emit("CHECK_EVAL_BREAKER();")
out.emit("DISPATCH();")
@ -450,8 +457,6 @@ def write_components(
if mgr.instr.name == "_SAVE_RETURN_OFFSET":
next_instr_is_set = True
if cache_offset:
out.emit(f"next_instr += {cache_offset};")
if tier == TIER_ONE:
assert_no_pokes(managers)
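
Taken together, write_macro_instr now always emits the preamble and only varies where this_instr is declared, matching the macro-instruction test expectations earlier in the commit. A sketch of the two shapes it produces (opcode names and cache sizes taken from those tests):

    /* not predicted: this_instr doubles as the saved instruction pointer */
    TARGET(OP1) {
        _Py_CODEUNIT *this_instr = frame->instr_ptr = next_instr;
        next_instr += 2;
        INSTRUCTION_STATS(OP1);
        /* body reads caches via this_instr[1] */
        DISPATCH();
    }

    /* predicted: GO_TO_INSTRUCTION enters at the PREDICTED label, bypassing the
       preamble, so this_instr is recomputed from next_instr after it */
    TARGET(OP) {
        frame->instr_ptr = next_instr;
        next_instr += 6;
        INSTRUCTION_STATS(OP);
        PREDICTED(OP);
        _Py_CODEUNIT *this_instr = next_instr - 6;
        /* body reads caches via this_instr[1], this_instr[4], ... */
        DISPATCH();
    }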