cpython/Python/optimizer_cases.c.h
Ken Jin 22b0de2755
gh-117139: Convert the evaluation stack to stack refs (#118450)
This PR sets up tagged pointers for CPython.

The general idea is to create a separate struct _PyStackRef for everything on the evaluation stack to store the bits. This forces the C compiler to warn us if we try to cast things or pull things out of the struct directly.

Only for free threading: We tag the low bit if something is deferred - that means we skip incref and decref operations on it. This behavior may change in the future if Mark's plans to defer all objects in the interpreter loop pan out.

This implies a strict stack reference discipline is required. ALL incref and decref operations on stackrefs must use the stackref variants. It is unsafe to untag something then do normal incref/decref ops on it.

The new incref and decref variants are called dup and close. They mimic a "handle" API operating on these stackrefs.

Please read Include/internal/pycore_stackref.h for more information!

---------

Co-authored-by: Mark Shannon <9448417+markshannon@users.noreply.github.com>
2024-06-27 03:10:43 +08:00

2217 lines
67 KiB
C
Generated

// This file is generated by Tools/cases_generator/optimizer_generator.py
// from:
// Python/optimizer_bytecodes.c
// Do not edit!
// Abstract interpretation of simple load/store/unary micro-ops.
// Each case models the uop's stack effect symbolically: symbols
// (_Py_UopsSymbol) stand in for runtime values.
case _NOP: {
break;
}
case _RESUME_CHECK: {
break;
}
/* _INSTRUMENTED_RESUME is not a viable micro-op for tier 2 */
// Push the symbol in the local slot; a NULL symbol means the load
// would raise at runtime, so stop projecting the trace.
case _LOAD_FAST_CHECK: {
_Py_UopsSymbol *value;
value = GETLOCAL(oparg);
// We guarantee this will error - just bail and don't optimize it.
if (sym_is_null(value)) {
ctx->done = true;
}
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_FAST: {
_Py_UopsSymbol *value;
value = GETLOCAL(oparg);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
// Push the local's symbol and replace the slot with a fresh NULL
// symbol, mirroring the runtime clear of the local.
case _LOAD_FAST_AND_CLEAR: {
_Py_UopsSymbol *value;
value = GETLOCAL(oparg);
_Py_UopsSymbol *temp = sym_new_null(ctx);
GETLOCAL(oparg) = temp;
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
// Strength-reduce the const load to an inline-const uop (borrowed
// reference when the constant is immortal) and push a const symbol.
case _LOAD_CONST: {
_Py_UopsSymbol *value;
PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
int opcode = _Py_IsImmortal(val) ? _LOAD_CONST_INLINE_BORROW : _LOAD_CONST_INLINE;
REPLACE_OP(this_instr, opcode, 0, (uintptr_t)val);
value = sym_new_const(ctx, val);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_FAST: {
_Py_UopsSymbol *value;
value = stack_pointer[-1];
GETLOCAL(oparg) = value;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _POP_TOP: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _PUSH_NULL: {
_Py_UopsSymbol *res;
res = sym_new_null(ctx);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _END_SEND: {
_Py_UopsSymbol *value;
value = sym_new_not_null(ctx);
stack_pointer[-2] = value;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
// Unary ops: result is some non-null object; no type is inferred here.
case _UNARY_NEGATIVE: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-1] = res;
break;
}
case _UNARY_NOT: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-1] = res;
break;
}
// _TO_BOOL family: optimize_to_bool() tries to fold the conversion
// (e.g. when the operand is already known const/bool); when it cannot,
// each specialization narrows the operand's type and models the result.
case _TO_BOOL: {
_Py_UopsSymbol *value;
_Py_UopsSymbol *res;
value = stack_pointer[-1];
if (!optimize_to_bool(this_instr, ctx, value, &res)) {
res = sym_new_type(ctx, &PyBool_Type);
}
stack_pointer[-1] = res;
break;
}
// Operand is already a bool, so the result symbol is the operand itself.
case _TO_BOOL_BOOL: {
_Py_UopsSymbol *value;
_Py_UopsSymbol *res;
value = stack_pointer[-1];
if (!optimize_to_bool(this_instr, ctx, value, &res)) {
sym_set_type(value, &PyBool_Type);
res = value;
}
stack_pointer[-1] = res;
break;
}
case _TO_BOOL_INT: {
_Py_UopsSymbol *value;
_Py_UopsSymbol *res;
value = stack_pointer[-1];
if (!optimize_to_bool(this_instr, ctx, value, &res)) {
sym_set_type(value, &PyLong_Type);
res = sym_new_type(ctx, &PyBool_Type);
}
stack_pointer[-1] = res;
break;
}
case _TO_BOOL_LIST: {
_Py_UopsSymbol *value;
_Py_UopsSymbol *res;
value = stack_pointer[-1];
if (!optimize_to_bool(this_instr, ctx, value, &res)) {
sym_set_type(value, &PyList_Type);
res = sym_new_type(ctx, &PyBool_Type);
}
stack_pointer[-1] = res;
break;
}
// None is always falsy: narrow the operand to the None constant and
// the result to the False constant.
case _TO_BOOL_NONE: {
_Py_UopsSymbol *value;
_Py_UopsSymbol *res;
value = stack_pointer[-1];
if (!optimize_to_bool(this_instr, ctx, value, &res)) {
sym_set_const(value, Py_None);
res = sym_new_const(ctx, Py_False);
}
stack_pointer[-1] = res;
break;
}
case _TO_BOOL_STR: {
_Py_UopsSymbol *value;
_Py_UopsSymbol *res;
value = stack_pointer[-1];
if (!optimize_to_bool(this_instr, ctx, value, &res)) {
res = sym_new_type(ctx, &PyBool_Type);
sym_set_type(value, &PyUnicode_Type);
}
stack_pointer[-1] = res;
break;
}
case _REPLACE_WITH_TRUE: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-1] = res;
break;
}
case _UNARY_INVERT: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-1] = res;
break;
}
// _GUARD_BOTH_INT: elide or narrow the guard based on what is already
// known. Both ints known -> NOP; one known -> keep only the guard for
// the unknown operand (TOS = top of stack, NOS = next on stack).
// Afterwards both operands are known to be ints on the guarded path.
case _GUARD_BOTH_INT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_matches_type(left, &PyLong_Type)) {
if (sym_matches_type(right, &PyLong_Type)) {
REPLACE_OP(this_instr, _NOP, 0, 0);
}
else {
REPLACE_OP(this_instr, _GUARD_TOS_INT, 0, 0);
}
}
else {
if (sym_matches_type(right, &PyLong_Type)) {
REPLACE_OP(this_instr, _GUARD_NOS_INT, 0, 0);
}
}
sym_set_type(left, &PyLong_Type);
sym_set_type(right, &PyLong_Type);
break;
}
case _GUARD_NOS_INT: {
break;
}
case _GUARD_TOS_INT: {
break;
}
// Int binops: constant-fold when both operands are known int constants
// (the folded result is computed eagerly at optimization time);
// otherwise the result is only known to be an int.
case _BINARY_OP_MULTIPLY_INT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_is_const(left) && sym_is_const(right) &&
sym_matches_type(left, &PyLong_Type) && sym_matches_type(right, &PyLong_Type))
{
assert(PyLong_CheckExact(sym_get_const(left)));
assert(PyLong_CheckExact(sym_get_const(right)));
PyObject *temp = _PyLong_Multiply((PyLongObject *)sym_get_const(left),
(PyLongObject *)sym_get_const(right));
if (temp == NULL) {
goto error;
}
res = sym_new_const(ctx, temp);
Py_DECREF(temp);
// TODO gh-115506:
// replace opcode with constant propagated one and add tests!
}
else {
res = sym_new_type(ctx, &PyLong_Type);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_OP_ADD_INT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_is_const(left) && sym_is_const(right) &&
sym_matches_type(left, &PyLong_Type) && sym_matches_type(right, &PyLong_Type))
{
assert(PyLong_CheckExact(sym_get_const(left)));
assert(PyLong_CheckExact(sym_get_const(right)));
PyObject *temp = _PyLong_Add((PyLongObject *)sym_get_const(left),
(PyLongObject *)sym_get_const(right));
if (temp == NULL) {
goto error;
}
res = sym_new_const(ctx, temp);
Py_DECREF(temp);
// TODO gh-115506:
// replace opcode with constant propagated one and add tests!
}
else {
res = sym_new_type(ctx, &PyLong_Type);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_OP_SUBTRACT_INT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_is_const(left) && sym_is_const(right) &&
sym_matches_type(left, &PyLong_Type) && sym_matches_type(right, &PyLong_Type))
{
assert(PyLong_CheckExact(sym_get_const(left)));
assert(PyLong_CheckExact(sym_get_const(right)));
PyObject *temp = _PyLong_Subtract((PyLongObject *)sym_get_const(left),
(PyLongObject *)sym_get_const(right));
if (temp == NULL) {
goto error;
}
res = sym_new_const(ctx, temp);
Py_DECREF(temp);
// TODO gh-115506:
// replace opcode with constant propagated one and add tests!
}
else {
res = sym_new_type(ctx, &PyLong_Type);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
// _GUARD_BOTH_FLOAT: same guard-narrowing scheme as _GUARD_BOTH_INT,
// for floats.
case _GUARD_BOTH_FLOAT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_matches_type(left, &PyFloat_Type)) {
if (sym_matches_type(right, &PyFloat_Type)) {
REPLACE_OP(this_instr, _NOP, 0, 0);
}
else {
REPLACE_OP(this_instr, _GUARD_TOS_FLOAT, 0, 0);
}
}
else {
if (sym_matches_type(right, &PyFloat_Type)) {
REPLACE_OP(this_instr, _GUARD_NOS_FLOAT, 0, 0);
}
}
sym_set_type(left, &PyFloat_Type);
sym_set_type(right, &PyFloat_Type);
break;
}
case _GUARD_NOS_FLOAT: {
break;
}
case _GUARD_TOS_FLOAT: {
break;
}
// Float binops: constant-fold when both operands are known float
// constants; otherwise the result is only known to be a float.
case _BINARY_OP_MULTIPLY_FLOAT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_is_const(left) && sym_is_const(right) &&
sym_matches_type(left, &PyFloat_Type) && sym_matches_type(right, &PyFloat_Type))
{
assert(PyFloat_CheckExact(sym_get_const(left)));
assert(PyFloat_CheckExact(sym_get_const(right)));
PyObject *temp = PyFloat_FromDouble(
PyFloat_AS_DOUBLE(sym_get_const(left)) *
PyFloat_AS_DOUBLE(sym_get_const(right)));
if (temp == NULL) {
goto error;
}
res = sym_new_const(ctx, temp);
Py_DECREF(temp);
// TODO gh-115506:
// replace opcode with constant propagated one and update tests!
}
else {
res = sym_new_type(ctx, &PyFloat_Type);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_OP_ADD_FLOAT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_is_const(left) && sym_is_const(right) &&
sym_matches_type(left, &PyFloat_Type) && sym_matches_type(right, &PyFloat_Type))
{
assert(PyFloat_CheckExact(sym_get_const(left)));
assert(PyFloat_CheckExact(sym_get_const(right)));
PyObject *temp = PyFloat_FromDouble(
PyFloat_AS_DOUBLE(sym_get_const(left)) +
PyFloat_AS_DOUBLE(sym_get_const(right)));
if (temp == NULL) {
goto error;
}
res = sym_new_const(ctx, temp);
Py_DECREF(temp);
// TODO gh-115506:
// replace opcode with constant propagated one and update tests!
}
else {
res = sym_new_type(ctx, &PyFloat_Type);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_OP_SUBTRACT_FLOAT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_is_const(left) && sym_is_const(right) &&
sym_matches_type(left, &PyFloat_Type) && sym_matches_type(right, &PyFloat_Type))
{
assert(PyFloat_CheckExact(sym_get_const(left)));
assert(PyFloat_CheckExact(sym_get_const(right)));
PyObject *temp = PyFloat_FromDouble(
PyFloat_AS_DOUBLE(sym_get_const(left)) -
PyFloat_AS_DOUBLE(sym_get_const(right)));
if (temp == NULL) {
goto error;
}
res = sym_new_const(ctx, temp);
Py_DECREF(temp);
// TODO gh-115506:
// replace opcode with constant propagated one and update tests!
}
else {
res = sym_new_type(ctx, &PyFloat_Type);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
// _GUARD_BOTH_UNICODE: elide the guard when both operands are already
// known to be str, then record both as str on the guarded path.
// BUG FIX: the second sym_set_type() previously narrowed `left` twice
// and never narrowed `right` (compare _GUARD_BOTH_INT/_GUARD_BOTH_FLOAT,
// which set left then right). That left `right`'s type unknown, so
// downstream unicode guards could not be elided and
// _BINARY_OP_ADD_UNICODE could not constant-fold.
case _GUARD_BOTH_UNICODE: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_matches_type(left, &PyUnicode_Type) &&
sym_matches_type(right, &PyUnicode_Type)) {
REPLACE_OP(this_instr, _NOP, 0, 0);
}
sym_set_type(left, &PyUnicode_Type);
sym_set_type(right, &PyUnicode_Type);
break;
}
// str + str: constant-fold concatenation when both operands are known
// str constants; otherwise the result is only known to be a str.
case _BINARY_OP_ADD_UNICODE: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
if (sym_is_const(left) && sym_is_const(right) &&
sym_matches_type(left, &PyUnicode_Type) && sym_matches_type(right, &PyUnicode_Type)) {
PyObject *temp = PyUnicode_Concat(sym_get_const(left), sym_get_const(right));
if (temp == NULL) {
goto error;
}
res = sym_new_const(ctx, temp);
Py_DECREF(temp);
}
else {
res = sym_new_type(ctx, &PyUnicode_Type);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
// Subscript/slice/store cases: only the stack effect is modeled;
// results are non-null symbols with no inferred type.
case _BINARY_SUBSCR: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_SLICE: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-3] = res;
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_SLICE: {
stack_pointer += -4;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_SUBSCR_LIST_INT: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_SUBSCR_STR_INT: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_SUBSCR_TUPLE_INT: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BINARY_SUBSCR_DICT: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
/* _BINARY_SUBSCR_GETITEM is not a viable micro-op for tier 2 */
case _LIST_APPEND: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _SET_ADD: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_SUBSCR: {
stack_pointer += -3;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_SUBSCR_LIST_INT: {
stack_pointer += -3;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_SUBSCR_DICT: {
stack_pointer += -3;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _DELETE_SUBSCR: {
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _CALL_INTRINSIC_1: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-1] = res;
break;
}
case _CALL_INTRINSIC_2: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
// _RETURN_VALUE: pop the abstract frame, return to the caller's stack,
// and release the stack space reserved for the callee frame.
case _RETURN_VALUE: {
_Py_UopsSymbol *retval;
_Py_UopsSymbol *res;
retval = stack_pointer[-1];
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
ctx->frame->stack_pointer = stack_pointer;
frame_pop(ctx);
stack_pointer = ctx->frame->stack_pointer;
res = retval;
/* Stack space handling */
assert(corresponding_check_stack == NULL);
assert(co != NULL);
int framesize = co->co_framesize;
assert(framesize > 0);
assert(framesize <= curr_space);
curr_space -= framesize;
co = get_code(this_instr);
if (co == NULL) {
// might be impossible, but bailing is still safe
ctx->done = true;
}
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
/* _INSTRUMENTED_RETURN_VALUE is not a viable micro-op for tier 2 */
/* _INSTRUMENTED_RETURN_CONST is not a viable micro-op for tier 2 */
case _GET_AITER: {
_Py_UopsSymbol *iter;
iter = sym_new_not_null(ctx);
stack_pointer[-1] = iter;
break;
}
case _GET_ANEXT: {
_Py_UopsSymbol *awaitable;
awaitable = sym_new_not_null(ctx);
stack_pointer[0] = awaitable;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _GET_AWAITABLE: {
_Py_UopsSymbol *iter;
iter = sym_new_not_null(ctx);
stack_pointer[-1] = iter;
break;
}
/* _SEND is not a viable micro-op for tier 2 */
/* _SEND_GEN is not a viable micro-op for tier 2 */
/* _INSTRUMENTED_YIELD_VALUE is not a viable micro-op for tier 2 */
// The value sent back into a generator is completely unknown.
case _YIELD_VALUE: {
_Py_UopsSymbol *res;
res = sym_new_unknown(ctx);
stack_pointer[-1] = res;
break;
}
case _POP_EXCEPT: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_COMMON_CONSTANT: {
_Py_UopsSymbol *value;
value = sym_new_not_null(ctx);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_BUILD_CLASS: {
_Py_UopsSymbol *bc;
bc = sym_new_not_null(ctx);
stack_pointer[0] = bc;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_NAME: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _DELETE_NAME: {
break;
}
// Generic unpack: the unpacked values are unknown symbols; the sequence
// symbol itself is unused by the optimizer.
case _UNPACK_SEQUENCE: {
_Py_UopsSymbol *seq;
_Py_UopsSymbol **values;
seq = stack_pointer[-1];
values = &stack_pointer[-1];
/* This has to be done manually */
(void)seq;
for (int i = 0; i < oparg; i++) {
values[i] = sym_new_unknown(ctx);
}
stack_pointer += -1 + oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _UNPACK_SEQUENCE_TWO_TUPLE: {
_Py_UopsSymbol *val1;
_Py_UopsSymbol *val0;
val1 = sym_new_not_null(ctx);
val0 = sym_new_not_null(ctx);
stack_pointer[-1] = val1;
stack_pointer[0] = val0;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _UNPACK_SEQUENCE_TUPLE: {
_Py_UopsSymbol **values;
values = &stack_pointer[-1];
for (int _i = oparg; --_i >= 0;) {
values[_i] = sym_new_not_null(ctx);
}
stack_pointer += -1 + oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _UNPACK_SEQUENCE_LIST: {
_Py_UopsSymbol **values;
values = &stack_pointer[-1];
for (int _i = oparg; --_i >= 0;) {
values[_i] = sym_new_not_null(ctx);
}
stack_pointer += -1 + oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
// _UNPACK_EX: oparg encodes counts before/after the starred target;
// totalargs includes the list for the starred target itself.
case _UNPACK_EX: {
_Py_UopsSymbol *seq;
_Py_UopsSymbol **values;
seq = stack_pointer[-1];
values = &stack_pointer[-1];
/* This has to be done manually */
(void)seq;
int totalargs = (oparg & 0xFF) + (oparg >> 8) + 1;
for (int i = 0; i < totalargs; i++) {
values[i] = sym_new_unknown(ctx);
}
stack_pointer += (oparg >> 8) + (oparg & 0xFF);
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_ATTR: {
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _DELETE_ATTR: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_GLOBAL: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _DELETE_GLOBAL: {
break;
}
case _LOAD_LOCALS: {
_Py_UopsSymbol *locals;
locals = sym_new_not_null(ctx);
stack_pointer[0] = locals;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
/* _LOAD_FROM_DICT_OR_GLOBALS is not a viable micro-op for tier 2 */
/* _LOAD_NAME is not a viable micro-op for tier 2 */
// Global loads: push the result plus, when the low oparg bit is set,
// a NULL slot (the usual LOAD_GLOBAL "push NULL for calls" convention).
case _LOAD_GLOBAL: {
_Py_UopsSymbol *res;
_Py_UopsSymbol *null = NULL;
res = sym_new_not_null(ctx);
null = sym_new_null(ctx);
stack_pointer[0] = res;
if (oparg & 1) stack_pointer[1] = null;
stack_pointer += 1 + (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
case _GUARD_GLOBALS_VERSION: {
break;
}
case _GUARD_BUILTINS_VERSION: {
break;
}
case _LOAD_GLOBAL_MODULE: {
_Py_UopsSymbol *res;
_Py_UopsSymbol *null = NULL;
res = sym_new_not_null(ctx);
null = sym_new_null(ctx);
stack_pointer[0] = res;
if (oparg & 1) stack_pointer[1] = null;
stack_pointer += 1 + (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_GLOBAL_BUILTINS: {
_Py_UopsSymbol *res;
_Py_UopsSymbol *null = NULL;
res = sym_new_not_null(ctx);
null = sym_new_null(ctx);
stack_pointer[0] = res;
if (oparg & 1) stack_pointer[1] = null;
stack_pointer += 1 + (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
case _DELETE_FAST: {
break;
}
case _MAKE_CELL: {
break;
}
case _DELETE_DEREF: {
break;
}
case _LOAD_FROM_DICT_OR_DEREF: {
_Py_UopsSymbol *value;
value = sym_new_not_null(ctx);
stack_pointer[-1] = value;
break;
}
case _LOAD_DEREF: {
_Py_UopsSymbol *value;
value = sym_new_not_null(ctx);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_DEREF: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _COPY_FREE_VARS: {
break;
}
// Collection builders: consume oparg (or 2*oparg for maps) items and
// push one non-null result symbol.
case _BUILD_STRING: {
_Py_UopsSymbol *str;
str = sym_new_not_null(ctx);
stack_pointer[-oparg] = str;
stack_pointer += 1 - oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BUILD_TUPLE: {
_Py_UopsSymbol *tup;
tup = sym_new_not_null(ctx);
stack_pointer[-oparg] = tup;
stack_pointer += 1 - oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _BUILD_LIST: {
_Py_UopsSymbol *list;
list = sym_new_not_null(ctx);
stack_pointer[-oparg] = list;
stack_pointer += 1 - oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LIST_EXTEND: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _SET_UPDATE: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
/* _BUILD_SET is not a viable micro-op for tier 2 */
case _BUILD_MAP: {
_Py_UopsSymbol *map;
map = sym_new_not_null(ctx);
stack_pointer[-oparg*2] = map;
stack_pointer += 1 - oparg*2;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _SETUP_ANNOTATIONS: {
break;
}
// Consumes oparg values plus the keys tuple.
case _BUILD_CONST_KEY_MAP: {
_Py_UopsSymbol *map;
map = sym_new_not_null(ctx);
stack_pointer[-1 - oparg] = map;
stack_pointer += -oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _DICT_UPDATE: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _DICT_MERGE: {
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _MAP_ADD: {
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
/* _INSTRUMENTED_LOAD_SUPER_ATTR is not a viable micro-op for tier 2 */
case _LOAD_SUPER_ATTR_ATTR: {
_Py_UopsSymbol *attr_st;
attr_st = sym_new_not_null(ctx);
stack_pointer[-3] = attr_st;
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_SUPER_ATTR_METHOD: {
_Py_UopsSymbol *attr;
_Py_UopsSymbol *self_or_null;
attr = sym_new_not_null(ctx);
self_or_null = sym_new_not_null(ctx);
stack_pointer[-3] = attr;
stack_pointer[-2] = self_or_null;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
// Generic attribute load: result unknown except non-null; when the low
// oparg bit is set an extra (unknown) self-or-null slot is pushed.
case _LOAD_ATTR: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *self_or_null = NULL;
owner = stack_pointer[-1];
(void)owner;
attr = sym_new_not_null(ctx);
if (oparg & 1) {
self_or_null = sym_new_unknown(ctx);
}
stack_pointer[-1] = attr;
if (oparg & 1) stack_pointer[0] = self_or_null;
stack_pointer += (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
// _GUARD_TYPE_VERSION: drop the guard when the owner's type version is
// already known to match; otherwise record the version and register a
// type watcher plus a bloom-filter dependency so a type change
// invalidates this executor.
case _GUARD_TYPE_VERSION: {
_Py_UopsSymbol *owner;
owner = stack_pointer[-1];
uint32_t type_version = (uint32_t)this_instr->operand;
assert(type_version);
if (sym_matches_type_version(owner, type_version)) {
REPLACE_OP(this_instr, _NOP, 0, 0);
} else {
// add watcher so that whenever the type changes we invalidate this
PyTypeObject *type = _PyType_LookupByVersion(type_version);
// if the type is null, it was not found in the cache (there was a conflict)
// with the key, in which case we can't trust the version
if (type) {
// if the type version was set properly, then add a watcher
// if it wasn't this means that the type version was previously set to something else
// and we set the owner to bottom, so we don't need to add a watcher because we must have
// already added one earlier.
if (sym_set_type_version(owner, type_version)) {
PyType_Watch(TYPE_WATCHER_ID, (PyObject *)type);
_Py_BloomFilter_Add(dependencies, type);
}
}
}
break;
}
case _CHECK_MANAGED_OBJECT_HAS_VALUES: {
break;
}
case _LOAD_ATTR_INSTANCE_VALUE: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *null = NULL;
owner = stack_pointer[-1];
uint16_t index = (uint16_t)this_instr->operand;
attr = sym_new_not_null(ctx);
null = sym_new_null(ctx);
(void)index;
(void)owner;
stack_pointer[-1] = attr;
if (oparg & 1) stack_pointer[0] = null;
stack_pointer += (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
// _CHECK_ATTR_MODULE: if the owner is a known module constant, watch
// its dict (up to the allowed mutation budget) and drop the check.
case _CHECK_ATTR_MODULE: {
_Py_UopsSymbol *owner;
owner = stack_pointer[-1];
uint32_t dict_version = (uint32_t)this_instr->operand;
(void)dict_version;
if (sym_is_const(owner)) {
PyObject *cnst = sym_get_const(owner);
if (PyModule_CheckExact(cnst)) {
PyModuleObject *mod = (PyModuleObject *)cnst;
PyObject *dict = mod->md_dict;
uint64_t watched_mutations = get_mutations(dict);
if (watched_mutations < _Py_MAX_ALLOWED_GLOBALS_MODIFICATIONS) {
PyDict_Watch(GLOBALS_WATCHER_ID, dict);
_Py_BloomFilter_Add(dependencies, dict);
this_instr->opcode = _NOP;
}
}
}
break;
}
// _LOAD_ATTR_MODULE: if the preceding check was removed (module const,
// dict watched), try converting the load into a constant; the check is
// then turned into a _POP_TOP to discard the owner.
case _LOAD_ATTR_MODULE: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *null = NULL;
owner = stack_pointer[-1];
uint16_t index = (uint16_t)this_instr->operand;
(void)index;
null = sym_new_null(ctx);
attr = NULL;
if (this_instr[-1].opcode == _NOP) {
// Preceding _CHECK_ATTR_MODULE was removed: mod is const and dict is watched.
assert(sym_is_const(owner));
PyModuleObject *mod = (PyModuleObject *)sym_get_const(owner);
assert(PyModule_CheckExact(mod));
PyObject *dict = mod->md_dict;
PyObject *res = convert_global_to_const(this_instr, dict);
if (res != NULL) {
this_instr[-1].opcode = _POP_TOP;
attr = sym_new_const(ctx, res);
}
}
if (attr == NULL) {
/* No conversion made. We don't know what `attr` is. */
attr = sym_new_not_null(ctx);
}
stack_pointer[-1] = attr;
if (oparg & 1) stack_pointer[0] = null;
stack_pointer += (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
case _CHECK_ATTR_WITH_HINT: {
break;
}
case _LOAD_ATTR_WITH_HINT: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *null = NULL;
owner = stack_pointer[-1];
uint16_t hint = (uint16_t)this_instr->operand;
attr = sym_new_not_null(ctx);
null = sym_new_null(ctx);
(void)hint;
(void)owner;
stack_pointer[-1] = attr;
if (oparg & 1) stack_pointer[0] = null;
stack_pointer += (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_ATTR_SLOT: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *null = NULL;
owner = stack_pointer[-1];
uint16_t index = (uint16_t)this_instr->operand;
attr = sym_new_not_null(ctx);
null = sym_new_null(ctx);
(void)index;
(void)owner;
stack_pointer[-1] = attr;
if (oparg & 1) stack_pointer[0] = null;
stack_pointer += (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
case _CHECK_ATTR_CLASS: {
break;
}
case _LOAD_ATTR_CLASS: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *null = NULL;
owner = stack_pointer[-1];
PyObject *descr = (PyObject *)this_instr->operand;
attr = sym_new_not_null(ctx);
null = sym_new_null(ctx);
(void)descr;
(void)owner;
stack_pointer[-1] = attr;
if (oparg & 1) stack_pointer[0] = null;
stack_pointer += (oparg & 1);
assert(WITHIN_STACK_BOUNDS());
break;
}
/* _LOAD_ATTR_PROPERTY is not a viable micro-op for tier 2 */
/* _LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN is not a viable micro-op for tier 2 */
case _GUARD_DORV_NO_DICT: {
break;
}
case _STORE_ATTR_INSTANCE_VALUE: {
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_ATTR_WITH_HINT: {
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _STORE_ATTR_SLOT: {
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
// Comparison cases: specialized compares and identity/membership tests
// always produce a bool symbol; generic _COMPARE_OP only does when
// oparg bit 4 requests a bool coercion.
case _COMPARE_OP: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
(void)left;
(void)right;
if (oparg & 16) {
res = sym_new_type(ctx, &PyBool_Type);
}
else {
res = _Py_uop_sym_new_not_null(ctx);
}
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _COMPARE_OP_FLOAT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
(void)left;
(void)right;
res = sym_new_type(ctx, &PyBool_Type);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _COMPARE_OP_INT: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
(void)left;
(void)right;
res = sym_new_type(ctx, &PyBool_Type);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _COMPARE_OP_STR: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
(void)left;
(void)right;
res = sym_new_type(ctx, &PyBool_Type);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _IS_OP: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
(void)left;
(void)right;
res = sym_new_type(ctx, &PyBool_Type);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _CONTAINS_OP: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
(void)left;
(void)right;
res = sym_new_type(ctx, &PyBool_Type);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _CONTAINS_OP_SET: {
_Py_UopsSymbol *b;
b = sym_new_not_null(ctx);
stack_pointer[-2] = b;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _CONTAINS_OP_DICT: {
_Py_UopsSymbol *b;
b = sym_new_not_null(ctx);
stack_pointer[-2] = b;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _CHECK_EG_MATCH: {
_Py_UopsSymbol *rest;
_Py_UopsSymbol *match;
rest = sym_new_not_null(ctx);
match = sym_new_not_null(ctx);
stack_pointer[-2] = rest;
stack_pointer[-1] = match;
break;
}
case _CHECK_EXC_MATCH: {
_Py_UopsSymbol *b;
b = sym_new_not_null(ctx);
stack_pointer[-1] = b;
break;
}
/* _POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */
/* _POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */
case _IS_NONE: {
_Py_UopsSymbol *b;
b = sym_new_not_null(ctx);
stack_pointer[-1] = b;
break;
}
case _GET_LEN: {
_Py_UopsSymbol *len;
len = sym_new_not_null(ctx);
stack_pointer[0] = len;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _MATCH_CLASS: {
_Py_UopsSymbol *attrs;
attrs = sym_new_not_null(ctx);
stack_pointer[-3] = attrs;
stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _MATCH_MAPPING: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _MATCH_SEQUENCE: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _MATCH_KEYS: {
_Py_UopsSymbol *values_or_none;
values_or_none = sym_new_not_null(ctx);
stack_pointer[0] = values_or_none;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _GET_ITER: {
_Py_UopsSymbol *iter;
iter = sym_new_not_null(ctx);
stack_pointer[-1] = iter;
break;
}
case _GET_YIELD_FROM_ITER: {
_Py_UopsSymbol *iter;
iter = sym_new_not_null(ctx);
stack_pointer[-1] = iter;
break;
}
/* _FOR_ITER is not a viable micro-op for tier 2 */
case _FOR_ITER_TIER_TWO: {
_Py_UopsSymbol *next;
next = sym_new_not_null(ctx);
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
/* _INSTRUMENTED_FOR_ITER is not a viable micro-op for tier 2 */
// Iterator guard cases have no modeled effect; _ITER_NEXT_* pushes the
// produced item (typed as int for range iteration).
case _ITER_CHECK_LIST: {
break;
}
/* _ITER_JUMP_LIST is not a viable micro-op for tier 2 */
case _GUARD_NOT_EXHAUSTED_LIST: {
break;
}
case _ITER_NEXT_LIST: {
_Py_UopsSymbol *next;
next = sym_new_not_null(ctx);
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _ITER_CHECK_TUPLE: {
break;
}
/* _ITER_JUMP_TUPLE is not a viable micro-op for tier 2 */
case _GUARD_NOT_EXHAUSTED_TUPLE: {
break;
}
case _ITER_NEXT_TUPLE: {
_Py_UopsSymbol *next;
next = sym_new_not_null(ctx);
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _ITER_CHECK_RANGE: {
break;
}
/* _ITER_JUMP_RANGE is not a viable micro-op for tier 2 */
case _GUARD_NOT_EXHAUSTED_RANGE: {
break;
}
case _ITER_NEXT_RANGE: {
_Py_UopsSymbol *iter;
_Py_UopsSymbol *next;
iter = stack_pointer[-1];
next = sym_new_type(ctx, &PyLong_Type);
(void)iter;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _FOR_ITER_GEN_FRAME: {
/* We are about to hit the end of the trace */
ctx->done = true;
break;
}
case _LOAD_SPECIAL: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *self_or_null;
owner = stack_pointer[-1];
(void)owner;
attr = sym_new_not_null(ctx);
self_or_null = sym_new_unknown(ctx);
stack_pointer[-1] = attr;
stack_pointer[0] = self_or_null;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _WITH_EXCEPT_START: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _PUSH_EXC_INFO: {
_Py_UopsSymbol *prev_exc;
_Py_UopsSymbol *new_exc;
prev_exc = sym_new_not_null(ctx);
new_exc = sym_new_not_null(ctx);
stack_pointer[-1] = prev_exc;
stack_pointer[0] = new_exc;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _GUARD_DORV_VALUES_INST_ATTR_FROM_DICT: {
break;
}
case _GUARD_KEYS_VERSION: {
break;
}
// Method loads: the method descriptor becomes a non-null symbol and
// the owner symbol is reused as `self` under the method.
case _LOAD_ATTR_METHOD_WITH_VALUES: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *self = NULL;
owner = stack_pointer[-1];
PyObject *descr = (PyObject *)this_instr->operand;
(void)descr;
attr = sym_new_not_null(ctx);
self = owner;
stack_pointer[-1] = attr;
stack_pointer[0] = self;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_ATTR_METHOD_NO_DICT: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *self = NULL;
owner = stack_pointer[-1];
PyObject *descr = (PyObject *)this_instr->operand;
(void)descr;
attr = sym_new_not_null(ctx);
self = owner;
stack_pointer[-1] = attr;
stack_pointer[0] = self;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES: {
_Py_UopsSymbol *attr;
attr = sym_new_not_null(ctx);
stack_pointer[-1] = attr;
break;
}
case _LOAD_ATTR_NONDESCRIPTOR_NO_DICT: {
_Py_UopsSymbol *attr;
attr = sym_new_not_null(ctx);
stack_pointer[-1] = attr;
break;
}
case _CHECK_ATTR_METHOD_LAZY_DICT: {
break;
}
case _LOAD_ATTR_METHOD_LAZY_DICT: {
_Py_UopsSymbol *owner;
_Py_UopsSymbol *attr;
_Py_UopsSymbol *self = NULL;
owner = stack_pointer[-1];
PyObject *descr = (PyObject *)this_instr->operand;
(void)descr;
attr = sym_new_not_null(ctx);
self = owner;
stack_pointer[-1] = attr;
stack_pointer[0] = self;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
}
/* _INSTRUMENTED_CALL is not a viable micro-op for tier 2 */
/* _CALL is not a viable micro-op for tier 2 */
case _CHECK_PERIODIC: {
break;
}
case _PY_FRAME_GENERAL: {
_Py_UopsSymbol **args;
_Py_UopsSymbol *self_or_null;
_Py_UopsSymbol *callable;
_Py_UOpsAbstractFrame *new_frame;
args = &stack_pointer[-oparg];
self_or_null = stack_pointer[-1 - oparg];
callable = stack_pointer[-2 - oparg];
/* The _Py_UOpsAbstractFrame design assumes that we can copy arguments across directly */
(void)callable;
(void)self_or_null;
(void)args;
new_frame = NULL;
ctx->done = true;
stack_pointer[-2 - oparg] = (_Py_UopsSymbol *)new_frame;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
case _CHECK_FUNCTION_VERSION: {
break;
}
case _CHECK_METHOD_VERSION: {
break;
}
case _EXPAND_METHOD: {
_Py_UopsSymbol *method;
_Py_UopsSymbol *self;
method = sym_new_not_null(ctx);
self = sym_new_not_null(ctx);
stack_pointer[-2 - oparg] = method;
stack_pointer[-1 - oparg] = self;
break;
}
case _CHECK_IS_NOT_PY_CALLABLE: {
break;
}
case _CALL_NON_PY_GENERAL: {
_Py_UopsSymbol *res;
res = sym_new_not_null(ctx);
stack_pointer[-2 - oparg] = res;
stack_pointer += -1 - oparg;
assert(WITHIN_STACK_BOUNDS());
break;
}
        case _CHECK_CALL_BOUND_METHOD_EXACT_ARGS: {
            _Py_UopsSymbol *null;
            _Py_UopsSymbol *callable;
            null = stack_pointer[-1 - oparg];
            callable = stack_pointer[-2 - oparg];
            // Record the facts this guard establishes so later uops can use them.
            sym_set_null(null);
            sym_set_type(callable, &PyMethod_Type);
            break;
        }
        case _INIT_CALL_BOUND_METHOD_EXACT_ARGS: {
            _Py_UopsSymbol *callable;
            _Py_UopsSymbol *func;
            _Py_UopsSymbol *self;
            callable = stack_pointer[-2 - oparg];
            (void)callable;  // unpacking is not modelled beyond non-NULL-ness
            func = sym_new_not_null(ctx);
            self = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = func;
            stack_pointer[-1 - oparg] = self;
            break;
        }
        case _CHECK_PEP_523: {
            /* Setting the eval frame function invalidates
             * all executors, so no need to check dynamically */
            // Strength-reduce the guard to a _NOP when no PEP 523 frame
            // evaluation hook is installed right now.
            if (_PyInterpreterState_GET()->eval_frame == NULL) {
                REPLACE_OP(this_instr, _NOP, 0 ,0);
            }
            break;
        }
        case _CHECK_FUNCTION_EXACT_ARGS: {
            _Py_UopsSymbol *self_or_null;
            _Py_UopsSymbol *callable;
            self_or_null = stack_pointer[-1 - oparg];
            callable = stack_pointer[-2 - oparg];
            // After this guard the callable is known to be a PyFunction.
            sym_set_type(callable, &PyFunction_Type);
            (void)self_or_null;
            break;
        }
        case _CHECK_STACK_SPACE: {
            // Remember this instruction so _PUSH_FRAME can fold redundant
            // stack-space checks into a single _CHECK_STACK_SPACE_OPERAND.
            assert(corresponding_check_stack == NULL);
            corresponding_check_stack = this_instr;
            break;
        }
        case _INIT_CALL_PY_EXACT_ARGS: {
            _Py_UopsSymbol **args;
            _Py_UopsSymbol *self_or_null;
            _Py_UopsSymbol *callable;
            _Py_UOpsAbstractFrame *new_frame;
            args = &stack_pointer[-oparg];
            self_or_null = stack_pointer[-1 - oparg];
            callable = stack_pointer[-2 - oparg];
            int argcount = oparg;
            (void)callable;
            PyCodeObject *co = NULL;
            // The code object for the callee is carried on the following
            // _PUSH_FRAME's operand: low bit set => a code object pointer,
            // otherwise a function object pointer (or NULL if unknown).
            assert((this_instr + 2)->opcode == _PUSH_FRAME);
            uint64_t push_operand = (this_instr + 2)->operand;
            if (push_operand & 1) {
                co = (PyCodeObject *)(push_operand & ~1);
                DPRINTF(3, "code=%p ", co);
                assert(PyCode_Check(co));
            }
            else {
                PyFunctionObject *func = (PyFunctionObject *)push_operand;
                DPRINTF(3, "func=%p ", func);
                if (func == NULL) {
                    DPRINTF(3, "\n");
                    DPRINTF(1, "Missing function\n");
                    // No callee known: abandon abstract interpretation.
                    ctx->done = true;
                    break;
                }
                co = (PyCodeObject *)func->func_code;
                DPRINTF(3, "code=%p ", co);
            }
            assert(self_or_null != NULL);
            assert(args != NULL);
            if (sym_is_not_null(self_or_null)) {
                // Bound method fiddling, same as _INIT_CALL_PY_EXACT_ARGS in VM
                args--;
                argcount++;
            }
            // True iff the NULL-ness of self_or_null is *known* either way;
            // only then can the arguments be copied into the new frame.
            if (sym_is_null(self_or_null) || sym_is_not_null(self_or_null)) {
                new_frame = frame_new(ctx, co, 0, args, argcount);
            } else {
                // NULL-ness unknown: build the frame with unknown locals.
                new_frame = frame_new(ctx, co, 0, NULL, 0);
            }
            stack_pointer[-2 - oparg] = (_Py_UopsSymbol *)new_frame;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _PUSH_FRAME: {
            _Py_UOpsAbstractFrame *new_frame;
            // Pop the abstract frame pushed by _INIT_CALL_* and make it current.
            new_frame = (_Py_UOpsAbstractFrame *)stack_pointer[-1];
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            ctx->frame->stack_pointer = stack_pointer;  // save caller's stack
            ctx->frame = new_frame;
            ctx->curr_frame_depth++;
            stack_pointer = new_frame->stack_pointer;
            co = get_code(this_instr);
            if (co == NULL) {
                // should be about to _EXIT_TRACE anyway
                ctx->done = true;
                break;
            }
            /* Stack space handling */
            // Track total C/VM stack needed across inlined frames so the
            // checks can be merged into one _CHECK_STACK_SPACE_OPERAND.
            int framesize = co->co_framesize;
            assert(framesize > 0);
            curr_space += framesize;
            if (curr_space < 0 || curr_space > INT32_MAX) {
                // won't fit in signed 32-bit int
                ctx->done = true;
                break;
            }
            max_space = curr_space > max_space ? curr_space : max_space;
            if (first_valid_check_stack == NULL) {
                first_valid_check_stack = corresponding_check_stack;
            }
            else if (corresponding_check_stack) {
                // delete all but the first valid _CHECK_STACK_SPACE
                corresponding_check_stack->opcode = _NOP;
            }
            corresponding_check_stack = NULL;
            break;
        }
        // The one-argument call specializations below all have the same
        // abstract effect: pop (callable, NULL, arg), push a non-NULL result.
        case _CALL_TYPE_1: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-3] = res;
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_STR_1: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-3] = res;
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_TUPLE_1: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-3] = res;
            stack_pointer += -2;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        /* _CALL_ALLOC_AND_ENTER_INIT is not a viable micro-op for tier 2 */
        case _EXIT_INIT_CHECK: {
            // Pops the __init__ result; nothing else to model.
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        // Each specialized call below pops (callable, self_or_null, oparg args)
        // and pushes one fresh, known-non-NULL result symbol.
        case _CALL_BUILTIN_CLASS: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_BUILTIN_O: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_BUILTIN_FAST: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_BUILTIN_FAST_WITH_KEYWORDS: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_LEN: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_ISINSTANCE: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_METHOD_DESCRIPTOR_O: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_METHOD_DESCRIPTOR_NOARGS: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CALL_METHOD_DESCRIPTOR_FAST: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-2 - oparg] = res;
            stack_pointer += -1 - oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        /* _INSTRUMENTED_CALL_KW is not a viable micro-op for tier 2 */
        /* _CALL_KW is not a viable micro-op for tier 2 */
        /* _INSTRUMENTED_CALL_FUNCTION_EX is not a viable micro-op for tier 2 */
        /* _CALL_FUNCTION_EX is not a viable micro-op for tier 2 */
        case _MAKE_FUNCTION: {
            _Py_UopsSymbol *func;
            // Replaces the code object on top of stack with a fresh
            // non-NULL function symbol.
            func = sym_new_not_null(ctx);
            stack_pointer[-1] = func;
            break;
        }
        case _SET_FUNCTION_ATTRIBUTE: {
            _Py_UopsSymbol *func_st;
            // Pops the attribute value; the function stays non-NULL.
            func_st = sym_new_not_null(ctx);
            stack_pointer[-2] = func_st;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _RETURN_GENERATOR: {
            _Py_UopsSymbol *res;
            // Leave the callee's abstract frame and resume in the caller.
            ctx->frame->stack_pointer = stack_pointer;
            frame_pop(ctx);
            stack_pointer = ctx->frame->stack_pointer;
            res = sym_new_unknown(ctx);  // generator object; type not tracked
            /* Stack space handling */
            // Undo the space reserved for the callee frame by _PUSH_FRAME.
            assert(corresponding_check_stack == NULL);
            assert(co != NULL);
            int framesize = co->co_framesize;
            assert(framesize > 0);
            assert(framesize <= curr_space);
            curr_space -= framesize;
            co = get_code(this_instr);
            if (co == NULL) {
                // might be impossible, but bailing is still safe
                ctx->done = true;
            }
            stack_pointer[0] = res;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _BUILD_SLICE: {
            _Py_UopsSymbol *slice;
            slice = sym_new_not_null(ctx);
            // oparg == 3 means an extra `step` operand was pushed.
            stack_pointer[-2 - ((oparg == 3) ? 1 : 0)] = slice;
            stack_pointer += -1 - ((oparg == 3) ? 1 : 0);
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CONVERT_VALUE: {
            _Py_UopsSymbol *result;
            // str()/repr()/ascii() conversion: result is non-NULL.
            result = sym_new_not_null(ctx);
            stack_pointer[-1] = result;
            break;
        }
        case _FORMAT_SIMPLE: {
            _Py_UopsSymbol *res;
            res = sym_new_not_null(ctx);
            stack_pointer[-1] = res;
            break;
        }
        case _FORMAT_WITH_SPEC: {
            _Py_UopsSymbol *res;
            // Pops the format spec; pushes a non-NULL result.
            res = sym_new_not_null(ctx);
            stack_pointer[-2] = res;
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _COPY: {
            _Py_UopsSymbol *bottom;
            _Py_UopsSymbol *top;
            bottom = stack_pointer[-1 - (oparg-1)];
            assert(oparg > 0);
            // The copy aliases the same symbol: both slots share all facts.
            top = bottom;
            stack_pointer[0] = top;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
case _BINARY_OP: {
_Py_UopsSymbol *right;
_Py_UopsSymbol *left;
_Py_UopsSymbol *res;
right = stack_pointer[-1];
left = stack_pointer[-2];
PyTypeObject *ltype = sym_get_type(left);
PyTypeObject *rtype = sym_get_type(right);
if (ltype != NULL && (ltype == &PyLong_Type || ltype == &PyFloat_Type) &&
rtype != NULL && (rtype == &PyLong_Type || rtype == &PyFloat_Type))
{
if (oparg != NB_TRUE_DIVIDE && oparg != NB_INPLACE_TRUE_DIVIDE &&
ltype == &PyLong_Type && rtype == &PyLong_Type) {
/* If both inputs are ints and the op is not division the result is an int */
res = sym_new_type(ctx, &PyLong_Type);
}
else {
/* For any other op combining ints/floats the result is a float */
res = sym_new_type(ctx, &PyFloat_Type);
}
}
res = sym_new_unknown(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
break;
}
        case _SWAP: {
            _Py_UopsSymbol *top;
            _Py_UopsSymbol *bottom;
            // Exchange TOS with the item oparg-1 slots below it.
            top = stack_pointer[-1];
            bottom = stack_pointer[-2 - (oparg-2)];
            stack_pointer[-2 - (oparg-2)] = top;
            stack_pointer[-1] = bottom;
            break;
        }
        /* _INSTRUMENTED_INSTRUCTION is not a viable micro-op for tier 2 */
        /* _INSTRUMENTED_JUMP_FORWARD is not a viable micro-op for tier 2 */
        /* _INSTRUMENTED_JUMP_BACKWARD is not a viable micro-op for tier 2 */
        /* _INSTRUMENTED_POP_JUMP_IF_TRUE is not a viable micro-op for tier 2 */
        /* _INSTRUMENTED_POP_JUMP_IF_FALSE is not a viable micro-op for tier 2 */
        /* _INSTRUMENTED_POP_JUMP_IF_NONE is not a viable micro-op for tier 2 */
        /* _INSTRUMENTED_POP_JUMP_IF_NOT_NONE is not a viable micro-op for tier 2 */
        // For the four pop-guards below: if the flag's value (or enough of
        // its type) is known at trace time, the guard is eliminated — the
        // second argument to eliminate_pop_guard() says whether the guard
        // would have *failed* (exited the trace).
        case _GUARD_IS_TRUE_POP: {
            _Py_UopsSymbol *flag;
            flag = stack_pointer[-1];
            if (sym_is_const(flag)) {
                PyObject *value = sym_get_const(flag);
                assert(value != NULL);
                eliminate_pop_guard(this_instr, value != Py_True);
            }
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _GUARD_IS_FALSE_POP: {
            _Py_UopsSymbol *flag;
            flag = stack_pointer[-1];
            if (sym_is_const(flag)) {
                PyObject *value = sym_get_const(flag);
                assert(value != NULL);
                eliminate_pop_guard(this_instr, value != Py_False);
            }
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _GUARD_IS_NONE_POP: {
            _Py_UopsSymbol *flag;
            flag = stack_pointer[-1];
            if (sym_is_const(flag)) {
                PyObject *value = sym_get_const(flag);
                assert(value != NULL);
                eliminate_pop_guard(this_instr, !Py_IsNone(value));
            }
            else if (sym_has_type(flag)) {
                // A known non-None type means the guard always fails.
                assert(!sym_matches_type(flag, &_PyNone_Type));
                eliminate_pop_guard(this_instr, true);
            }
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _GUARD_IS_NOT_NONE_POP: {
            _Py_UopsSymbol *flag;
            flag = stack_pointer[-1];
            if (sym_is_const(flag)) {
                PyObject *value = sym_get_const(flag);
                assert(value != NULL);
                eliminate_pop_guard(this_instr, Py_IsNone(value));
            }
            else if (sym_has_type(flag)) {
                // A known non-None type means the guard always succeeds.
                assert(!sym_matches_type(flag, &_PyNone_Type));
                eliminate_pop_guard(this_instr, false);
            }
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _JUMP_TO_TOP: {
            // Back-edge of the trace: abstract interpretation stops here.
            ctx->done = true;
            break;
        }
        case _SET_IP: {
            // Bookkeeping only; no abstract stack effect.
            break;
        }
        case _CHECK_STACK_SPACE_OPERAND: {
            uint32_t framesize = (uint32_t)this_instr->operand;
            (void)framesize;
            /* We should never see _CHECK_STACK_SPACE_OPERANDs.
             * They are only created at the end of this pass. */
            Py_UNREACHABLE();
            break;
        }
        case _SAVE_RETURN_OFFSET: {
            // Bookkeeping only; no abstract stack effect.
            break;
        }
        case _EXIT_TRACE: {
            ctx->done = true;
            break;
        }
        case _CHECK_VALIDITY: {
            // Guard only; no abstract stack effect.
            break;
        }
        // Inline-constant loads: the pushed symbol is a known constant, so
        // later guards on this value can be eliminated.
        case _LOAD_CONST_INLINE: {
            _Py_UopsSymbol *value;
            PyObject *ptr = (PyObject *)this_instr->operand;
            value = sym_new_const(ctx, ptr);
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _LOAD_CONST_INLINE_BORROW: {
            _Py_UopsSymbol *value;
            PyObject *ptr = (PyObject *)this_instr->operand;
            value = sym_new_const(ctx, ptr);
            stack_pointer[0] = value;
            stack_pointer += 1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _POP_TOP_LOAD_CONST_INLINE_BORROW: {
            _Py_UopsSymbol *value;
            // NOTE(review): modelled only as non-NULL, not as a constant,
            // unlike the other inline-const loads.
            value = sym_new_not_null(ctx);
            stack_pointer[-1] = value;
            break;
        }
        case _LOAD_CONST_INLINE_WITH_NULL: {
            _Py_UopsSymbol *value;
            _Py_UopsSymbol *null;
            PyObject *ptr = (PyObject *)this_instr->operand;
            value = sym_new_const(ctx, ptr);
            null = sym_new_null(ctx);
            stack_pointer[0] = value;
            stack_pointer[1] = null;
            stack_pointer += 2;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _LOAD_CONST_INLINE_BORROW_WITH_NULL: {
            _Py_UopsSymbol *value;
            _Py_UopsSymbol *null;
            PyObject *ptr = (PyObject *)this_instr->operand;
            value = sym_new_const(ctx, ptr);
            null = sym_new_null(ctx);
            stack_pointer[0] = value;
            stack_pointer[1] = null;
            stack_pointer += 2;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _CHECK_FUNCTION: {
            // Guard only; no abstract stack effect.
            break;
        }
        case _INTERNAL_INCREMENT_OPT_COUNTER: {
            // Pops its operand; nothing else to model.
            stack_pointer += -1;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        // The remaining uops have no abstract stack effect.
        case _COLD_EXIT: {
            break;
        }
        case _DYNAMIC_EXIT: {
            break;
        }
        case _START_EXECUTOR: {
            break;
        }
        case _FATAL_ERROR: {
            break;
        }
        case _CHECK_VALIDITY_AND_SET_IP: {
            break;
        }
        case _DEOPT: {
            break;
        }
        case _ERROR_POP_N: {
            // Error path: discard oparg items from the abstract stack.
            stack_pointer += -oparg;
            assert(WITHIN_STACK_BOUNDS());
            break;
        }
        case _TIER2_RESUME_CHECK: {
            break;
        }