bpo-44826: Specialize STORE_ATTR (GH-27590)

* Generalize cache names for LOAD_ATTR to allow store and delete specializations.

* Factor out specialization of attribute dictionary access.

* Specialize STORE_ATTR.
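
For illustration only (an editor's sketch, not part of the commit; the class and attribute names are hypothetical), this is the kind of hot attribute-store site that the adaptive specialization targets:

class Point:
    def __init__(self, x, y):
        # Each assignment below compiles to a STORE_ATTR instruction.
        # Once this code object has been executed enough times to be
        # quickened, the adaptive interpreter may rewrite those sites to
        # one of the specialized forms added here.
        self.x = x
        self.y = y

# Repeated execution of the same store sites is what makes them candidates
# for specialization; nothing changes at the Python level.
points = [Point(i, i + 1) for i in range(1000)]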
Mark Shannon, 2021-08-09 10:40:21 +01:00 (committed by GitHub)
parent b854557b49
commit ac75f6bdd4
9 changed files with 345 additions and 93 deletions

@@ -23,7 +23,7 @@ typedef struct {
typedef struct {
uint32_t tp_version;
uint32_t dk_version_or_hint;
} _PyLoadAttrCache;
} _PyAttrCache;
typedef struct {
uint32_t module_keys_version;
@@ -43,7 +43,7 @@ typedef struct {
typedef union {
_PyEntryZero zero;
_PyAdaptiveEntry adaptive;
_PyLoadAttrCache load_attr;
_PyAttrCache attr;
_PyLoadGlobalCache load_global;
} SpecializedCacheEntry;
@@ -297,6 +297,7 @@ cache_backoff(_PyAdaptiveEntry *entry) {
/* Specialization functions */
int _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, SpecializedCacheEntry *cache);
int _Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, SpecializedCacheEntry *cache);
int _Py_Specialize_LoadGlobal(PyObject *globals, PyObject *builtins, _Py_CODEUNIT *instr, PyObject *name, SpecializedCacheEntry *cache);
int _Py_Specialize_BinarySubscr(PyObject *sub, PyObject *container, _Py_CODEUNIT *instr);

@@ -89,6 +89,9 @@ struct _dictkeysobject {
#define DK_ENTRIES(dk) \
((PyDictKeyEntry*)(&((int8_t*)((dk)->dk_indices))[DK_SIZE(dk) * DK_IXSIZE(dk)]))
extern uint64_t _pydict_global_version;
#define DICT_NEXT_VERSION() (++_pydict_global_version)
#ifdef __cplusplus
}

Include/opcode.h (generated)

@@ -149,6 +149,10 @@ extern "C" {
#define LOAD_GLOBAL_ADAPTIVE 41
#define LOAD_GLOBAL_MODULE 42
#define LOAD_GLOBAL_BUILTIN 43
#define STORE_ATTR_ADAPTIVE 44
#define STORE_ATTR_SPLIT_KEYS 45
#define STORE_ATTR_SLOT 46
#define STORE_ATTR_WITH_HINT 47
#ifdef NEED_OPCODE_JUMP_TABLES
static uint32_t _PyOpcode_RelativeJump[8] = {
0U,

@@ -233,6 +233,10 @@ def jabs_op(name, op):
"LOAD_GLOBAL_ADAPTIVE",
"LOAD_GLOBAL_MODULE",
"LOAD_GLOBAL_BUILTIN",
"STORE_ATTR_ADAPTIVE",
"STORE_ATTR_SPLIT_KEYS",
"STORE_ATTR_SLOT",
"STORE_ATTR_WITH_HINT",
]
_specialization_stats = [

@@ -0,0 +1,9 @@
Initial implementation of adaptive specialization of STORE_ATTR
Three specialized forms of STORE_ATTR are added:
* STORE_ATTR_SLOT
* STORE_ATTR_SPLIT_KEYS
* STORE_ATTR_WITH_HINT
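
For illustration only (an editor's sketch with hypothetical classes; which form a given store site actually ends up using, and when, is an internal detail of the adaptive interpreter), the three forms roughly correspond to these shapes of Python code:

class Slotted:
    __slots__ = ("x",)

s = Slotted()
s.x = 1    # store through a slot (member) descriptor
           # -> STORE_ATTR_SLOT candidate

class Plain:
    def __init__(self):
        self.x = 0

p = Plain()
p.x = 1    # instance __dict__ still shares keys with the class
           # -> STORE_ATTR_SPLIT_KEYS candidate

del p.x    # deletion typically makes the instance dict stop sharing keys,
p.x = 2    # so later stores go through the combined-keys dict and can use
p.x = 3    # a cached index hint -> STORE_ATTR_WITH_HINT candidate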

@@ -234,9 +234,7 @@ static PyObject* dict_iter(PyDictObject *dict);
/*Global counter used to set ma_version_tag field of dictionary.
* It is incremented each time that a dictionary is created and each
* time that a dictionary is modified. */
static uint64_t pydict_global_version = 0;
#define DICT_NEXT_VERSION() (++pydict_global_version)
uint64_t _pydict_global_version = 0;
#include "clinic/dictobject.c.h"

@@ -2766,6 +2766,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, InterpreterFrame *frame, int thr
}
case TARGET(STORE_ATTR): {
PREDICTED(STORE_ATTR);
PyObject *name = GETITEM(names, oparg);
PyObject *owner = TOP();
PyObject *v = SECOND();
@@ -3394,7 +3395,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, InterpreterFrame *frame, int thr
PyTypeObject *tp = Py_TYPE(owner);
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyLoadAttrCache *cache1 = &caches[-1].load_attr;
_PyAttrCache *cache1 = &caches[-1].attr;
assert(cache1->tp_version != 0);
DEOPT_IF(tp->tp_version_tag != cache1->tp_version, LOAD_ATTR);
assert(tp->tp_dictoffset > 0);
@@ -3418,7 +3419,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, InterpreterFrame *frame, int thr
PyObject *res;
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyLoadAttrCache *cache1 = &caches[-1].load_attr;
_PyAttrCache *cache1 = &caches[-1].attr;
DEOPT_IF(!PyModule_CheckExact(owner), LOAD_ATTR);
PyDictObject *dict = (PyDictObject *)((PyModuleObject *)owner)->md_dict;
assert(dict != NULL);
@@ -3443,7 +3444,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, InterpreterFrame *frame, int thr
PyTypeObject *tp = Py_TYPE(owner);
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyLoadAttrCache *cache1 = &caches[-1].load_attr;
_PyAttrCache *cache1 = &caches[-1].attr;
assert(cache1->tp_version != 0);
DEOPT_IF(tp->tp_version_tag != cache1->tp_version, LOAD_ATTR);
assert(tp->tp_dictoffset > 0);
@@ -3472,7 +3473,7 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, InterpreterFrame *frame, int thr
PyTypeObject *tp = Py_TYPE(owner);
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyLoadAttrCache *cache1 = &caches[-1].load_attr;
_PyAttrCache *cache1 = &caches[-1].attr;
assert(cache1->tp_version != 0);
DEOPT_IF(tp->tp_version_tag != cache1->tp_version, LOAD_ATTR);
char *addr = (char *)owner + cache0->index;
@@ -3486,6 +3487,121 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, InterpreterFrame *frame, int thr
DISPATCH();
}
case TARGET(STORE_ATTR_ADAPTIVE): {
assert(cframe.use_tracing == 0);
SpecializedCacheEntry *cache = GET_CACHE();
if (cache->adaptive.counter == 0) {
PyObject *owner = TOP();
PyObject *name = GETITEM(names, cache->adaptive.original_oparg);
next_instr--;
if (_Py_Specialize_StoreAttr(owner, next_instr, name, cache) < 0) {
goto error;
}
DISPATCH();
}
else {
STAT_INC(STORE_ATTR, deferred);
cache->adaptive.counter--;
oparg = cache->adaptive.original_oparg;
JUMP_TO_INSTRUCTION(STORE_ATTR);
}
}
case TARGET(STORE_ATTR_SPLIT_KEYS): {
assert(cframe.use_tracing == 0);
PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyAttrCache *cache1 = &caches[-1].attr;
assert(cache1->tp_version != 0);
DEOPT_IF(tp->tp_version_tag != cache1->tp_version, STORE_ATTR);
assert(tp->tp_dictoffset > 0);
PyDictObject *dict = *(PyDictObject **)(((char *)owner) + tp->tp_dictoffset);
DEOPT_IF(dict == NULL, STORE_ATTR);
assert(PyDict_CheckExact((PyObject *)dict));
DEOPT_IF(dict->ma_keys->dk_version != cache1->dk_version_or_hint, STORE_ATTR);
/* Need to maintain ordering of dicts */
DEOPT_IF(cache0->index > 0 && dict->ma_values[cache0->index-1] == NULL, STORE_ATTR);
STAT_INC(STORE_ATTR, hit);
record_cache_hit(cache0);
STACK_SHRINK(1);
PyObject *value = POP();
PyObject *old_value = dict->ma_values[cache0->index];
dict->ma_values[cache0->index] = value;
if (old_value == NULL) {
dict->ma_used++;
}
else {
Py_DECREF(old_value);
}
/* Ensure dict is GC tracked if it needs to be */
if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) {
_PyObject_GC_TRACK(dict);
}
/* PEP 509 */
dict->ma_version_tag = DICT_NEXT_VERSION();
Py_DECREF(owner);
DISPATCH();
}
case TARGET(STORE_ATTR_WITH_HINT): {
assert(cframe.use_tracing == 0);
PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyAttrCache *cache1 = &caches[-1].attr;
assert(cache1->tp_version != 0);
DEOPT_IF(tp->tp_version_tag != cache1->tp_version, STORE_ATTR);
assert(tp->tp_dictoffset > 0);
PyDictObject *dict = *(PyDictObject **)(((char *)owner) + tp->tp_dictoffset);
DEOPT_IF(dict == NULL, STORE_ATTR);
assert(PyDict_CheckExact((PyObject *)dict));
PyObject *name = GETITEM(names, cache0->original_oparg);
uint32_t hint = cache1->dk_version_or_hint;
DEOPT_IF(hint >= dict->ma_keys->dk_nentries, STORE_ATTR);
PyDictKeyEntry *ep = DK_ENTRIES(dict->ma_keys) + hint;
DEOPT_IF(ep->me_key != name, STORE_ATTR);
PyObject *old_value = ep->me_value;
DEOPT_IF(old_value == NULL, STORE_ATTR);
STAT_INC(STORE_ATTR, hit);
record_cache_hit(cache0);
STACK_SHRINK(1);
PyObject *value = POP();
ep->me_value = value;
Py_DECREF(old_value);
/* Ensure dict is GC tracked if it needs to be */
if (!_PyObject_GC_IS_TRACKED(dict) && _PyObject_GC_MAY_BE_TRACKED(value)) {
_PyObject_GC_TRACK(dict);
}
/* PEP 509 */
dict->ma_version_tag = DICT_NEXT_VERSION();
Py_DECREF(owner);
DISPATCH();
}
case TARGET(STORE_ATTR_SLOT): {
assert(cframe.use_tracing == 0);
PyObject *owner = TOP();
PyTypeObject *tp = Py_TYPE(owner);
SpecializedCacheEntry *caches = GET_CACHE();
_PyAdaptiveEntry *cache0 = &caches[0].adaptive;
_PyAttrCache *cache1 = &caches[-1].attr;
assert(cache1->tp_version != 0);
DEOPT_IF(tp->tp_version_tag != cache1->tp_version, STORE_ATTR);
char *addr = (char *)owner + cache0->index;
STAT_INC(STORE_ATTR, hit);
record_cache_hit(cache0);
STACK_SHRINK(1);
PyObject *value = POP();
PyObject *old_value = *(PyObject **)addr;
*(PyObject **)addr = value;
Py_XDECREF(old_value);
Py_DECREF(owner);
DISPATCH();
}
case TARGET(COMPARE_OP): {
assert(oparg <= Py_GE);
PyObject *right = POP();
@@ -4429,6 +4545,7 @@ opname ## _miss: \
}
MISS_WITH_CACHE(LOAD_ATTR)
MISS_WITH_CACHE(STORE_ATTR)
MISS_WITH_CACHE(LOAD_GLOBAL)
MISS_WITH_OPARG_COUNTER(BINARY_SUBSCR)

@@ -43,10 +43,10 @@ static void *opcode_targets[256] = {
&&TARGET_LOAD_GLOBAL_ADAPTIVE,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
&&TARGET_STORE_ATTR_ADAPTIVE,
&&TARGET_STORE_ATTR_SPLIT_KEYS,
&&TARGET_STORE_ATTR_SLOT,
&&TARGET_STORE_ATTR_WITH_HINT,
&&_unknown_opcode,
&&TARGET_WITH_EXCEPT_START,
&&TARGET_GET_AITER,

@@ -173,6 +173,7 @@ _Py_PrintSpecializationStats(void)
print_stats(out, &_specialization_stats[LOAD_ATTR], "load_attr");
print_stats(out, &_specialization_stats[LOAD_GLOBAL], "load_global");
print_stats(out, &_specialization_stats[BINARY_SUBSCR], "binary_subscr");
print_stats(out, &_specialization_stats[STORE_ATTR], "store_attr");
if (out != stderr) {
fclose(out);
}
@@ -262,13 +263,15 @@ static uint8_t adaptive_opcodes[256] = {
[LOAD_ATTR] = LOAD_ATTR_ADAPTIVE,
[LOAD_GLOBAL] = LOAD_GLOBAL_ADAPTIVE,
[BINARY_SUBSCR] = BINARY_SUBSCR_ADAPTIVE,
[STORE_ATTR] = STORE_ATTR_ADAPTIVE,
};
/* The number of cache entries required for a "family" of instructions. */
static uint8_t cache_requirements[256] = {
[LOAD_ATTR] = 2, /* _PyAdaptiveEntry and _PyLoadAttrCache */
[LOAD_ATTR] = 2, /* _PyAdaptiveEntry and _PyAttrCache */
[LOAD_GLOBAL] = 2, /* _PyAdaptiveEntry and _PyLoadGlobalCache */
[BINARY_SUBSCR] = 0,
[STORE_ATTR] = 2, /* _PyAdaptiveEntry and _PyAttrCache */
};
/* Return the oparg for the cache_offset and instruction index.
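
For orientation, a conceptual sketch in Python (an editor's illustration only; the real cache is the SpecializedCacheEntry union shown earlier, not Python objects) of why the LOAD_ATTR and STORE_ATTR families reserve two entries and why the handlers read caches[0].adaptive and caches[-1].attr:

from dataclasses import dataclass

@dataclass
class AdaptiveEntry:        # stands in for _PyAdaptiveEntry
    original_oparg: int
    counter: int
    index: int = 0

@dataclass
class AttrCache:            # stands in for _PyAttrCache
    tp_version: int = 0
    dk_version_or_hint: int = 0

# The family occupies two consecutive cache entries.  GET_CACHE() yields the
# adaptive entry, so its companion attribute cache sits immediately before it.
family = [AttrCache(), AdaptiveEntry(original_oparg=1, counter=0)]
cache0 = family[1]          # what the C code calls caches[0].adaptive
cache1 = family[0]          # what the C code calls caches[-1].attr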
@@ -416,7 +419,7 @@ _Py_Quicken(PyCodeObject *code) {
static int
specialize_module_load_attr(
PyObject *owner, _Py_CODEUNIT *instr, PyObject *name,
_PyAdaptiveEntry *cache0, _PyLoadAttrCache *cache1)
_PyAdaptiveEntry *cache0, _PyAttrCache *cache1)
{
PyModuleObject *m = (PyModuleObject *)owner;
PyObject *value = NULL;
@@ -475,15 +478,24 @@ typedef enum {
MUTABLE, /* Instance of a mutable class; might, or might not, be a descriptor */
ABSENT, /* Attribute is not present on the class */
DUNDER_CLASS, /* __class__ attribute */
GETATTRIBUTE_OVERRIDDEN /* __getattribute__ has been overridden */
GETSET_OVERRIDDEN /* __getattribute__ or __setattr__ has been overridden */
} DesciptorClassification;
static DesciptorClassification
analyze_descriptor(PyTypeObject *type, PyObject *name, PyObject **descr)
analyze_descriptor(PyTypeObject *type, PyObject *name, PyObject **descr, int store)
{
if (type->tp_getattro != PyObject_GenericGetAttr) {
*descr = NULL;
return GETATTRIBUTE_OVERRIDDEN;
if (store) {
if (type->tp_setattro != PyObject_GenericSetAttr) {
*descr = NULL;
return GETSET_OVERRIDDEN;
}
}
else {
if (type->tp_getattro != PyObject_GenericGetAttr) {
*descr = NULL;
return GETSET_OVERRIDDEN;
}
}
PyObject *descriptor = _PyType_Lookup(type, name);
*descr = descriptor;
@@ -522,11 +534,92 @@ analyze_descriptor(PyTypeObject *type, PyObject *name, PyObject **descr)
return NON_DESCRIPTOR;
}
static int
specialize_dict_access(
PyObject *owner, _Py_CODEUNIT *instr, PyTypeObject *type,
DesciptorClassification kind, PyObject *name,
_PyAdaptiveEntry *cache0, _PyAttrCache *cache1,
int base_op, int split_op, int hint_op)
{
assert(kind == NON_OVERRIDING || kind == NON_DESCRIPTOR || kind == ABSENT);
// No desciptor, or non overriding.
if (type->tp_dictoffset < 0) {
SPECIALIZATION_FAIL(base_op, type, name, "negative offset");
return 0;
}
if (type->tp_dictoffset > 0) {
PyObject **dictptr = (PyObject **) ((char *)owner + type->tp_dictoffset);
if (*dictptr == NULL || !PyDict_CheckExact(*dictptr)) {
SPECIALIZATION_FAIL(base_op, type, name, "no dict or not a dict");
return 0;
}
// We found an instance with a __dict__.
PyDictObject *dict = (PyDictObject *)*dictptr;
if ((type->tp_flags & Py_TPFLAGS_HEAPTYPE)
&& dict->ma_keys == ((PyHeapTypeObject*)type)->ht_cached_keys
) {
// Keys are shared
assert(PyUnicode_CheckExact(name));
Py_hash_t hash = PyObject_Hash(name);
if (hash == -1) {
return -1;
}
PyObject *value;
Py_ssize_t index = _Py_dict_lookup(dict, name, hash, &value);
assert (index != DKIX_ERROR);
if (index != (uint16_t)index) {
SPECIALIZATION_FAIL(base_op, type, name,
index < 0 ? "attribute not in dict" : "index out of range");
return 0;
}
uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(dict);
if (keys_version == 0) {
SPECIALIZATION_FAIL(base_op, type, name, "no more key versions");
return 0;
}
cache1->dk_version_or_hint = keys_version;
cache1->tp_version = type->tp_version_tag;
cache0->index = (uint16_t)index;
*instr = _Py_MAKECODEUNIT(split_op, _Py_OPARG(*instr));
return 0;
}
else {
PyObject *value = NULL;
Py_ssize_t hint =
_PyDict_GetItemHint(dict, name, -1, &value);
if (hint != (uint32_t)hint) {
SPECIALIZATION_FAIL(base_op, type, name, "hint out of range");
return 0;
}
cache1->dk_version_or_hint = (uint32_t)hint;
cache1->tp_version = type->tp_version_tag;
*instr = _Py_MAKECODEUNIT(hint_op, _Py_OPARG(*instr));
return 1;
}
}
assert(type->tp_dictoffset == 0);
/* No attribute in instance dictionary */
switch(kind) {
case NON_OVERRIDING:
SPECIALIZATION_FAIL(base_op, type, name, "non-overriding descriptor");
return 0;
case NON_DESCRIPTOR:
/* To do -- Optimize this case */
SPECIALIZATION_FAIL(base_op, type, name, "non descriptor");
return 0;
case ABSENT:
SPECIALIZATION_FAIL(base_op, type, name, "no attribute");
return 0;
default:
Py_UNREACHABLE();
}
}
int
_Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, SpecializedCacheEntry *cache)
{
_PyAdaptiveEntry *cache0 = &cache->adaptive;
_PyLoadAttrCache *cache1 = &cache[-1].load_attr;
_PyAttrCache *cache1 = &cache[-1].attr;
if (PyModule_CheckExact(owner)) {
int err = specialize_module_load_attr(owner, instr, name, cache0, cache1);
if (err) {
@@ -541,7 +634,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, Sp
}
}
PyObject *descr;
DesciptorClassification kind = analyze_descriptor(type, name, &descr);
DesciptorClassification kind = analyze_descriptor(type, name, &descr, 0);
switch(kind) {
case OVERRIDING:
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "overriding descriptor");
@@ -557,6 +650,10 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, Sp
PyMemberDescrObject *member = (PyMemberDescrObject *)descr;
struct PyMemberDef *dmem = member->d_member;
Py_ssize_t offset = dmem->offset;
if (dmem->flags & PY_AUDIT_READ) {
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "audit read");
goto fail;
}
if (offset != (uint16_t)offset) {
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "offset out of range");
goto fail;
@@ -583,7 +680,7 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, Sp
case MUTABLE:
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "mutable class attribute");
goto fail;
case GETATTRIBUTE_OVERRIDDEN:
case GETSET_OVERRIDDEN:
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "__getattribute__ overridden");
goto fail;
case NON_OVERRIDING:
@@ -591,77 +688,15 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, Sp
case ABSENT:
break;
}
assert(kind == NON_OVERRIDING || kind == NON_DESCRIPTOR || kind == ABSENT);
// No desciptor, or non overriding.
if (type->tp_dictoffset < 0) {
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "negative offset");
goto fail;
int err = specialize_dict_access(
owner, instr, type, kind, name, cache0, cache1,
LOAD_ATTR, LOAD_ATTR_SPLIT_KEYS, LOAD_ATTR_WITH_HINT
);
if (err < 0) {
return -1;
}
if (type->tp_dictoffset > 0) {
PyObject **dictptr = (PyObject **) ((char *)owner + type->tp_dictoffset);
if (*dictptr == NULL || !PyDict_CheckExact(*dictptr)) {
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "no dict or not a dict");
goto fail;
}
// We found an instance with a __dict__.
PyDictObject *dict = (PyDictObject *)*dictptr;
if ((type->tp_flags & Py_TPFLAGS_HEAPTYPE)
&& dict->ma_keys == ((PyHeapTypeObject*)type)->ht_cached_keys
) {
// Keys are shared
assert(PyUnicode_CheckExact(name));
Py_hash_t hash = PyObject_Hash(name);
if (hash == -1) {
return -1;
}
PyObject *value;
Py_ssize_t index = _Py_dict_lookup(dict, name, hash, &value);
assert (index != DKIX_ERROR);
if (index != (uint16_t)index) {
SPECIALIZATION_FAIL(LOAD_ATTR, type, name,
index < 0 ? "attribute not in dict" : "index out of range");
goto fail;
}
uint32_t keys_version = _PyDictKeys_GetVersionForCurrentState(dict);
if (keys_version == 0) {
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "no more key versions");
goto fail;
}
cache1->dk_version_or_hint = keys_version;
cache1->tp_version = type->tp_version_tag;
cache0->index = (uint16_t)index;
*instr = _Py_MAKECODEUNIT(LOAD_ATTR_SPLIT_KEYS, _Py_OPARG(*instr));
goto success;
}
else {
PyObject *value = NULL;
Py_ssize_t hint =
_PyDict_GetItemHint(dict, name, -1, &value);
if (hint != (uint32_t)hint) {
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "hint out of range");
goto fail;
}
cache1->dk_version_or_hint = (uint32_t)hint;
cache1->tp_version = type->tp_version_tag;
*instr = _Py_MAKECODEUNIT(LOAD_ATTR_WITH_HINT, _Py_OPARG(*instr));
goto success;
}
}
assert(type->tp_dictoffset == 0);
/* No attribute in instance dictionary */
switch(kind) {
case NON_OVERRIDING:
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "non-overriding descriptor");
goto fail;
case NON_DESCRIPTOR:
/* To do -- Optimize this case */
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "non descriptor");
goto fail;
case ABSENT:
SPECIALIZATION_FAIL(LOAD_ATTR, type, name, "no attribute");
goto fail;
default:
Py_UNREACHABLE();
if (err) {
goto success;
}
fail:
STAT_INC(LOAD_ATTR, specialization_failure);
@@ -675,6 +710,87 @@ _Py_Specialize_LoadAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, Sp
return 0;
}
int
_Py_Specialize_StoreAttr(PyObject *owner, _Py_CODEUNIT *instr, PyObject *name, SpecializedCacheEntry *cache)
{
_PyAdaptiveEntry *cache0 = &cache->adaptive;
_PyAttrCache *cache1 = &cache[-1].attr;
PyTypeObject *type = Py_TYPE(owner);
if (PyModule_CheckExact(owner)) {
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "module attribute");
goto fail;
}
PyObject *descr;
DesciptorClassification kind = analyze_descriptor(type, name, &descr, 1);
switch(kind) {
case OVERRIDING:
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "overriding descriptor");
goto fail;
case METHOD:
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "method");
goto fail;
case PROPERTY:
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "property");
goto fail;
case OBJECT_SLOT:
{
PyMemberDescrObject *member = (PyMemberDescrObject *)descr;
struct PyMemberDef *dmem = member->d_member;
Py_ssize_t offset = dmem->offset;
if (dmem->flags & READONLY) {
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "read only");
goto fail;
}
if (offset != (uint16_t)offset) {
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "offset out of range");
goto fail;
}
assert(dmem->type == T_OBJECT_EX);
assert(offset > 0);
cache0->index = (uint16_t)offset;
cache1->tp_version = type->tp_version_tag;
*instr = _Py_MAKECODEUNIT(STORE_ATTR_SLOT, _Py_OPARG(*instr));
goto success;
}
case DUNDER_CLASS:
case OTHER_SLOT:
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "other slot");
goto fail;
case MUTABLE:
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "mutable class attribute");
goto fail;
case GETSET_OVERRIDDEN:
SPECIALIZATION_FAIL(STORE_ATTR, type, name, "__setattr__ overridden");
goto fail;
case NON_OVERRIDING:
case NON_DESCRIPTOR:
case ABSENT:
break;
}
int err = specialize_dict_access(
owner, instr, type, kind, name, cache0, cache1,
STORE_ATTR, STORE_ATTR_SPLIT_KEYS, STORE_ATTR_WITH_HINT
);
if (err < 0) {
return -1;
}
if (err) {
goto success;
}
fail:
STAT_INC(STORE_ATTR, specialization_failure);
assert(!PyErr_Occurred());
cache_backoff(cache0);
return 0;
success:
STAT_INC(STORE_ATTR, specialization_success);
assert(!PyErr_Occurred());
cache0->counter = saturating_start();
return 0;
}
int
_Py_Specialize_LoadGlobal(
PyObject *globals, PyObject *builtins,