lru_cache: Add more comments. Fix comment typos. Clarify a comment. (GH-11795)
parent 7ab3d1573c
commit 2dda72a2e8
@ -661,6 +661,26 @@ sequence is empty.");
|
||||||
|
|
||||||
/* lru_cache object **********************************************************/
|
/* lru_cache object **********************************************************/
|
||||||
|
|
||||||
|
/* There are four principal algorithmic differences from the pure python version:
|
||||||
|
|
||||||
|
1). The C version relies on the GIL instead of having its own reentrant lock.
|
||||||
|
|
||||||
|
2). The prev/next link fields use borrowed references.
|
||||||
|
|
||||||
|
3). For a full cache, the pure python version rotates the location of the
|
||||||
|
root entry so that it never has to move individual links and it can
|
||||||
|
limit updates to just the key and result fields. However, in the C
|
||||||
|
version, links are temporarily removed while the cache dict updates are
|
||||||
|
occurring. Afterwards, they are appended or prepended back into the
|
||||||
|
doubly-linked lists.
|
||||||
|
|
||||||
|
4) In the Python version, the _HashSeq class is used to prevent __hash__
|
||||||
|
from being called more than once. In the C version, the "known hash"
|
||||||
|
variants of dictionary calls as used to the same effect.
|
||||||
|
|
||||||
|
*/
|
||||||
|
|
||||||
|
|
||||||
/* this object is used delimit args and keywords in the cache keys */
|
/* this object is used delimit args and keywords in the cache keys */
|
||||||
static PyObject *kwd_mark = NULL;
|
static PyObject *kwd_mark = NULL;
|
||||||
|
|
||||||
|
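Point 4 of the comment block added above refers to the "known hash" dictionary calls. The sketch below is a minimal illustration of that pattern, not code from the module: the key is hashed once and the precomputed hash is passed to the private _PyDict_GetItem_KnownHash and _PyDict_SetItem_KnownHash helpers, so a slow or reentrant __hash__ is never invoked twice. The helper name cache_get_or_store is hypothetical.

    #include "Python.h"

    /* Hypothetical helper (not part of _functoolsmodule.c): look a key up in a
       cache dict, storing the given value on a miss, while computing the
       key's hash only once. */
    static PyObject *
    cache_get_or_store(PyObject *cache, PyObject *key, PyObject *value)
    {
        Py_hash_t hash = PyObject_Hash(key);
        if (hash == -1) {
            return NULL;                /* unhashable key */
        }
        /* Returns a borrowed reference; NULL means "missing" or "error". */
        PyObject *hit = _PyDict_GetItem_KnownHash(cache, key, hash);
        if (hit != NULL) {
            Py_INCREF(hit);
            return hit;
        }
        if (PyErr_Occurred()) {
            return NULL;
        }
        /* Reuse the same hash for the insert, so __hash__ is not called again. */
        if (_PyDict_SetItem_KnownHash(cache, key, value, hash) < 0) {
            return NULL;
        }
        Py_INCREF(value);
        return value;
    }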
@@ -1009,14 +1029,15 @@ bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds
         link = self->root.next;
         lru_cache_extract_link(link);
         /* Remove it from the cache.
-           The cache dict holds one reference to the link,
-           and the linked list holds yet one reference to it. */
+           The cache dict holds one reference to the link.
+           We created one other reference when the link was created.
+           The linked list only has borrowed references. */
         popresult = _PyDict_Pop_KnownHash(self->cache, link->key,
                                           link->hash, Py_None);
         if (popresult == Py_None) {
             /* Getting here means that the user function call or another
                thread has already removed the old key from the dictionary.
-               This link is now an orpan. Since we don't want to leave the
+               This link is now an orphan. Since we don't want to leave the
                cache in an inconsistent state, we don't restore the link. */
             Py_DECREF(popresult);
             Py_DECREF(link);
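The hunk above depends on the ownership model spelled out in points 2 and 3 of the new comment block: the cache dict (plus the temporary reference taken when the link is created) owns the link, while the prev/next pointers of the doubly-linked list are borrowed. The sketch below uses hypothetical names (lru_link, extract_link, append_link) to show how such a circular list can be spliced without touching reference counts; it illustrates the idea and is not the module's actual code.

    #include "Python.h"

    /* Hypothetical link type: a Python object whose prev/next fields are
       borrowed pointers, so list surgery never changes reference counts. */
    typedef struct lru_link {
        PyObject_HEAD
        struct lru_link *prev;
        struct lru_link *next;
        Py_hash_t hash;
        PyObject *key;
        PyObject *result;
    } lru_link;

    /* Unhook a link from the circular doubly-linked list.  The link itself
       stays alive because the cache dict still holds a strong reference. */
    static void
    extract_link(lru_link *link)
    {
        link->prev->next = link->next;
        link->next->prev = link->prev;
    }

    /* Splice a link back in just before the root, i.e. into the
       most-recently-used position. */
    static void
    append_link(lru_link *root, lru_link *link)
    {
        lru_link *last = root->prev;
        last->next = link;
        link->prev = last;
        link->next = root;
        root->prev = link;
    }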
@@ -1048,7 +1069,7 @@ bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds
            prev and next fields set to valid values. We have to wait
            for successful insertion in the cache dict before adding the
            link to the linked list. Otherwise, the potentially reentrant
-           __eq__ call could cause the then ophan link to be visited. */
+           __eq__ call could cause the then orphan link to be visited. */
         if (_PyDict_SetItem_KnownHash(self->cache, key, (PyObject *)link,
                                       hash) < 0) {
             /* Somehow the cache dict update failed. We no longer can
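To make the ordering constraint in this last hunk concrete, here is a minimal sketch under the same hypothetical link layout as the earlier sketch: the link is published in the cache dict first, and only after that insertion succeeds is it spliced into the list, so a reentrant __eq__ triggered by the dict update can never visit an orphan link whose prev/next fields are still unset. The function name publish_link is an assumption for illustration, not part of the module.

    #include "Python.h"

    /* Hypothetical link type, as in the earlier sketch: prev/next are
       borrowed pointers; only the cache dict owns the link. */
    typedef struct lru_link {
        PyObject_HEAD
        struct lru_link *prev;
        struct lru_link *next;
    } lru_link;

    /* Insert into the cache dict before touching the linked list. */
    static int
    publish_link(PyObject *cache, PyObject *key, Py_hash_t hash,
                 lru_link *root, lru_link *link)
    {
        if (_PyDict_SetItem_KnownHash(cache, key, (PyObject *)link, hash) < 0) {
            /* The link was never reachable from the list, so the caller can
               simply discard its reference. */
            return -1;
        }
        /* Only now does the link get valid prev/next fields: splice it in
           just ahead of the root (most-recently-used position). */
        lru_link *last = root->prev;
        last->next = link;
        link->prev = last;
        link->next = root;
        root->prev = link;
        return 0;
    }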