Fix-up comments and add a sentinel variable for clarity.

This commit is contained in:
Raymond Hettinger 2012-03-30 21:50:40 -07:00
parent 41eb79a016
commit 7f7a5a7b87
1 changed file with 17 additions and 16 deletions

View File

@ -154,8 +154,8 @@ def lru_cache(maxsize=100, typed=False):
Arguments to the cached function must be hashable. Arguments to the cached function must be hashable.
View the cache statistics named tuple (hits, misses, maxsize, currsize) with View the cache statistics named tuple (hits, misses, maxsize, currsize)
f.cache_info(). Clear the cache and statistics with f.cache_clear(). with f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__. Access the underlying function with f.__wrapped__.
See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
@ -169,18 +169,19 @@ def lru_cache(maxsize=100, typed=False):
def decorating_function(user_function): def decorating_function(user_function):
cache = dict() cache = {}
hits = misses = 0 hits = misses = 0
kwd_mark = (object(),) # separate positional and keyword args kwd_mark = (object(),) # separate positional and keyword args
cache_get = cache.get # bound method to lookup key or return None cache_get = cache.get # bound method to lookup key or return None
_len = len # localize the global len() function sentinel = object() # unique object used with cache_get
lock = Lock() # because linkedlist updates aren't threadsafe _len = len # localize the global len() function
root = [] # root of the circular doubly linked list lock = Lock() # because linkedlist updates aren't threadsafe
root[:] = [root, root, None, None] # initialize by pointing to self root = [] # root of the circular doubly linked list
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields root[:] = [root, root, None, None] # initialize by pointing to self
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
def make_key(args, kwds, typed, tuple=tuple, sorted=sorted, type=type): def make_key(args, kwds, typed, tuple=tuple, sorted=sorted, type=type):
# helper function to build a cache key from positional and keyword args # build a cache key from positional and keyword args
key = args key = args
if kwds: if kwds:
sorted_items = tuple(sorted(kwds.items())) sorted_items = tuple(sorted(kwds.items()))
@ -194,7 +195,7 @@ def lru_cache(maxsize=100, typed=False):
if maxsize == 0: if maxsize == 0:
def wrapper(*args, **kwds): def wrapper(*args, **kwds):
# no caching, just do a statistics update after a successful call # no caching, just a statistics update after a successful call
nonlocal misses nonlocal misses
result = user_function(*args, **kwds) result = user_function(*args, **kwds)
misses += 1 misses += 1
@ -206,8 +207,8 @@ def lru_cache(maxsize=100, typed=False):
# simple caching without ordering or size limit # simple caching without ordering or size limit
nonlocal hits, misses nonlocal hits, misses
key = make_key(args, kwds, typed) if kwds or typed else args key = make_key(args, kwds, typed) if kwds or typed else args
result = cache_get(key, root) # root used here as a unique not-found sentinel result = cache_get(key, sentinel)
if result is not root: if result is not sentinel:
hits += 1 hits += 1
return result return result
result = user_function(*args, **kwds) result = user_function(*args, **kwds)
@ -224,7 +225,7 @@ def lru_cache(maxsize=100, typed=False):
with lock: with lock:
link = cache_get(key) link = cache_get(key)
if link is not None: if link is not None:
# record recent use of the key by moving it to the front of the list # move the link to the front of the circular queue
link_prev, link_next, key, result = link link_prev, link_next, key, result = link
link_prev[NEXT] = link_next link_prev[NEXT] = link_next
link_next[PREV] = link_prev link_next[PREV] = link_prev
@ -237,7 +238,7 @@ def lru_cache(maxsize=100, typed=False):
result = user_function(*args, **kwds) result = user_function(*args, **kwds)
with lock: with lock:
if _len(cache) < maxsize: if _len(cache) < maxsize:
# put result in a new link at the front of the list # put result in a new link at the front of the queue
last = root[PREV] last = root[PREV]
link = [last, root, key, result] link = [last, root, key, result]
cache[key] = last[NEXT] = root[PREV] = link cache[key] = last[NEXT] = root[PREV] = link