Issue 10593: Adopt Nick's suggestion for an lru_cache with maxsize=None.

Raymond Hettinger 2010-12-01 03:45:41 +00:00
parent ed3a7d2d60
commit c79fb0e52d
3 changed files with 79 additions and 26 deletions

Doc/library/functools.rst

@@ -32,7 +32,7 @@ The :mod:`functools` module defines the following functions:
A compare function is any callable that accepts two arguments, compares them,
and returns a negative number for less-than, zero for equality, or a positive
number for greater-than. A key function is a callable that accepts one
argument and returns another value that indicates the position in the desired
argument and returns another value indicating the position in the desired
collation sequence.
Example::
@@ -51,10 +51,14 @@ The :mod:`functools` module defines the following functions:
Since a dictionary is used to cache results, the positional and keyword
arguments to the function must be hashable.
If *maxsize* is set to None, the LRU feature is disabled and the cache
can grow without bound.
To help measure the effectiveness of the cache and tune the *maxsize*
parameter, the wrapped function is instrumented with a :func:`cache_info`
function that returns a :term:`named tuple` showing *hits*, *misses*,
*maxsize* and *currsize*.
*maxsize* and *currsize*. In a multi-threaded environment, the hits
and misses are approximate.
The decorator also provides a :func:`cache_clear` function for clearing or
invalidating the cache.
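Not part of this patch, but a quick sketch of the instrumentation described above, using a hypothetical ``squared`` function (any cached callable would behave the same way)::

    from functools import lru_cache

    @lru_cache(maxsize=32)
    def squared(n):
        return n * n

    >>> [squared(x) for x in (1, 2, 1, 3, 1)]
    [1, 4, 1, 9, 1]
    >>> squared.cache_info()
    CacheInfo(hits=2, misses=3, maxsize=32, currsize=3)
    >>> squared.cache_clear()
    >>> squared.cache_info()
    CacheInfo(hits=0, misses=0, maxsize=32, currsize=0)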
@@ -89,13 +93,26 @@ The :mod:`functools` module defines the following functions:
>>> print(get_pep.cache_info())
CacheInfo(hits=3, misses=8, maxsize=20, currsize=8)
Example of efficiently computing
`Fibonacci numbers <http://en.wikipedia.org/wiki/Fibonacci_number>`_
using a cache to implement a
`dynamic programming <http://en.wikipedia.org/wiki/Dynamic_programming>`_
technique::
    @lru_cache(maxsize=None)
    def fib(n):
        if n < 2:
            return n
        return fib(n-1) + fib(n-2)

    >>> print([fib(n) for n in range(16)])
    [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610]

    >>> print(fib.cache_info())
    CacheInfo(hits=28, misses=16, maxsize=None, currsize=16)
.. versionadded:: 3.2
.. seealso::

   Recipe for a `plain cache without the LRU feature
   <http://code.activestate.com/recipes/577479-simple-caching-decorator/>`_.
.. decorator:: total_ordering
Given a class defining one or more rich comparison ordering methods, this

Lib/functools.py

@@ -119,6 +119,9 @@ _CacheInfo = namedtuple("CacheInfo", "hits misses maxsize currsize")

def lru_cache(maxsize=100):
    """Least-recently-used cache decorator.

    If *maxsize* is set to None, the LRU features are disabled and the cache
    can grow without bound.

    Arguments to the cached function must be hashable.

    View the cache statistics named tuple (hits, misses, maxsize, currsize) with
@@ -136,13 +139,32 @@ def lru_cache(maxsize=100):

    def decorating_function(user_function,
                tuple=tuple, sorted=sorted, len=len, KeyError=KeyError):

        cache = OrderedDict()       # ordered least recent to most recent
        cache_popitem = cache.popitem
        cache_renew = cache.move_to_end
        hits = misses = 0
        kwd_mark = object()         # separates positional and keyword args
        lock = Lock()

        if maxsize is None:
            cache = dict()          # simple cache without ordering or size limit

            @wraps(user_function)
            def wrapper(*args, **kwds):
                nonlocal hits, misses
                key = args
                if kwds:
                    key += (kwd_mark,) + tuple(sorted(kwds.items()))
                try:
                    result = cache[key]
                    hits += 1
                except KeyError:
                    result = user_function(*args, **kwds)
                    cache[key] = result
                    misses += 1
                return result
        else:
            cache = OrderedDict()   # ordered least recent to most recent
            cache_popitem = cache.popitem
            cache_renew = cache.move_to_end

            @wraps(user_function)
            def wrapper(*args, **kwds):
                nonlocal hits, misses
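A standalone sketch (not part of the patch) of the cache-key construction used by the ``maxsize is None`` wrapper above: positional arguments, then a sentinel separator, then the keyword items in sorted order, so the same keyword arguments passed in a different order hit the same cache entry. ``make_key`` is a hypothetical helper for illustration, not a name in the module:

    kwd_mark = object()     # unique sentinel object, as in the wrapper above

    def make_key(args, kwds):
        key = args
        if kwds:
            key += (kwd_mark,) + tuple(sorted(kwds.items()))
        return key

    >>> make_key((1,), {'b': 2, 'c': 3}) == make_key((1,), {'c': 3, 'b': 2})
    True
    >>> make_key((1, 2), {}) == make_key((1,), {'b': 2})
    False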

Lib/test/test_functools.py

@@ -586,6 +586,20 @@ class TestLRU(unittest.TestCase):
        self.assertEqual(misses, 4)
        self.assertEqual(currsize, 2)

    def test_lru_with_maxsize_none(self):
        @functools.lru_cache(maxsize=None)
        def fib(n):
            if n < 2:
                return n
            return fib(n-1) + fib(n-2)
        self.assertEqual([fib(n) for n in range(16)],
            [0, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610])
        self.assertEqual(fib.cache_info(),
            functools._CacheInfo(hits=28, misses=16, maxsize=None, currsize=16))
        fib.cache_clear()
        self.assertEqual(fib.cache_info(),
            functools._CacheInfo(hits=0, misses=0, maxsize=None, currsize=0))

def test_main(verbose=None):
    test_classes = (
        TestPartial,