Issue #14373: Other attempt to fix threaded test for lru_cache().

This commit is contained in:
Serhiy Storchaka 2015-06-08 12:44:43 +03:00
commit 29e26a72d7
1 changed files with 11 additions and 6 deletions

View File

@ -1110,10 +1110,10 @@ class TestLRU:
self.assertEqual(currsize, 0)
start = threading.Event()
def full(f, *args):
def full(k):
start.wait(10)
for _ in range(m):
f(*args)
self.assertEqual(f(k, 0), orig(k, 0))
def clear():
start.wait(10)
@ -1124,19 +1124,24 @@ class TestLRU:
sys.setswitchinterval(1e-6)
try:
# create n threads in order to fill cache
threads = [threading.Thread(target=full, args=[f, k, k])
threads = [threading.Thread(target=full, args=[k])
for k in range(n)]
with support.start_threads(threads):
start.set()
hits, misses, maxsize, currsize = f.cache_info()
if self.module is py_functools:
# XXX: Why can be not equal?
self.assertLessEqual(misses, n)
self.assertLessEqual(hits, m*n - misses)
else:
self.assertEqual(misses, n)
self.assertEqual(hits, m*n - misses)
self.assertEqual(currsize, n)
# create n threads in order to fill cache and 1 to clear it
threads = [threading.Thread(target=clear)]
threads += [threading.Thread(target=full, args=[f, k, k])
threads += [threading.Thread(target=full, args=[k])
for k in range(n)]
start.clear()
with support.start_threads(threads):