2015-09-26 05:38:01 -03:00
|
|
|
import sys
|
|
|
|
import warnings
|
|
|
|
from inspect import isabstract
|
2023-09-14 15:33:18 -03:00
|
|
|
from typing import Any
|
2023-09-10 21:07:18 -03:00
|
|
|
|
2015-09-26 05:38:01 -03:00
|
|
|
from test import support
|
2020-08-07 18:55:35 -03:00
|
|
|
from test.support import os_helper
|
2024-01-21 06:25:15 -04:00
|
|
|
from test.support import refleak_helper
|
2023-09-10 21:07:18 -03:00
|
|
|
|
2023-09-11 04:02:35 -03:00
|
|
|
from .runtests import HuntRefleak
|
|
|
|
from .utils import clear_caches
|
2020-08-07 18:55:35 -03:00
|
|
|
|
2018-02-18 08:41:58 -04:00
|
|
|
try:
|
|
|
|
from _abc import _get_dump
|
|
|
|
except ImportError:
|
2019-04-08 20:36:34 -03:00
|
|
|
import weakref
|
|
|
|
|
2018-02-18 08:41:58 -04:00
|
|
|
def _get_dump(cls):
|
2019-04-08 20:36:34 -03:00
|
|
|
# Reimplement _get_dump() for pure-Python implementation of
|
|
|
|
# the abc module (Lib/_py_abc.py)
|
|
|
|
registry_weakrefs = set(weakref.ref(obj) for obj in cls._abc_registry)
|
|
|
|
return (registry_weakrefs, cls._abc_cache,
|
2018-02-18 08:41:58 -04:00
|
|
|
cls._abc_negative_cache, cls._abc_negative_cache_version)
|
2015-09-26 05:38:01 -03:00
|
|
|
|
|
|
|
|
2023-09-09 06:18:14 -03:00
|
|
|
def runtest_refleak(test_name, test_func,
                    hunt_refleak: HuntRefleak,
                    quiet: bool):
    """Run a test multiple times, looking for reference leaks.

    The test is run hunt_refleak.warmups + hunt_refleak.runs times; the
    warmup runs are excluded from the leak verdict (they let lazily
    filled caches settle).  Leak reports are appended to
    hunt_refleak.filename.

    Requires a debug build of Python (relies on sys.gettotalrefcount()).

    Returns:
        (failed, results) tuple: failed is True if leaks of references,
        memory blocks or file descriptors were detected, False otherwise;
        results is the value returned by the last call to test_func().
    """
    # This code is hackish and inelegant, but it seems to do the job.
    import copyreg
    import collections.abc

    if not hasattr(sys, 'gettotalrefcount'):
        raise Exception("Tracking reference leaks requires a debug build "
                        "of Python")

    # Avoid false positives due to various caches
    # filling slowly with random data:
    warm_caches()

    # Save current values for dash_R_cleanup() to restore.
    fs = warnings.filters[:]
    ps = copyreg.dispatch_table.copy()
    pic = sys.path_importer_cache.copy()
    zdc: dict[str, Any] | None
    try:
        import zipimport
    except ImportError:
        zdc = None  # Run unmodified on platforms without zipimport support
    else:
        # private attribute that mypy doesn't know about:
        zdc = zipimport._zip_directory_cache.copy()  # type: ignore[attr-defined]
    # Snapshot every collections.abc ABC's registry (as sets of weakrefs)
    # so that dash_R_cleanup() can restore them between runs.
    abcs = {}
    for abc in [getattr(collections.abc, a) for a in collections.abc.__all__]:
        if not isabstract(abc):
            continue
        for obj in abc.__subclasses__() + [abc]:
            abcs[obj] = _get_dump(obj)[0]

    # bpo-31217: Integer pool to get a single integer object for the same
    # value. The pool is used to prevent false alarm when checking for memory
    # block leaks. Fill the pool with values in -1000..1000 which are the most
    # common (reference, memory block, file descriptor) differences.
    int_pool = {value: value for value in range(-1000, 1000)}
    def get_pooled_int(value):
        # Return a shared int object for *value* so that storing deltas
        # does not itself allocate new objects between runs.
        return int_pool.setdefault(value, value)

    warmups = hunt_refleak.warmups
    runs = hunt_refleak.runs
    filename = hunt_refleak.filename
    repcount = warmups + runs

    # Pre-allocate to ensure that the loop doesn't allocate anything new
    rep_range = list(range(repcount))
    rc_deltas = [0] * repcount
    alloc_deltas = [0] * repcount
    fd_deltas = [0] * repcount
    # Bind to locals once; attribute lookups inside the loop would perturb
    # the measurements less predictably.
    getallocatedblocks = sys.getallocatedblocks
    gettotalrefcount = sys.gettotalrefcount
    getunicodeinternedsize = sys.getunicodeinternedsize
    fd_count = os_helper.fd_count
    # initialize variables to make pyflakes quiet
    rc_before = alloc_before = fd_before = interned_before = 0

    if not quiet:
        print("beginning", repcount, "repetitions. Showing number of leaks "
                "(. for 0 or less, X for 10 or more)",
              file=sys.stderr)
        # Print a digit ruler, with a ':' separating warmup runs from
        # counted runs.
        numbers = ("1234567890"*(repcount//10 + 1))[:repcount]
        numbers = numbers[:warmups] + ':' + numbers[warmups:]
        print(numbers, file=sys.stderr, flush=True)

    results = None
    # Start from a clean, collected state before the first measurement.
    dash_R_cleanup(fs, ps, pic, zdc, abcs)
    support.gc_collect()

    for i in rep_range:
        # Flag that a refleak hunt is in progress while the test runs,
        # restoring the previous flag value afterwards.
        current = refleak_helper._hunting_for_refleaks
        refleak_helper._hunting_for_refleaks = True
        try:
            results = test_func()
        finally:
            refleak_helper._hunting_for_refleaks = current

        dash_R_cleanup(fs, ps, pic, zdc, abcs)
        support.gc_collect()

        # Read memory statistics immediately after the garbage collection.
        # Also, readjust the reference counts and alloc blocks by ignoring
        # any strings that might have been interned during test_func. These
        # strings will be deallocated at runtime shutdown
        interned_after = getunicodeinternedsize()
        alloc_after = getallocatedblocks() - interned_after
        rc_after = gettotalrefcount() - interned_after * 2
        fd_after = fd_count()

        rc_deltas[i] = get_pooled_int(rc_after - rc_before)
        alloc_deltas[i] = get_pooled_int(alloc_after - alloc_before)
        fd_deltas[i] = get_pooled_int(fd_after - fd_before)

        if not quiet:
            # use max, not sum, so total_leaks is one of the pooled ints
            total_leaks = max(rc_deltas[i], alloc_deltas[i], fd_deltas[i])
            if total_leaks <= 0:
                symbol = '.'
            elif total_leaks < 10:
                symbol = (
                    '.', '1', '2', '3', '4', '5', '6', '7', '8', '9',
                )[total_leaks]
            else:
                symbol = 'X'
            if i == warmups:
                # Separator between warmup and counted runs (matches the
                # ':' in the ruler printed above).
                print(' ', end='', file=sys.stderr, flush=True)
            print(symbol, end='', file=sys.stderr, flush=True)
            # Drop references so they don't count in the next iteration.
            del total_leaks
            del symbol

        alloc_before = alloc_after
        rc_before = rc_after
        fd_before = fd_after
        interned_before = interned_after

    if not quiet:
        print(file=sys.stderr)

    # These checkers return False on success, True on failure
    def check_rc_deltas(deltas):
        # Checker for reference counters and memory blocks.
        #
        # bpo-30776: Try to ignore false positives:
        #
        # [3, 0, 0]
        # [0, 1, 0]
        # [8, -8, 1]
        #
        # Expected leaks:
        #
        # [5, 5, 6]
        # [10, 1, 1]
        return all(delta >= 1 for delta in deltas)

    def check_fd_deltas(deltas):
        # Any file-descriptor change at all is treated as a leak.
        return any(deltas)

    failed = False
    for deltas, item_name, checker in [
        (rc_deltas, 'references', check_rc_deltas),
        (alloc_deltas, 'memory blocks', check_rc_deltas),
        (fd_deltas, 'file descriptors', check_fd_deltas)
    ]:
        # ignore warmup runs
        deltas = deltas[warmups:]
        failing = checker(deltas)
        # Non-zero deltas that don't meet the checker's bar are reported
        # as "suspicious" but do not fail the test.
        suspicious = any(deltas)
        if failing or suspicious:
            msg = '%s leaked %s %s, sum=%s' % (
                test_name, deltas, item_name, sum(deltas))
            print(msg, end='', file=sys.stderr)
            if failing:
                print(file=sys.stderr, flush=True)
                with open(filename, "a", encoding="utf-8") as refrep:
                    print(msg, file=refrep)
                    refrep.flush()
                failed = True
            else:
                print(' (this is fine)', file=sys.stderr, flush=True)
    return (failed, results)
|
2015-09-26 05:38:01 -03:00
|
|
|
|
|
|
|
|
|
|
|
def dash_R_cleanup(fs, ps, pic, zdc, abcs):
    """Restore interpreter state between refleak measurement runs.

    The arguments are the snapshots taken by runtest_refleak():
    fs -- saved warnings filters
    ps -- saved copyreg dispatch table
    pic -- saved sys.path_importer_cache
    zdc -- saved zipimport directory cache (None if zipimport is missing)
    abcs -- mapping of ABC -> set of weakrefs to its registered classes
    """
    import copyreg
    import collections.abc

    # Put the saved snapshots back in place.
    warnings.filters[:] = fs
    copyreg.dispatch_table.clear()
    copyreg.dispatch_table.update(ps)
    sys.path_importer_cache.clear()
    sys.path_importer_cache.update(pic)
    try:
        import zipimport
    except ImportError:
        # No zipimport support on this platform; nothing to restore.
        pass
    else:
        zipimport._zip_directory_cache.clear()
        zipimport._zip_directory_cache.update(zdc)

    # Clear ABC registries, restoring previously saved ABC registries.
    # (ignore deprecation warning for collections.abc.ByteString)
    abstract_classes = (getattr(collections.abc, name)
                        for name in collections.abc.__all__)
    for abstract_cls in abstract_classes:
        if not isabstract(abstract_cls):
            continue
        for klass in abstract_cls.__subclasses__() + [abstract_cls]:
            for wref in abcs.get(klass, set()):
                registered = wref()
                # Re-register only classes that are still alive.
                if registered is not None:
                    klass.register(registered)
            klass._abc_caches_clear()

    # Clear caches
    clear_caches()

    # Clear other caches last (previous function calls can re-populate them):
    sys._clear_internal_caches()
|
2021-09-01 12:45:27 -03:00
|
|
|
|
2016-11-11 05:46:44 -04:00
|
|
|
|
2015-09-26 05:38:01 -03:00
|
|
|
def warm_caches():
    """Prime interpreter value caches (1-byte bytes, 1-char str, small ints).

    Called before the measurement runs so that these lazily filled caches
    do not show up as spurious reference or memory-block growth.
    """
    # char cache: slice a bytes object one byte at a time.
    byte_table = bytes(range(256))
    for offset in range(256):
        byte_table[offset:offset + 1]
    # unicode cache: touch every latin-1 single-character string.
    for code_point in range(256):
        chr(code_point)
    # int cache: materialize the small-int range.
    list(range(-5, 257))
|