"""
|
2017-11-05 09:37:50 -04:00
|
|
|
Collect various information about Python to help debugging test failures.
|
2017-08-17 11:40:51 -03:00
|
|
|
"""
from __future__ import print_function

import errno
import re
import sys
import traceback
import unittest
import warnings


MS_WINDOWS = (sys.platform == 'win32')


def normalize_text(text):
    if text is None:
        return None
    text = str(text)
    text = re.sub(r'\s+', ' ', text)
    return text.strip()


class PythonInfo:
    def __init__(self):
        self.info = {}

    def add(self, key, value):
        if key in self.info:
            raise ValueError("duplicate key: %r" % key)

        if value is None:
            return

        if not isinstance(value, int):
            if not isinstance(value, str):
                # convert other objects like sys.flags to string
                value = str(value)

            value = value.strip()
            if not value:
                return

        self.info[key] = value

    def get_infos(self):
        """
        Get information as a key:value dictionary where values are strings.
        """
        return {key: str(value) for key, value in self.info.items()}
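

# Illustrative sketch of the convention used below (comments only, nothing is
# executed at import time): every collect_*() function receives PythonInfo.add
# as "info_add" and registers "key: value" pairs. Non-string, non-int values
# are converted to stripped strings, and None or empty values are dropped.
#
#   info = PythonInfo()
#   info.add('example.version', sys.version_info)   # 'example.version' is a made-up key
#   print(info.get_infos())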


def copy_attributes(info_add, obj, name_fmt, attributes, *, formatter=None):
    for attr in attributes:
        value = getattr(obj, attr, None)
        if value is None:
            continue
        name = name_fmt % attr
        if formatter is not None:
            value = formatter(attr, value)
        info_add(name, value)


def copy_attr(info_add, name, mod, attr_name):
    try:
        value = getattr(mod, attr_name)
    except AttributeError:
        return
    info_add(name, value)


def call_func(info_add, name, mod, func_name, *, formatter=None):
    try:
        func = getattr(mod, func_name)
    except AttributeError:
        return
    value = func()
    if formatter is not None:
        value = formatter(value)
    info_add(name, value)
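

# Minimal sketch of how a collector typically combines the helpers above; the
# module "spam" and its attributes are hypothetical:
#
#   def collect_spam(info_add):
#       try:
#           import spam
#       except ImportError:
#           return
#       copy_attributes(info_add, spam, 'spam.%s', ('VERSION', 'FLAGS'))
#       copy_attr(info_add, 'spam.author', spam, '__author__')
#       call_func(info_add, 'spam.status', spam, 'get_status')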


def collect_sys(info_add):
    attributes = (
        '_emscripten_info',
        '_framework',
        'abiflags',
        'api_version',
        'builtin_module_names',
        'byteorder',
        'dont_write_bytecode',
        'executable',
        'flags',
        'float_info',
        'float_repr_style',
        'hash_info',
        'hexversion',
        'implementation',
        'int_info',
        'maxsize',
        'maxunicode',
        'path',
        'platform',
        'platlibdir',
        'prefix',
        'thread_info',
        'version',
        'version_info',
        'winver',
    )
    copy_attributes(info_add, sys, 'sys.%s', attributes)

    call_func(info_add, 'sys.androidapilevel', sys, 'getandroidapilevel')
    call_func(info_add, 'sys.windowsversion', sys, 'getwindowsversion')
    call_func(info_add, 'sys.getrecursionlimit', sys, 'getrecursionlimit')

    encoding = sys.getfilesystemencoding()
    if hasattr(sys, 'getfilesystemencodeerrors'):
        encoding = '%s/%s' % (encoding, sys.getfilesystemencodeerrors())
    info_add('sys.filesystem_encoding', encoding)

    for name in ('stdin', 'stdout', 'stderr'):
        stream = getattr(sys, name)
        if stream is None:
            continue
        encoding = getattr(stream, 'encoding', None)
        if not encoding:
            continue
        errors = getattr(stream, 'errors', None)
        if errors:
            encoding = '%s/%s' % (encoding, errors)
        info_add('sys.%s.encoding' % name, encoding)

    # Were we compiled --with-pydebug?
    Py_DEBUG = hasattr(sys, 'gettotalrefcount')
    if Py_DEBUG:
        text = 'Yes (sys.gettotalrefcount() present)'
    else:
        text = 'No (sys.gettotalrefcount() missing)'
    info_add('build.Py_DEBUG', text)

    # Were we compiled --with-trace-refs?
    Py_TRACE_REFS = hasattr(sys, 'getobjects')
    if Py_TRACE_REFS:
        text = 'Yes (sys.getobjects() present)'
    else:
        text = 'No (sys.getobjects() missing)'
    info_add('build.Py_TRACE_REFS', text)


def collect_platform(info_add):
    import platform

    arch = platform.architecture()
    arch = ' '.join(filter(bool, arch))
    info_add('platform.architecture', arch)

    info_add('platform.python_implementation',
             platform.python_implementation())
    info_add('platform.platform',
             platform.platform(aliased=True))

    libc_ver = ('%s %s' % platform.libc_ver()).strip()
    if libc_ver:
        info_add('platform.libc_ver', libc_ver)


def collect_locale(info_add):
    import locale

    info_add('locale.getencoding', locale.getencoding())


def collect_builtins(info_add):
    info_add('builtins.float.float_format', float.__getformat__("float"))
    info_add('builtins.float.double_format', float.__getformat__("double"))


def collect_urandom(info_add):
    import os

    if hasattr(os, 'getrandom'):
        # PEP 524: Check if system urandom is initialized
        try:
            try:
                os.getrandom(1, os.GRND_NONBLOCK)
                state = 'ready (initialized)'
            except BlockingIOError as exc:
                state = 'not seeded yet (%s)' % exc
            info_add('os.getrandom', state)
        except OSError as exc:
            # Python was compiled on a more recent Linux version
            # than the current Linux kernel: ignore OSError(ENOSYS)
            if exc.errno != errno.ENOSYS:
                raise


def collect_os(info_add):
    import os

    def format_attr(attr, value):
        if attr in ('supports_follow_symlinks', 'supports_fd',
                    'supports_effective_ids'):
            return str(sorted(func.__name__ for func in value))
        else:
            return value

    attributes = (
        'name',
        'supports_bytes_environ',
        'supports_effective_ids',
        'supports_fd',
        'supports_follow_symlinks',
    )
    copy_attributes(info_add, os, 'os.%s', attributes, formatter=format_attr)

    for func in (
        'cpu_count',
        'getcwd',
        'getegid',
        'geteuid',
        'getgid',
        'getloadavg',
        'getresgid',
        'getresuid',
        'getuid',
        'uname',
    ):
        call_func(info_add, 'os.%s' % func, os, func)

    def format_groups(groups):
        return ', '.join(map(str, groups))

    call_func(info_add, 'os.getgroups', os, 'getgroups', formatter=format_groups)

    if hasattr(os, 'getlogin'):
        try:
            login = os.getlogin()
        except OSError:
            # getlogin() fails with "OSError: [Errno 25] Inappropriate ioctl
            # for device" on Travis CI
            pass
        else:
            info_add("os.login", login)

    # Environment variables used by the stdlib and tests. Don't log the full
    # environment: filter the list to avoid leaking sensitive information.
    #
    # HTTP_PROXY is not logged because it can contain a password.
    ENV_VARS = frozenset((
        "APPDATA",
        "AR",
        "ARCHFLAGS",
        "ARFLAGS",
        "AUDIODEV",
        "CC",
        "CFLAGS",
        "COLUMNS",
        "COMPUTERNAME",
        "COMSPEC",
        "CPP",
        "CPPFLAGS",
        "DISPLAY",
        "DISTUTILS_DEBUG",
        "DISTUTILS_USE_SDK",
        "DYLD_LIBRARY_PATH",
        "ENSUREPIP_OPTIONS",
        "HISTORY_FILE",
        "HOME",
        "HOMEDRIVE",
        "HOMEPATH",
        "IDLESTARTUP",
        "LANG",
        "LDFLAGS",
        "LDSHARED",
        "LD_LIBRARY_PATH",
        "LINES",
        "MACOSX_DEPLOYMENT_TARGET",
        "MAILCAPS",
        "MAKEFLAGS",
        "MIXERDEV",
        "MSSDK",
        "PATH",
        "PATHEXT",
        "PIP_CONFIG_FILE",
        "PLAT",
        "POSIXLY_CORRECT",
        "PY_SAX_PARSER",
        "ProgramFiles",
        "ProgramFiles(x86)",
        "RUNNING_ON_VALGRIND",
        "SDK_TOOLS_BIN",
        "SERVER_SOFTWARE",
        "SHELL",
        "SOURCE_DATE_EPOCH",
        "SYSTEMROOT",
        "TEMP",
        "TERM",
        "TILE_LIBRARY",
        "TMP",
        "TMPDIR",
        "TRAVIS",
        "TZ",
        "USERPROFILE",
        "VIRTUAL_ENV",
        "WAYLAND_DISPLAY",
        "WINDIR",
        "_PYTHON_HOST_PLATFORM",
        "_PYTHON_PROJECT_BASE",
        "_PYTHON_SYSCONFIGDATA_NAME",
        "__PYVENV_LAUNCHER__",

        # Sanitizer options
        "ASAN_OPTIONS",
        "LSAN_OPTIONS",
        "MSAN_OPTIONS",
        "TSAN_OPTIONS",
        "UBSAN_OPTIONS",
    ))
    for name, value in os.environ.items():
        uname = name.upper()
        if (uname in ENV_VARS
           # Copy PYTHON* and LC_* variables
           or uname.startswith(("PYTHON", "LC_"))
           # Visual Studio: VS140COMNTOOLS
           or (uname.startswith("VS") and uname.endswith("COMNTOOLS"))):
            info_add('os.environ[%s]' % name, value)

    if hasattr(os, 'umask'):
        mask = os.umask(0)
        os.umask(mask)
        info_add("os.umask", '0o%03o' % mask)


def collect_pwd(info_add):
    try:
        import pwd
    except ImportError:
        return
    import os

    uid = os.getuid()
    try:
        entry = pwd.getpwuid(uid)
    except KeyError:
        entry = None

    info_add('pwd.getpwuid(%s)' % uid,
             entry if entry is not None else '<KeyError>')

    if entry is None:
        # there is nothing interesting to read if the current user identifier
        # is not in the password database
        return

    if hasattr(os, 'getgrouplist'):
        groups = os.getgrouplist(entry.pw_name, entry.pw_gid)
        groups = ', '.join(map(str, groups))
        info_add('os.getgrouplist', groups)


def collect_readline(info_add):
    try:
        import readline
    except ImportError:
        return

    def format_attr(attr, value):
        if isinstance(value, int):
            return "%#x" % value
        else:
            return value

    attributes = (
        "_READLINE_VERSION",
        "_READLINE_RUNTIME_VERSION",
        "_READLINE_LIBRARY_VERSION",
    )
    copy_attributes(info_add, readline, 'readline.%s', attributes,
                    formatter=format_attr)

    if not hasattr(readline, "_READLINE_LIBRARY_VERSION"):
        # _READLINE_LIBRARY_VERSION has been added to CPython 3.7
        doc = getattr(readline, '__doc__', '')
        if 'libedit readline' in doc:
            info_add('readline.library', 'libedit readline')
        elif 'GNU readline' in doc:
            info_add('readline.library', 'GNU readline')


def collect_gdb(info_add):
    import subprocess

    try:
        proc = subprocess.Popen(["gdb", "-nx", "--version"],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                universal_newlines=True)
        version = proc.communicate()[0]
        if proc.returncode:
            # ignore gdb failure: test_gdb will log the error
            return
    except OSError:
        return

    # Only keep the first line
    version = version.splitlines()[0]
    info_add('gdb_version', version)


def collect_tkinter(info_add):
    try:
        import _tkinter
    except ImportError:
        pass
    else:
        attributes = ('TK_VERSION', 'TCL_VERSION')
        copy_attributes(info_add, _tkinter, 'tkinter.%s', attributes)

    try:
        import tkinter
    except ImportError:
        pass
    else:
        tcl = tkinter.Tcl()
        patchlevel = tcl.call('info', 'patchlevel')
        info_add('tkinter.info_patchlevel', patchlevel)


def collect_time(info_add):
    import time

    info_add('time.time', time.time())

    attributes = (
        'altzone',
        'daylight',
        'timezone',
        'tzname',
    )
    copy_attributes(info_add, time, 'time.%s', attributes)

    if hasattr(time, 'get_clock_info'):
        for clock in ('clock', 'monotonic', 'perf_counter',
                      'process_time', 'thread_time', 'time'):
            try:
                # prevent DeprecationWarning on get_clock_info('clock')
                with warnings.catch_warnings(record=True):
                    clock_info = time.get_clock_info(clock)
            except ValueError:
                # missing clock like time.thread_time()
                pass
            else:
                info_add('time.get_clock_info(%s)' % clock, clock_info)


def collect_curses(info_add):
    try:
        import curses
    except ImportError:
        return

    copy_attr(info_add, 'curses.ncurses_version', curses, 'ncurses_version')


def collect_datetime(info_add):
    try:
        import datetime
    except ImportError:
        return

    info_add('datetime.datetime.now', datetime.datetime.now())


def collect_sysconfig(info_add):
    # On Windows, sysconfig is not reliable to get macros used
    # to build Python
    if MS_WINDOWS:
        return

    import sysconfig

    for name in (
        'ABIFLAGS',
        'ANDROID_API_LEVEL',
        'CC',
        'CCSHARED',
        'CFLAGS',
        'CFLAGSFORSHARED',
        'CONFIG_ARGS',
        'HOST_GNU_TYPE',
        'MACHDEP',
        'MULTIARCH',
        'OPT',
        'PY_CFLAGS',
        'PY_CFLAGS_NODIST',
        'PY_CORE_LDFLAGS',
        'PY_LDFLAGS',
        'PY_LDFLAGS_NODIST',
        'PY_STDMODULE_CFLAGS',
        'Py_DEBUG',
        'Py_ENABLE_SHARED',
        'Py_NOGIL',
        'SHELL',
        'SOABI',
        'prefix',
    ):
        value = sysconfig.get_config_var(name)
        if name == 'ANDROID_API_LEVEL' and not value:
            # skip ANDROID_API_LEVEL=0
            continue
        value = normalize_text(value)
        info_add('sysconfig[%s]' % name, value)

    PY_CFLAGS = sysconfig.get_config_var('PY_CFLAGS')
    NDEBUG = (PY_CFLAGS and '-DNDEBUG' in PY_CFLAGS)
    if NDEBUG:
        text = 'ignore assertions (macro defined)'
    else:
        text = 'build assertions (macro not defined)'
    info_add('build.NDEBUG', text)

    for name in (
        'WITH_DOC_STRINGS',
        'WITH_DTRACE',
        'WITH_FREELISTS',
        'WITH_PYMALLOC',
        'WITH_VALGRIND',
    ):
        value = sysconfig.get_config_var(name)
        if value:
            text = 'Yes'
        else:
            text = 'No'
        info_add(f'build.{name}', text)


def collect_ssl(info_add):
    import os
    try:
        import ssl
    except ImportError:
        return
    try:
        import _ssl
    except ImportError:
        _ssl = None

    def format_attr(attr, value):
        if attr.startswith('OP_'):
            return '%#8x' % value
        else:
            return value

    attributes = (
        'OPENSSL_VERSION',
        'OPENSSL_VERSION_INFO',
        'HAS_SNI',
        'OP_ALL',
        'OP_NO_TLSv1_1',
    )
    copy_attributes(info_add, ssl, 'ssl.%s', attributes, formatter=format_attr)

    for name, ctx in (
        ('SSLContext', ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)),
        ('default_https_context', ssl._create_default_https_context()),
        ('stdlib_context', ssl._create_stdlib_context()),
    ):
        attributes = (
            'minimum_version',
            'maximum_version',
            'protocol',
            'options',
            'verify_mode',
        )
        copy_attributes(info_add, ctx, f'ssl.{name}.%s', attributes)

    env_names = ["OPENSSL_CONF", "SSLKEYLOGFILE"]
    if _ssl is not None and hasattr(_ssl, 'get_default_verify_paths'):
        parts = _ssl.get_default_verify_paths()
        env_names.extend((parts[0], parts[2]))

    for name in env_names:
        try:
            value = os.environ[name]
        except KeyError:
            continue
        info_add('ssl.environ[%s]' % name, value)


def collect_socket(info_add):
    try:
        import socket
    except ImportError:
        return

    try:
        hostname = socket.gethostname()
    except (OSError, AttributeError):
        # WASI SDK 16.0 does not have gethostname(2).
        if sys.platform != "wasi":
            raise
    else:
        info_add('socket.hostname', hostname)


def collect_sqlite(info_add):
    try:
        import sqlite3
    except ImportError:
        return

    attributes = ('sqlite_version',)
    copy_attributes(info_add, sqlite3, 'sqlite3.%s', attributes)


def collect_zlib(info_add):
    try:
        import zlib
    except ImportError:
        return

    attributes = ('ZLIB_VERSION', 'ZLIB_RUNTIME_VERSION')
    copy_attributes(info_add, zlib, 'zlib.%s', attributes)


def collect_expat(info_add):
    try:
        from xml.parsers import expat
    except ImportError:
        return

    attributes = ('EXPAT_VERSION',)
    copy_attributes(info_add, expat, 'expat.%s', attributes)


def collect_decimal(info_add):
    try:
        import _decimal
    except ImportError:
        return

    attributes = ('__libmpdec_version__',)
    copy_attributes(info_add, _decimal, '_decimal.%s', attributes)


def collect_testcapi(info_add):
    try:
        import _testinternalcapi
    except ImportError:
        return

    call_func(info_add, 'pymem.allocator', _testinternalcapi, 'pymem_getallocatorsname')


def collect_resource(info_add):
    try:
        import resource
    except ImportError:
        return

    limits = [attr for attr in dir(resource) if attr.startswith('RLIMIT_')]
    for name in limits:
        key = getattr(resource, name)
        value = resource.getrlimit(key)
        info_add('resource.%s' % name, value)

    call_func(info_add, 'resource.pagesize', resource, 'getpagesize')


def collect_test_socket(info_add):
    try:
        from test import test_socket
    except (ImportError, unittest.SkipTest):
        return

    # collect all check attributes like HAVE_SOCKET_CAN
    attributes = [name for name in dir(test_socket)
                  if name.startswith('HAVE_')]
    copy_attributes(info_add, test_socket, 'test_socket.%s', attributes)


def collect_test_support(info_add):
    try:
        from test import support
    except ImportError:
        return

    attributes = ('IPV6_ENABLED',)
    copy_attributes(info_add, support, 'test_support.%s', attributes)

    call_func(info_add, 'test_support._is_gui_available', support, '_is_gui_available')
    call_func(info_add, 'test_support.python_is_optimized', support, 'python_is_optimized')

    info_add('test_support.check_sanitizer(address=True)',
             support.check_sanitizer(address=True))
    info_add('test_support.check_sanitizer(memory=True)',
             support.check_sanitizer(memory=True))
    info_add('test_support.check_sanitizer(ub=True)',
             support.check_sanitizer(ub=True))


def collect_cc(info_add):
    import subprocess
    import sysconfig

    CC = sysconfig.get_config_var('CC')
    if not CC:
        return

    try:
        import shlex
        args = shlex.split(CC)
    except ImportError:
        args = CC.split()
    args.append('--version')
    try:
        proc = subprocess.Popen(args,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True)
    except OSError:
        # Cannot run the compiler, for example when Python has been
        # cross-compiled and installed on the target platform where the
        # compiler is missing.
        return

    stdout = proc.communicate()[0]
    if proc.returncode:
        # CC --version failed: ignore error
        return

    text = stdout.splitlines()[0]
    text = normalize_text(text)
    info_add('CC.version', text)


def collect_gdbm(info_add):
    try:
        from _gdbm import _GDBM_VERSION
    except ImportError:
        return

    info_add('gdbm.GDBM_VERSION', '.'.join(map(str, _GDBM_VERSION)))


def collect_get_config(info_add):
    # Get global configuration variables, _PyPreConfig and _PyCoreConfig
    try:
        from _testinternalcapi import get_configs
    except ImportError:
        return

    all_configs = get_configs()
    for config_type in sorted(all_configs):
        config = all_configs[config_type]
        for key in sorted(config):
            info_add('%s[%s]' % (config_type, key), repr(config[key]))


def collect_subprocess(info_add):
    import subprocess
    copy_attributes(info_add, subprocess, 'subprocess.%s', ('_USE_POSIX_SPAWN',))


def collect_windows(info_add):
    try:
        import ctypes
    except ImportError:
        return

    if not hasattr(ctypes, 'WinDLL'):
        return

    ntdll = ctypes.WinDLL('ntdll')
    BOOLEAN = ctypes.c_ubyte

    try:
        RtlAreLongPathsEnabled = ntdll.RtlAreLongPathsEnabled
    except AttributeError:
        res = '<function not available>'
    else:
        RtlAreLongPathsEnabled.restype = BOOLEAN
        RtlAreLongPathsEnabled.argtypes = ()
        res = bool(RtlAreLongPathsEnabled())
    info_add('windows.RtlAreLongPathsEnabled', res)

    try:
        import _winapi
        dll_path = _winapi.GetModuleFileName(sys.dllhandle)
        info_add('windows.dll_path', dll_path)
    except (ImportError, AttributeError):
        pass

    import subprocess
    try:
        # When wmic.exe output is redirected to a pipe,
        # it uses the OEM code page
        proc = subprocess.Popen(["wmic", "os", "get", "Caption,Version", "/value"],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                encoding="oem",
                                text=True)
        output, stderr = proc.communicate()
        if proc.returncode:
            output = ""
    except OSError:
        pass
    else:
        for line in output.splitlines():
            line = line.strip()
            if line.startswith('Caption='):
                line = line.removeprefix('Caption=').strip()
                if line:
                    info_add('windows.version_caption', line)
            elif line.startswith('Version='):
                line = line.removeprefix('Version=').strip()
                if line:
                    info_add('windows.version', line)

    try:
        proc = subprocess.Popen(["ver"], shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                text=True)
        output = proc.communicate()[0]
        if proc.returncode:
            output = ""
    except OSError:
        return
    else:
        # guard against empty output: splitlines()[0] would raise IndexError
        # when "ver" fails and output was reset to ""
        output = output.strip()
        if output:
            line = output.splitlines()[0]
            if line:
                info_add('windows.ver', line)


def collect_fips(info_add):
    try:
        import _hashlib
    except ImportError:
        _hashlib = None

    if _hashlib is not None:
        call_func(info_add, 'fips.openssl_fips_mode', _hashlib, 'get_fips_mode')

    try:
        with open("/proc/sys/crypto/fips_enabled", encoding="utf-8") as fp:
            line = fp.readline().rstrip()

        if line:
            info_add('fips.linux_crypto_fips_enabled', line)
    except OSError:
        pass


def collect_info(info):
    error = False
    info_add = info.add

    for collect_func in (
        # collect_urandom() must be the first, to check the getrandom() status.
        # Other functions may block on os.urandom() indirectly and so change
        # its state.
        collect_urandom,

        collect_builtins,
        collect_cc,
        collect_curses,
        collect_datetime,
        collect_decimal,
        collect_expat,
        collect_fips,
        collect_gdb,
        collect_gdbm,
        collect_get_config,
        collect_locale,
        collect_os,
        collect_platform,
        collect_pwd,
        collect_readline,
        collect_resource,
        collect_socket,
        collect_sqlite,
        collect_ssl,
        collect_subprocess,
        collect_sys,
        collect_sysconfig,
        collect_testcapi,
        collect_time,
        collect_tkinter,
        collect_windows,
        collect_zlib,

        # Collecting from tests should be last as they have side effects.
        collect_test_socket,
        collect_test_support,
    ):
        try:
            collect_func(info_add)
        except Exception:
            error = True
            print("ERROR: %s() failed" % (collect_func.__name__),
                  file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            print(file=sys.stderr)
            sys.stderr.flush()

    return error


def dump_info(info, file=None):
    title = "Python debug information"
    print(title)
    print("=" * len(title))
    print()

    infos = info.get_infos()
    infos = sorted(infos.items())
    for key, value in infos:
        value = value.replace("\n", " ")
        print("%s: %s" % (key, value))
    print()
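

# The report produced by dump_info() is plain "key: value" text sorted by key,
# roughly like the made-up excerpt below:
#
#   Python debug information
#   ========================
#
#   CC.version: gcc (GCC) 12.2.0
#   sys.byteorder: little
#   ...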


def main():
    info = PythonInfo()
    error = collect_info(info)
    dump_info(info)

    if error:
        print("Collection failed: exit with error", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()