bpo-31370: Remove support for threads-less builds (#3385)

* Remove Setup.config
* Always define WITH_THREAD for compatibility.
Authored by Antoine Pitrou on 2017-09-07 18:56:24 +02:00; committed by Victor Stinner
parent 1f06a680de
commit a6a4dc816d
135 changed files with 2472 additions and 4377 deletions

View File

@ -344,6 +344,9 @@ Build and C API Changes
download a copy of 32-bit Python for this purpose. (Contributed by Zachary
Ware in :issue:`30450`.)
* Support for building ``--without-threads`` is removed.
(Contributed by Antoine Pitrou in :issue:`31370`.)
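In practice this means ``import threading`` (and ``import _thread``) can no longer fail on a supported build, so the guarded-import idiom removed throughout the files below collapses to a plain import. A minimal before/after sketch, with illustrative names only:

# Old defensive pattern, only needed on --without-threads builds:
try:
    import threading
except ImportError:
    threading = None

# From 3.7 on, the plain import always succeeds:
import threading

lock = threading.Lock()
with lock:
    pass  # threading primitives are unconditionally available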
Deprecated
==========

View File

@ -18,7 +18,7 @@
#error "Python's source code assumes C's unsigned char is an 8-bit type."
#endif
#if defined(__sgi) && defined(WITH_THREAD) && !defined(_SGI_MP_SOURCE)
#if defined(__sgi) && !defined(_SGI_MP_SOURCE)
#define _SGI_MP_SOURCE
#endif

View File

@ -183,8 +183,6 @@ PyAPI_FUNC(PyObject *) _PyEval_EvalFrameDefault(struct _frame *f, int exc);
PyAPI_FUNC(PyThreadState *) PyEval_SaveThread(void);
PyAPI_FUNC(void) PyEval_RestoreThread(PyThreadState *);
#ifdef WITH_THREAD
PyAPI_FUNC(int) PyEval_ThreadsInitialized(void);
PyAPI_FUNC(void) PyEval_InitThreads(void);
#ifndef Py_LIMITED_API
@ -213,15 +211,6 @@ PyAPI_FUNC(Py_ssize_t) _PyEval_RequestCodeExtraIndex(freefunc);
#define Py_END_ALLOW_THREADS PyEval_RestoreThread(_save); \
}
#else /* !WITH_THREAD */
#define Py_BEGIN_ALLOW_THREADS {
#define Py_BLOCK_THREADS
#define Py_UNBLOCK_THREADS
#define Py_END_ALLOW_THREADS }
#endif /* !WITH_THREAD */
#ifndef Py_LIMITED_API
PyAPI_FUNC(int) _PyEval_SliceIndex(PyObject *, Py_ssize_t *);
PyAPI_FUNC(int) _PyEval_SliceIndexNotNone(PyObject *, Py_ssize_t *);

View File

@ -100,13 +100,8 @@ PyAPI_FUNC(int) PyImport_ImportFrozenModule(
);
#ifndef Py_LIMITED_API
#ifdef WITH_THREAD
PyAPI_FUNC(void) _PyImport_AcquireLock(void);
PyAPI_FUNC(int) _PyImport_ReleaseLock(void);
#else
#define _PyImport_AcquireLock()
#define _PyImport_ReleaseLock() 1
#endif
PyAPI_FUNC(void) _PyImport_ReInitLock(void);

View File

@ -797,4 +797,12 @@ extern _invalid_parameter_handler _Py_silent_invalid_parameter_handler;
#include <android/api-level.h>
#endif
/* This macro used to tell whether Python was built with multithreading
* enabled. Now multithreading is always enabled, but keep the macro
* for compatibility.
*/
#ifndef WITH_THREAD
#define WITH_THREAD
#endif
#endif /* Py_PYPORT_H */

View File

@ -218,12 +218,10 @@ PyAPI_FUNC(void) PyThreadState_Delete(PyThreadState *);
#ifndef Py_LIMITED_API
PyAPI_FUNC(void) _PyThreadState_DeleteExcept(PyThreadState *tstate);
#endif /* !Py_LIMITED_API */
#ifdef WITH_THREAD
PyAPI_FUNC(void) PyThreadState_DeleteCurrent(void);
#ifndef Py_LIMITED_API
PyAPI_FUNC(void) _PyGILState_Reinit(void);
#endif /* !Py_LIMITED_API */
#endif
/* Return the current thread state. The global interpreter lock must be held.
* When the current thread state is NULL, this issues a fatal error (so that
@ -257,7 +255,6 @@ typedef
enum {PyGILState_LOCKED, PyGILState_UNLOCKED}
PyGILState_STATE;
#ifdef WITH_THREAD
/* Ensure that the current thread is ready to call the Python
C API, regardless of the current state of Python, or of its
@ -319,7 +316,6 @@ PyAPI_FUNC(int) PyGILState_Check(void);
PyAPI_FUNC(PyInterpreterState *) _PyGILState_GetInterpreterStateUnsafe(void);
#endif
#endif /* #ifdef WITH_THREAD */
/* The implementation of sys._current_frames() Returns a dict mapping
thread id to that thread's current frame.

View File

@ -1,163 +0,0 @@
"""Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
def exit():
"""Dummy implementation of _thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of _thread.get_ident().
Since this module should only be used when _threadmodule is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return 1
def allocate_lock():
"""Dummy implementation of _thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of _thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
def _set_sentinel():
"""Dummy implementation of _thread._set_sentinel()."""
return LockType()
class LockType(object):
"""Class implementing dummy implementation of _thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the _thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None, timeout=-1):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
if timeout > 0:
import time
time.sleep(timeout)
return False
__enter__ = acquire
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
def __repr__(self):
return "<%s %s.%s object at %s>" % (
"locked" if self.locked_status else "unlocked",
self.__class__.__module__,
self.__class__.__qualname__,
hex(id(self))
)
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
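One behavioural detail of the deleted module worth keeping in mind: ``_dummy_thread.start_new_thread()`` ran the target synchronously in the calling thread, while the real ``_thread`` runs it concurrently, so callers must synchronize explicitly. A small sketch of the difference, using only the module that is now always present:

import _thread

results = []
done = _thread.allocate_lock()
done.acquire()

def work(n):
    # With _dummy_thread this ran inline, so results was filled before
    # start_new_thread() returned; with _thread it runs on another thread
    # and the caller waits on the lock instead.
    results.append(n * n)
    done.release()

_thread.start_new_thread(work, (6,))
done.acquire()   # released by the worker when it finishes
print(results)   # [36]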

View File

@ -436,75 +436,34 @@ _rounding_modes = (ROUND_DOWN, ROUND_HALF_UP, ROUND_HALF_EVEN, ROUND_CEILING,
# work for older Pythons. If threads are not part of the build, create a
# mock threading object with threading.local() returning the module namespace.
try:
import threading
except ImportError:
# Python was compiled without threads; create a mock object instead
class MockThreading(object):
def local(self, sys=sys):
return sys.modules[__xname__]
threading = MockThreading()
del MockThreading
import threading
try:
threading.local
local = threading.local()
if hasattr(local, '__decimal_context__'):
del local.__decimal_context__
except AttributeError:
def getcontext(_local=local):
"""Returns this thread's context.
# To fix reloading, force it to create a new context
# Old contexts have different exceptions in their dicts, making problems.
if hasattr(threading.current_thread(), '__decimal_context__'):
del threading.current_thread().__decimal_context__
def setcontext(context):
"""Set this thread's context to context."""
if context in (DefaultContext, BasicContext, ExtendedContext):
context = context.copy()
context.clear_flags()
threading.current_thread().__decimal_context__ = context
def getcontext():
"""Returns this thread's context.
If this thread does not yet have a context, returns
a new context and sets this thread's context.
New contexts are copies of DefaultContext.
"""
try:
return threading.current_thread().__decimal_context__
except AttributeError:
context = Context()
threading.current_thread().__decimal_context__ = context
return context
else:
local = threading.local()
if hasattr(local, '__decimal_context__'):
del local.__decimal_context__
def getcontext(_local=local):
"""Returns this thread's context.
If this thread does not yet have a context, returns
a new context and sets this thread's context.
New contexts are copies of DefaultContext.
"""
try:
return _local.__decimal_context__
except AttributeError:
context = Context()
_local.__decimal_context__ = context
return context
def setcontext(context, _local=local):
"""Set this thread's context to context."""
if context in (DefaultContext, BasicContext, ExtendedContext):
context = context.copy()
context.clear_flags()
If this thread does not yet have a context, returns
a new context and sets this thread's context.
New contexts are copies of DefaultContext.
"""
try:
return _local.__decimal_context__
except AttributeError:
context = Context()
_local.__decimal_context__ = context
return context
del threading, local # Don't contaminate the namespace
def setcontext(context, _local=local):
"""Set this thread's context to context."""
if context in (DefaultContext, BasicContext, ExtendedContext):
context = context.copy()
context.clear_flags()
_local.__decimal_context__ = context
del threading, local # Don't contaminate the namespace
def localcontext(ctx=None):
"""Return a context manager for a copy of the supplied context

View File

@ -9,10 +9,7 @@ import errno
import stat
import sys
# Import _thread instead of threading to reduce startup cost
try:
from _thread import allocate_lock as Lock
except ImportError:
from _dummy_thread import allocate_lock as Lock
from _thread import allocate_lock as Lock
if sys.platform in {'win32', 'cygwin'}:
from msvcrt import setmode as _setmode
else:

View File

@ -19,10 +19,7 @@ from re import escape as re_escape
from datetime import (date as datetime_date,
timedelta as datetime_timedelta,
timezone as datetime_timezone)
try:
from _thread import allocate_lock as _thread_allocate_lock
except ImportError:
from _dummy_thread import allocate_lock as _thread_allocate_lock
from _thread import allocate_lock as _thread_allocate_lock
__all__ = []

View File

@ -14,11 +14,7 @@ import io
import os
import warnings
import _compression
try:
from threading import RLock
except ImportError:
from dummy_threading import RLock
from threading import RLock
from _bz2 import BZ2Compressor, BZ2Decompressor

View File

@ -1,10 +1,8 @@
import unittest, os, errno
import threading
from ctypes import *
from ctypes.util import find_library
try:
import threading
except ImportError:
threading = None
class Test(unittest.TestCase):
def test_open(self):
@ -25,26 +23,25 @@ class Test(unittest.TestCase):
self.assertEqual(set_errno(32), errno.ENOENT)
self.assertEqual(get_errno(), 32)
if threading:
def _worker():
set_errno(0)
libc = CDLL(libc_name, use_errno=False)
if os.name == "nt":
libc_open = libc._open
else:
libc_open = libc.open
libc_open.argtypes = c_char_p, c_int
self.assertEqual(libc_open(b"", 0), -1)
self.assertEqual(get_errno(), 0)
t = threading.Thread(target=_worker)
t.start()
t.join()
self.assertEqual(get_errno(), 32)
def _worker():
set_errno(0)
libc = CDLL(libc_name, use_errno=False)
if os.name == "nt":
libc_open = libc._open
else:
libc_open = libc.open
libc_open.argtypes = c_char_p, c_int
self.assertEqual(libc_open(b"", 0), -1)
self.assertEqual(get_errno(), 0)
t = threading.Thread(target=_worker)
t.start()
t.join()
self.assertEqual(get_errno(), 32)
set_errno(0)
@unittest.skipUnless(os.name == "nt", 'Test specific to Windows')
def test_GetLastError(self):
dll = WinDLL("kernel32", use_last_error=True)

View File

@ -1,78 +0,0 @@
"""Faux ``threading`` version using ``dummy_thread`` instead of ``thread``.
The module ``_dummy_threading`` is added to ``sys.modules`` in order
to not have ``threading`` considered imported. Had ``threading`` been
directly imported it would have made all subsequent imports succeed
regardless of whether ``_thread`` was available which is not desired.
"""
from sys import modules as sys_modules
import _dummy_thread
# Declaring now so as to not have to nest ``try``s to get proper clean-up.
holding_thread = False
holding_threading = False
holding__threading_local = False
try:
# Could have checked if ``_thread`` was not in sys.modules and gone
# a different route, but decided to mirror technique used with
# ``threading`` below.
if '_thread' in sys_modules:
held_thread = sys_modules['_thread']
holding_thread = True
# Must have some module named ``_thread`` that implements its API
# in order to initially import ``threading``.
sys_modules['_thread'] = sys_modules['_dummy_thread']
if 'threading' in sys_modules:
# If ``threading`` is already imported, might as well prevent
# trying to import it more than needed by saving it if it is
# already imported before deleting it.
held_threading = sys_modules['threading']
holding_threading = True
del sys_modules['threading']
if '_threading_local' in sys_modules:
# If ``_threading_local`` is already imported, might as well prevent
# trying to import it more than needed by saving it if it is
# already imported before deleting it.
held__threading_local = sys_modules['_threading_local']
holding__threading_local = True
del sys_modules['_threading_local']
import threading
# Need a copy of the code kept somewhere...
sys_modules['_dummy_threading'] = sys_modules['threading']
del sys_modules['threading']
sys_modules['_dummy__threading_local'] = sys_modules['_threading_local']
del sys_modules['_threading_local']
from _dummy_threading import *
from _dummy_threading import __all__
finally:
# Put back ``threading`` if we overwrote earlier
if holding_threading:
sys_modules['threading'] = held_threading
del held_threading
del holding_threading
# Put back ``_threading_local`` if we overwrote earlier
if holding__threading_local:
sys_modules['_threading_local'] = held__threading_local
del held__threading_local
del holding__threading_local
# Put back ``thread`` if we overwrote, else del the entry we made
if holding_thread:
sys_modules['_thread'] = held_thread
del held_thread
else:
del sys_modules['_thread']
del holding_thread
del _dummy_thread
del sys_modules
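The deleted shim leaned on a general technique that remains handy elsewhere: pre-seeding ``sys.modules`` so that a later ``import`` resolves to a stand-in instead of searching for the real module. A self-contained sketch of that trick, with purely hypothetical module names:

import sys
import types

# Hypothetical stand-in registered before anything imports "acme_backend".
stub = types.ModuleType("acme_backend")
stub.answer = lambda: 42

saved = sys.modules.get("acme_backend")
sys.modules["acme_backend"] = stub
try:
    import acme_backend              # satisfied from sys.modules, no file needed
    print(acme_backend.answer())     # 42
finally:
    # Undo the substitution so it does not leak into later imports.
    if saved is None:
        del sys.modules["acme_backend"]
    else:
        sys.modules["acme_backend"] = saved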

View File

@ -22,13 +22,7 @@ from collections import namedtuple
from types import MappingProxyType
from weakref import WeakKeyDictionary
from reprlib import recursive_repr
try:
from _thread import RLock
except ImportError:
class RLock:
'Dummy reentrant lock for builds without threads'
def __enter__(self): pass
def __exit__(self, exctype, excinst, exctb): pass
from _thread import RLock
################################################################################
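``functools`` now imports the real reentrant lock, which ``lru_cache`` uses to guard its cache bookkeeping; the removed stand-in only pretended to provide it. A quick reminder of the reentrancy the real lock guarantees (sketch only):

from _thread import RLock

lock = RLock()

def countdown(n):
    # Reentrant: the owning thread may re-acquire a lock it already holds,
    # which matters when a guarded function ends up being called recursively.
    with lock:
        return 0 if n == 0 else countdown(n - 1)

print(countdown(3))   # 0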

View File

@ -33,10 +33,7 @@ import datetime
import re
import time
import urllib.parse, urllib.request
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import threading as _threading
import http.client # only for the default HTTP port
from calendar import timegm

View File

@ -37,10 +37,7 @@ __all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
'warn', 'warning', 'getLogRecordFactory', 'setLogRecordFactory',
'lastResort', 'raiseExceptions']
try:
import threading
except ImportError: #pragma: no cover
threading = None
import threading
__author__ = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__ = "production"
@ -210,11 +207,7 @@ def _checkLevel(level):
#the lock would already have been acquired - so we need an RLock.
#The same argument applies to Loggers and Manager.loggerDict.
#
if threading:
_lock = threading.RLock()
else: #pragma: no cover
_lock = None
_lock = threading.RLock()
def _acquireLock():
"""
@ -295,7 +288,7 @@ class LogRecord(object):
self.created = ct
self.msecs = (ct - int(ct)) * 1000
self.relativeCreated = (self.created - _startTime) * 1000
if logThreads and threading:
if logThreads:
self.thread = threading.get_ident()
self.threadName = threading.current_thread().name
else: # pragma: no cover
@ -799,10 +792,7 @@ class Handler(Filterer):
"""
Acquire a thread lock for serializing access to the underlying I/O.
"""
if threading:
self.lock = threading.RLock()
else: #pragma: no cover
self.lock = None
self.lock = threading.RLock()
def acquire(self):
"""

View File

@ -31,14 +31,9 @@ import logging.handlers
import re
import struct
import sys
import threading
import traceback
try:
import _thread as thread
import threading
except ImportError: #pragma: no cover
thread = None
from socketserver import ThreadingTCPServer, StreamRequestHandler
@ -816,8 +811,6 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None):
normal. Note that you can return transformed bytes, e.g. by decrypting
the bytes passed in.
"""
if not thread: #pragma: no cover
raise NotImplementedError("listen() needs threading to work")
class ConfigStreamHandler(StreamRequestHandler):
"""

View File

@ -26,10 +26,7 @@ To use, simply 'import logging.handlers' and log away!
import logging, socket, os, pickle, struct, time, re
from stat import ST_DEV, ST_INO, ST_MTIME
import queue
try:
import threading
except ImportError: #pragma: no cover
threading = None
import threading
#
# Some constants...
@ -1395,110 +1392,110 @@ class QueueHandler(logging.Handler):
except Exception:
self.handleError(record)
if threading:
class QueueListener(object):
class QueueListener(object):
"""
This class implements an internal threaded listener which watches for
LogRecords being added to a queue, removes them and passes them to a
list of handlers for processing.
"""
_sentinel = None
def __init__(self, queue, *handlers, respect_handler_level=False):
"""
This class implements an internal threaded listener which watches for
LogRecords being added to a queue, removes them and passes them to a
list of handlers for processing.
Initialise an instance with the specified queue and
handlers.
"""
_sentinel = None
self.queue = queue
self.handlers = handlers
self._thread = None
self.respect_handler_level = respect_handler_level
def __init__(self, queue, *handlers, respect_handler_level=False):
"""
Initialise an instance with the specified queue and
handlers.
"""
self.queue = queue
self.handlers = handlers
self._thread = None
self.respect_handler_level = respect_handler_level
def dequeue(self, block):
"""
Dequeue a record and return it, optionally blocking.
def dequeue(self, block):
"""
Dequeue a record and return it, optionally blocking.
The base implementation uses get. You may want to override this method
if you want to use timeouts or work with custom queue implementations.
"""
return self.queue.get(block)
The base implementation uses get. You may want to override this method
if you want to use timeouts or work with custom queue implementations.
"""
return self.queue.get(block)
def start(self):
"""
Start the listener.
def start(self):
"""
Start the listener.
This starts up a background thread to monitor the queue for
LogRecords to process.
"""
self._thread = t = threading.Thread(target=self._monitor)
t.daemon = True
t.start()
This starts up a background thread to monitor the queue for
LogRecords to process.
"""
self._thread = t = threading.Thread(target=self._monitor)
t.daemon = True
t.start()
def prepare(self , record):
"""
Prepare a record for handling.
def prepare(self , record):
"""
Prepare a record for handling.
This method just returns the passed-in record. You may want to
override this method if you need to do any custom marshalling or
manipulation of the record before passing it to the handlers.
"""
return record
This method just returns the passed-in record. You may want to
override this method if you need to do any custom marshalling or
manipulation of the record before passing it to the handlers.
"""
return record
def handle(self, record):
"""
Handle a record.
def handle(self, record):
"""
Handle a record.
This just loops through the handlers offering them the record
to handle.
"""
record = self.prepare(record)
for handler in self.handlers:
if not self.respect_handler_level:
process = True
else:
process = record.levelno >= handler.level
if process:
handler.handle(record)
This just loops through the handlers offering them the record
to handle.
"""
record = self.prepare(record)
for handler in self.handlers:
if not self.respect_handler_level:
process = True
else:
process = record.levelno >= handler.level
if process:
handler.handle(record)
def _monitor(self):
"""
Monitor the queue for records, and ask the handler
to deal with them.
def _monitor(self):
"""
Monitor the queue for records, and ask the handler
to deal with them.
This method runs on a separate, internal thread.
The thread will terminate if it sees a sentinel object in the queue.
"""
q = self.queue
has_task_done = hasattr(q, 'task_done')
while True:
try:
record = self.dequeue(True)
if record is self._sentinel:
break
self.handle(record)
if has_task_done:
q.task_done()
except queue.Empty:
This method runs on a separate, internal thread.
The thread will terminate if it sees a sentinel object in the queue.
"""
q = self.queue
has_task_done = hasattr(q, 'task_done')
while True:
try:
record = self.dequeue(True)
if record is self._sentinel:
break
self.handle(record)
if has_task_done:
q.task_done()
except queue.Empty:
break
def enqueue_sentinel(self):
"""
This is used to enqueue the sentinel record.
def enqueue_sentinel(self):
"""
This is used to enqueue the sentinel record.
The base implementation uses put_nowait. You may want to override this
method if you want to use timeouts or work with custom queue
implementations.
"""
self.queue.put_nowait(self._sentinel)
The base implementation uses put_nowait. You may want to override this
method if you want to use timeouts or work with custom queue
implementations.
"""
self.queue.put_nowait(self._sentinel)
def stop(self):
"""
Stop the listener.
def stop(self):
"""
Stop the listener.
This asks the thread to terminate, and then waits for it to do so.
Note that if you don't call this before your application exits, there
may be some records still left on the queue, which won't be processed.
"""
self.enqueue_sentinel()
self._thread.join()
self._thread = None
This asks the thread to terminate, and then waits for it to do so.
Note that if you don't call this before your application exits, there
may be some records still left on the queue, which won't be processed.
"""
self.enqueue_sentinel()
self._thread.join()
self._thread = None
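``QueueListener`` is only de-indented here; its public interface is unchanged, so the usual pairing with ``QueueHandler`` still looks like this (minimal usage sketch):

import logging
import logging.handlers
import queue

q = queue.Queue()
listener = logging.handlers.QueueListener(q, logging.StreamHandler())
listener.start()                      # starts the internal monitor thread

logger = logging.getLogger("demo")
logger.addHandler(logging.handlers.QueueHandler(q))
logger.warning("handled on the listener's thread")

listener.stop()                       # enqueues the sentinel and joins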

View File

@ -1,9 +1,6 @@
'''A multi-producer, multi-consumer queue.'''
try:
import threading
except ImportError:
import dummy_threading as threading
import threading
from collections import deque
from heapq import heappush, heappop
from time import monotonic as time
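``queue`` is now always backed by real locks and condition variables rather than ``dummy_threading``; the standard producer/consumer pattern is unchanged. A brief sketch:

import queue
import threading

q = queue.Queue()

def producer():
    for i in range(3):
        q.put(i)
    q.put(None)          # sentinel marks the end of the stream

threading.Thread(target=producer).start()

while True:
    item = q.get()
    if item is None:
        break
    print("got", item)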

View File

@ -4,10 +4,7 @@ __all__ = ["Repr", "repr", "recursive_repr"]
import builtins
from itertools import islice
try:
from _thread import get_ident
except ImportError:
from _dummy_thread import get_ident
from _thread import get_ident
def recursive_repr(fillvalue='...'):
'Decorator to make a repr function return fillvalue for a recursive call'

View File

@ -26,10 +26,7 @@ has another way to reference private data (besides global variables).
import time
import heapq
from collections import namedtuple
try:
import threading
except ImportError:
import dummy_threading as threading
import threading
from time import monotonic as _time
__all__ = ["scheduler"]

View File

@ -127,10 +127,7 @@ import socket
import selectors
import os
import sys
try:
import threading
except ImportError:
import dummy_threading as threading
import threading
from io import BufferedIOBase
from time import monotonic as time

View File

@ -21,12 +21,9 @@
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import threading
import unittest
import sqlite3 as sqlite
try:
import threading
except ImportError:
threading = None
from test.support import TESTFN, unlink
@ -503,7 +500,6 @@ class CursorTests(unittest.TestCase):
self.assertEqual(results, expected)
@unittest.skipUnless(threading, 'This test requires threading.')
class ThreadTests(unittest.TestCase):
def setUp(self):
self.con = sqlite.connect(":memory:")

View File

@ -138,10 +138,7 @@ else:
import _posixsubprocess
import select
import selectors
try:
import threading
except ImportError:
import dummy_threading as threading
import threading
# When select or poll has indicated that the file is writable,
# we can write up to _PIPE_BUF bytes without risk of blocking.

View File

@ -44,11 +44,7 @@ import shutil as _shutil
import errno as _errno
from random import Random as _Random
import weakref as _weakref
try:
import _thread
except ImportError:
import _dummy_thread as _thread
import _thread
_allocate_lock = _thread.allocate_lock
_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL

View File

@ -10,9 +10,9 @@ active threads survive in the child after a fork(); this is an error.
"""
import os, sys, time, unittest
import threading
import test.support as support
threading = support.import_module('threading')
LONGSLEEP = 2
SHORTSLEEP = 0.5

View File

@ -3,15 +3,11 @@ import json
import os
import queue
import sys
import threading
import time
import traceback
import types
from test import support
try:
import threading
except ImportError:
print("Multiprocess option requires thread support")
sys.exit(2)
from test.libregrtest.runtest import (
runtest, INTERRUPTED, CHILD_ERROR, PROGRESS_MIN_TIME,

View File

@ -5,12 +5,9 @@ import os
import shutil
import sys
import sysconfig
import threading
import warnings
from test import support
try:
import threading
except ImportError:
threading = None
try:
import _multiprocessing, multiprocessing.process
except ImportError:
@ -181,13 +178,9 @@ class saved_test_environment:
# Controlling dangling references to Thread objects can make it easier
# to track reference leaks.
def get_threading__dangling(self):
if not threading:
return None
# This copies the weakrefs without making any strong reference
return threading._dangling.copy()
def restore_threading__dangling(self, saved):
if not threading:
return
threading._dangling.clear()
threading._dangling.update(saved)

View File

@ -25,17 +25,14 @@ import subprocess
import sys
import sysconfig
import tempfile
import _thread
import threading
import time
import types
import unittest
import urllib.error
import warnings
try:
import _thread, threading
except ImportError:
_thread = None
threading = None
try:
import multiprocessing.process
except ImportError:
@ -2028,16 +2025,11 @@ environment_altered = False
# at the end of a test run.
def threading_setup():
if _thread:
return _thread._count(), threading._dangling.copy()
else:
return 1, ()
return _thread._count(), threading._dangling.copy()
def threading_cleanup(*original_values):
global environment_altered
if not _thread:
return
_MAX_COUNT = 100
t0 = time.monotonic()
for count in range(_MAX_COUNT):
@ -2061,9 +2053,6 @@ def reap_threads(func):
ensure that the threads are cleaned up even when the test fails.
If threading is unavailable this function does nothing.
"""
if not _thread:
return func
@functools.wraps(func)
def decorator(*args):
key = threading_setup()
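``threading_setup()``/``threading_cleanup()`` and the ``reap_threads`` decorator now always snapshot the live thread count and the dangling-thread set instead of degrading to no-ops. Typical use in a test, sketched on the assumption that it runs inside the CPython test suite where ``test.support`` is importable:

import threading
import unittest
from test import support

class ReapExample(unittest.TestCase):
    @support.reap_threads          # snapshot before, clean up stragglers after
    def test_worker_completes(self):
        done = threading.Event()
        t = threading.Thread(target=done.set)
        t.start()
        t.join()
        self.assertTrue(done.is_set())

if __name__ == "__main__":
    unittest.main()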

View File

@ -2,110 +2,104 @@
from test import support
# If this fails, the test will be skipped.
thread = support.import_module('_thread')
import asynchat
import asyncore
import errno
import socket
import sys
import _thread as thread
import threading
import time
import unittest
import unittest.mock
try:
import threading
except ImportError:
threading = None
HOST = support.HOST
SERVER_QUIT = b'QUIT\n'
TIMEOUT = 3.0
if threading:
class echo_server(threading.Thread):
# parameter to determine the number of bytes passed back to the
# client each send
chunk_size = 1
def __init__(self, event):
threading.Thread.__init__(self)
self.event = event
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = support.bind_port(self.sock)
# This will be set if the client wants us to wait before echoing
# data back.
self.start_resend_event = None
class echo_server(threading.Thread):
# parameter to determine the number of bytes passed back to the
# client each send
chunk_size = 1
def run(self):
self.sock.listen()
self.event.set()
conn, client = self.sock.accept()
self.buffer = b""
# collect data until quit message is seen
while SERVER_QUIT not in self.buffer:
data = conn.recv(1)
if not data:
break
self.buffer = self.buffer + data
def __init__(self, event):
threading.Thread.__init__(self)
self.event = event
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.port = support.bind_port(self.sock)
# This will be set if the client wants us to wait before echoing
# data back.
self.start_resend_event = None
# remove the SERVER_QUIT message
self.buffer = self.buffer.replace(SERVER_QUIT, b'')
def run(self):
self.sock.listen()
self.event.set()
conn, client = self.sock.accept()
self.buffer = b""
# collect data until quit message is seen
while SERVER_QUIT not in self.buffer:
data = conn.recv(1)
if not data:
break
self.buffer = self.buffer + data
if self.start_resend_event:
self.start_resend_event.wait()
# remove the SERVER_QUIT message
self.buffer = self.buffer.replace(SERVER_QUIT, b'')
# re-send entire set of collected data
try:
# this may fail on some tests, such as test_close_when_done,
# since the client closes the channel when it's done sending
while self.buffer:
n = conn.send(self.buffer[:self.chunk_size])
time.sleep(0.001)
self.buffer = self.buffer[n:]
except:
if self.start_resend_event:
self.start_resend_event.wait()
# re-send entire set of collected data
try:
# this may fail on some tests, such as test_close_when_done,
# since the client closes the channel when it's done sending
while self.buffer:
n = conn.send(self.buffer[:self.chunk_size])
time.sleep(0.001)
self.buffer = self.buffer[n:]
except:
pass
conn.close()
self.sock.close()
class echo_client(asynchat.async_chat):
def __init__(self, terminator, server_port):
asynchat.async_chat.__init__(self)
self.contents = []
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect((HOST, server_port))
self.set_terminator(terminator)
self.buffer = b""
def handle_connect(self):
pass
if sys.platform == 'darwin':
# select.poll returns a select.POLLHUP at the end of the tests
# on darwin, so just ignore it
def handle_expt(self):
pass
conn.close()
self.sock.close()
def collect_incoming_data(self, data):
self.buffer += data
class echo_client(asynchat.async_chat):
def found_terminator(self):
self.contents.append(self.buffer)
self.buffer = b""
def __init__(self, terminator, server_port):
asynchat.async_chat.__init__(self)
self.contents = []
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect((HOST, server_port))
self.set_terminator(terminator)
self.buffer = b""
def handle_connect(self):
pass
if sys.platform == 'darwin':
# select.poll returns a select.POLLHUP at the end of the tests
# on darwin, so just ignore it
def handle_expt(self):
pass
def collect_incoming_data(self, data):
self.buffer += data
def found_terminator(self):
self.contents.append(self.buffer)
self.buffer = b""
def start_echo_server():
event = threading.Event()
s = echo_server(event)
s.start()
event.wait()
event.clear()
time.sleep(0.01) # Give server time to start accepting.
return s, event
def start_echo_server():
event = threading.Event()
s = echo_server(event)
s.start()
event.wait()
event.clear()
time.sleep(0.01) # Give server time to start accepting.
return s, event
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestAsynchat(unittest.TestCase):
usepoll = False

View File

@ -1,8 +1,6 @@
import os
from test.support import load_package_tests, import_module
# Skip tests if we don't have threading.
import_module('threading')
# Skip tests if we don't have concurrent.futures.
import_module('concurrent.futures')

View File

@ -7,6 +7,7 @@ import sys
import time
import errno
import struct
import threading
from test import support
from io import BytesIO
@ -14,10 +15,6 @@ from io import BytesIO
if support.PGO:
raise unittest.SkipTest("test is not helpful for PGO")
try:
import threading
except ImportError:
threading = None
TIMEOUT = 3
HAS_UNIX_SOCKETS = hasattr(socket, 'AF_UNIX')
@ -326,7 +323,6 @@ class DispatcherWithSendTests(unittest.TestCase):
def tearDown(self):
asyncore.close_all()
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.reap_threads
def test_send(self):
evt = threading.Event()
@ -776,7 +772,6 @@ class BaseTestAPI:
self.assertTrue(s.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR))
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.reap_threads
def test_quick_connect(self):
# see: http://bugs.python.org/issue10340

View File

@ -10,13 +10,10 @@ import pathlib
import random
import shutil
import subprocess
import threading
from test.support import unlink
import _compression
try:
import threading
except ImportError:
threading = None
# Skip tests if the bz2 module doesn't exist.
bz2 = support.import_module('bz2')
@ -491,7 +488,6 @@ class BZ2FileTest(BaseTest):
else:
self.fail("1/0 didn't raise an exception")
@unittest.skipUnless(threading, 'Threading required for this test.')
def testThreading(self):
# Issue #7205: Using a BZ2File from several threads shouldn't deadlock.
data = b"1" * 2**20
@ -504,13 +500,6 @@ class BZ2FileTest(BaseTest):
with support.start_threads(threads):
pass
def testWithoutThreading(self):
module = support.import_fresh_module("bz2", blocked=("threading",))
with module.BZ2File(self.filename, "wb") as f:
f.write(b"abc")
with module.BZ2File(self.filename, "rb") as f:
self.assertEqual(f.read(), b"abc")
def testMixedIterationAndReads(self):
self.createTempFile()
linelen = len(self.TEXT_LINES[0])

View File

@ -11,6 +11,7 @@ import subprocess
import sys
import sysconfig
import textwrap
import threading
import time
import unittest
from test import support
@ -20,10 +21,7 @@ try:
import _posixsubprocess
except ImportError:
_posixsubprocess = None
try:
import threading
except ImportError:
threading = None
# Skip this test if the _testcapi module isn't available.
_testcapi = support.import_module('_testcapi')
@ -52,7 +50,6 @@ class CAPITest(unittest.TestCase):
self.assertEqual(testfunction.attribute, "test")
self.assertRaises(AttributeError, setattr, inst.testfunction, "attribute", "test")
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_no_FatalError_infinite_loop(self):
with support.SuppressCrashReport():
p = subprocess.Popen([sys.executable, "-c",
@ -276,7 +273,6 @@ class CAPITest(unittest.TestCase):
self.assertIn(b'MemoryError 3 30', out)
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestPendingCalls(unittest.TestCase):
def pendingcalls_submit(self, l, n):
@ -685,7 +681,6 @@ class SkipitemTest(unittest.TestCase):
parse((1,), {}, 'O|OO', ['', 'a', ''])
@unittest.skipUnless(threading, 'Threading required for this test.')
class TestThreadState(unittest.TestCase):
@support.reap_threads
@ -762,7 +757,6 @@ class PyMemDebugTests(unittest.TestCase):
regex = regex.format(ptr=self.PTR_REGEX)
self.assertRegex(out, regex)
@unittest.skipUnless(threading, 'Test requires a GIL (multithreading)')
def check_malloc_without_gil(self, code):
out = self.check(code)
expected = ('Fatal Python error: Python memory allocator called '

View File

@ -4,10 +4,6 @@ import test.support
test.support.import_module('_multiprocessing')
# Skip tests if sem_open implementation is broken.
test.support.import_module('multiprocessing.synchronize')
# import threading after _multiprocessing to raise a more relevant error
# message: "No module named _multiprocessing". _multiprocessing is not compiled
# without thread support.
test.support.import_module('threading')
from test.support.script_helper import assert_python_ok

View File

@ -3,13 +3,10 @@
import io
import sys
import tempfile
import threading
import unittest
from contextlib import * # Tests __all__
from test import support
try:
import threading
except ImportError:
threading = None
class TestAbstractContextManager(unittest.TestCase):
@ -275,7 +272,6 @@ class FileContextTestCase(unittest.TestCase):
finally:
support.unlink(tfn)
@unittest.skipUnless(threading, 'Threading required for this test.')
class LockContextTestCase(unittest.TestCase):
def boilerPlate(self, lock, locked):

View File

@ -38,10 +38,7 @@ from test.support import (import_fresh_module, TestFailed,
run_with_locale, cpython_only)
import random
import inspect
try:
import threading
except ImportError:
threading = None
import threading
C = import_fresh_module('decimal', fresh=['_decimal'])
@ -1625,10 +1622,10 @@ class ThreadingTest(unittest.TestCase):
DefaultContext.Emax = save_emax
DefaultContext.Emin = save_emin
@unittest.skipUnless(threading, 'threading required')
class CThreadingTest(ThreadingTest):
decimal = C
@unittest.skipUnless(threading, 'threading required')
class PyThreadingTest(ThreadingTest):
decimal = P

View File

@ -1,8 +1,8 @@
from xmlrpc.server import DocXMLRPCServer
import http.client
import sys
import threading
from test import support
threading = support.import_module('threading')
import unittest
def make_request_and_skipIf(condition, reason):

View File

@ -1,255 +0,0 @@
import _dummy_thread as _thread
import time
import queue
import random
import unittest
from test import support
from unittest import mock
DELAY = 0
class LockTests(unittest.TestCase):
"""Test lock objects."""
def setUp(self):
# Create a lock
self.lock = _thread.allocate_lock()
def test_initlock(self):
#Make sure locks start locked
self.assertFalse(self.lock.locked(),
"Lock object is not initialized unlocked.")
def test_release(self):
# Test self.lock.release()
self.lock.acquire()
self.lock.release()
self.assertFalse(self.lock.locked(),
"Lock object did not release properly.")
def test_LockType_context_manager(self):
with _thread.LockType():
pass
self.assertFalse(self.lock.locked(),
"Acquired Lock was not released")
def test_improper_release(self):
#Make sure release of an unlocked thread raises RuntimeError
self.assertRaises(RuntimeError, self.lock.release)
def test_cond_acquire_success(self):
#Make sure the conditional acquiring of the lock works.
self.assertTrue(self.lock.acquire(0),
"Conditional acquiring of the lock failed.")
def test_cond_acquire_fail(self):
#Test acquiring locked lock returns False
self.lock.acquire(0)
self.assertFalse(self.lock.acquire(0),
"Conditional acquiring of a locked lock incorrectly "
"succeeded.")
def test_uncond_acquire_success(self):
#Make sure unconditional acquiring of a lock works.
self.lock.acquire()
self.assertTrue(self.lock.locked(),
"Uncondional locking failed.")
def test_uncond_acquire_return_val(self):
#Make sure that an unconditional locking returns True.
self.assertIs(self.lock.acquire(1), True,
"Unconditional locking did not return True.")
self.assertIs(self.lock.acquire(), True)
def test_uncond_acquire_blocking(self):
#Make sure that unconditional acquiring of a locked lock blocks.
def delay_unlock(to_unlock, delay):
"""Hold on to lock for a set amount of time before unlocking."""
time.sleep(delay)
to_unlock.release()
self.lock.acquire()
start_time = int(time.time())
_thread.start_new_thread(delay_unlock,(self.lock, DELAY))
if support.verbose:
print()
print("*** Waiting for thread to release the lock "\
"(approx. %s sec.) ***" % DELAY)
self.lock.acquire()
end_time = int(time.time())
if support.verbose:
print("done")
self.assertGreaterEqual(end_time - start_time, DELAY,
"Blocking by unconditional acquiring failed.")
@mock.patch('time.sleep')
def test_acquire_timeout(self, mock_sleep):
"""Test invoking acquire() with a positive timeout when the lock is
already acquired. Ensure that time.sleep() is invoked with the given
timeout and that False is returned."""
self.lock.acquire()
retval = self.lock.acquire(waitflag=0, timeout=1)
self.assertTrue(mock_sleep.called)
mock_sleep.assert_called_once_with(1)
self.assertEqual(retval, False)
def test_lock_representation(self):
self.lock.acquire()
self.assertIn("locked", repr(self.lock))
self.lock.release()
self.assertIn("unlocked", repr(self.lock))
class MiscTests(unittest.TestCase):
"""Miscellaneous tests."""
def test_exit(self):
self.assertRaises(SystemExit, _thread.exit)
def test_ident(self):
self.assertIsInstance(_thread.get_ident(), int,
"_thread.get_ident() returned a non-integer")
self.assertGreater(_thread.get_ident(), 0)
def test_LockType(self):
self.assertIsInstance(_thread.allocate_lock(), _thread.LockType,
"_thread.LockType is not an instance of what "
"is returned by _thread.allocate_lock()")
def test_set_sentinel(self):
self.assertIsInstance(_thread._set_sentinel(), _thread.LockType,
"_thread._set_sentinel() did not return a "
"LockType instance.")
def test_interrupt_main(self):
#Calling start_new_thread with a function that executes interrupt_main
# should raise KeyboardInterrupt upon completion.
def call_interrupt():
_thread.interrupt_main()
self.assertRaises(KeyboardInterrupt,
_thread.start_new_thread,
call_interrupt,
tuple())
def test_interrupt_in_main(self):
self.assertRaises(KeyboardInterrupt, _thread.interrupt_main)
def test_stack_size_None(self):
retval = _thread.stack_size(None)
self.assertEqual(retval, 0)
def test_stack_size_not_None(self):
with self.assertRaises(_thread.error) as cm:
_thread.stack_size("")
self.assertEqual(cm.exception.args[0],
"setting thread stack size not supported")
class ThreadTests(unittest.TestCase):
"""Test thread creation."""
def test_arg_passing(self):
#Make sure that parameter passing works.
def arg_tester(queue, arg1=False, arg2=False):
"""Use to test _thread.start_new_thread() passes args properly."""
queue.put((arg1, arg2))
testing_queue = queue.Queue(1)
_thread.start_new_thread(arg_tester, (testing_queue, True, True))
result = testing_queue.get()
self.assertTrue(result[0] and result[1],
"Argument passing for thread creation "
"using tuple failed")
_thread.start_new_thread(
arg_tester,
tuple(),
{'queue':testing_queue, 'arg1':True, 'arg2':True})
result = testing_queue.get()
self.assertTrue(result[0] and result[1],
"Argument passing for thread creation "
"using kwargs failed")
_thread.start_new_thread(
arg_tester,
(testing_queue, True),
{'arg2':True})
result = testing_queue.get()
self.assertTrue(result[0] and result[1],
"Argument passing for thread creation using both tuple"
" and kwargs failed")
def test_multi_thread_creation(self):
def queue_mark(queue, delay):
time.sleep(delay)
queue.put(_thread.get_ident())
thread_count = 5
testing_queue = queue.Queue(thread_count)
if support.verbose:
print()
print("*** Testing multiple thread creation "
"(will take approx. %s to %s sec.) ***" % (
DELAY, thread_count))
for count in range(thread_count):
if DELAY:
local_delay = round(random.random(), 1)
else:
local_delay = 0
_thread.start_new_thread(queue_mark,
(testing_queue, local_delay))
time.sleep(DELAY)
if support.verbose:
print('done')
self.assertEqual(testing_queue.qsize(), thread_count,
"Not all %s threads executed properly "
"after %s sec." % (thread_count, DELAY))
def test_args_not_tuple(self):
"""
Test invoking start_new_thread() with a non-tuple value for "args".
Expect TypeError with a meaningful error message to be raised.
"""
with self.assertRaises(TypeError) as cm:
_thread.start_new_thread(mock.Mock(), [])
self.assertEqual(cm.exception.args[0], "2nd arg must be a tuple")
def test_kwargs_not_dict(self):
"""
Test invoking start_new_thread() with a non-dict value for "kwargs".
Expect TypeError with a meaningful error message to be raised.
"""
with self.assertRaises(TypeError) as cm:
_thread.start_new_thread(mock.Mock(), tuple(), kwargs=[])
self.assertEqual(cm.exception.args[0], "3rd arg must be a dict")
def test_SystemExit(self):
"""
Test invoking start_new_thread() with a function that raises
SystemExit.
The exception should be discarded.
"""
func = mock.Mock(side_effect=SystemExit())
try:
_thread.start_new_thread(func, tuple())
except SystemExit:
self.fail("start_new_thread raised SystemExit.")
@mock.patch('traceback.print_exc')
def test_RaiseException(self, mock_print_exc):
"""
Test invoking start_new_thread() with a function that raises exception.
The exception should be discarded and the traceback should be printed
via traceback.print_exc()
"""
func = mock.Mock(side_effect=Exception)
_thread.start_new_thread(func, tuple())
self.assertTrue(mock_print_exc.called)

View File

@ -1,60 +0,0 @@
from test import support
import unittest
import dummy_threading as _threading
import time
class DummyThreadingTestCase(unittest.TestCase):
class TestThread(_threading.Thread):
def run(self):
global running
global sema
global mutex
# Uncomment if testing another module, such as the real 'threading'
# module.
#delay = random.random() * 2
delay = 0
if support.verbose:
print('task', self.name, 'will run for', delay, 'sec')
sema.acquire()
mutex.acquire()
running += 1
if support.verbose:
print(running, 'tasks are running')
mutex.release()
time.sleep(delay)
if support.verbose:
print('task', self.name, 'done')
mutex.acquire()
running -= 1
if support.verbose:
print(self.name, 'is finished.', running, 'tasks are running')
mutex.release()
sema.release()
def setUp(self):
self.numtasks = 10
global sema
sema = _threading.BoundedSemaphore(value=3)
global mutex
mutex = _threading.RLock()
global running
running = 0
self.threads = []
def test_tasks(self):
for i in range(self.numtasks):
t = self.TestThread(name="<thread %d>"%i)
self.threads.append(t)
t.start()
if support.verbose:
print('waiting for all tasks to complete')
for t in self.threads:
t.join()
if support.verbose:
print('all tasks done')
if __name__ == '__main__':
unittest.main()

View File

@ -12,10 +12,7 @@ from io import StringIO, BytesIO
from itertools import chain
from random import choice
from socket import getfqdn
try:
from threading import Thread
except ImportError:
from dummy_threading import Thread
from threading import Thread
import email
import email.policy

View File

@ -2,15 +2,12 @@ import enum
import inspect
import pydoc
import unittest
import threading
from collections import OrderedDict
from enum import Enum, IntEnum, EnumMeta, Flag, IntFlag, unique, auto
from io import StringIO
from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL
from test import support
try:
import threading
except ImportError:
threading = None
# for pickle tests
@ -1988,7 +1985,6 @@ class TestFlag(unittest.TestCase):
d = 6
self.assertEqual(repr(Bizarre(7)), '<Bizarre.d|c|b: 7>')
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.reap_threads
def test_unique_composite(self):
# override __eq__ to be identity only
@ -2339,7 +2335,6 @@ class TestIntFlag(unittest.TestCase):
for f in Open:
self.assertEqual(bool(f.value), bool(f))
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.reap_threads
def test_unique_composite(self):
# override __eq__ to be identity only

View File

@ -8,14 +8,10 @@ import sys
from test import support
from test.support import script_helper, is_android, requires_android_level
import tempfile
import threading
import unittest
from textwrap import dedent
try:
import threading
HAVE_THREADS = True
except ImportError:
HAVE_THREADS = False
try:
import _testcapi
except ImportError:
@ -154,7 +150,6 @@ class FaultHandlerTests(unittest.TestCase):
3,
'Segmentation fault')
@unittest.skipIf(not HAVE_THREADS, 'need threads')
def test_fatal_error_c_thread(self):
self.check_fatal_error("""
import faulthandler
@ -231,7 +226,7 @@ class FaultHandlerTests(unittest.TestCase):
2,
'xyz')
@unittest.skipIf(sys.platform.startswith('openbsd') and HAVE_THREADS,
@unittest.skipIf(sys.platform.startswith('openbsd'),
"Issue #12868: sigaltstack() doesn't work on "
"OpenBSD if Python is compiled with pthread")
@unittest.skipIf(not hasattr(faulthandler, '_stack_overflow'),
@ -456,7 +451,6 @@ class FaultHandlerTests(unittest.TestCase):
self.assertEqual(trace, expected)
self.assertEqual(exitcode, 0)
@unittest.skipIf(not HAVE_THREADS, 'need threads')
def check_dump_traceback_threads(self, filename):
"""
Call explicitly dump_traceback(all_threads=True) and check the output.

View File

@ -5,6 +5,7 @@ import _imp as imp
import os
import signal
import sys
import threading
import time
import unittest
@ -12,7 +13,6 @@ from test.fork_wait import ForkWait
from test.support import (reap_children, get_attribute,
import_module, verbose)
threading = import_module('threading')
# Skip test if fork does not exist.
get_attribute(os, 'fork')

View File

@ -10,6 +10,7 @@ import socket
import io
import errno
import os
import threading
import time
try:
import ssl
@ -19,7 +20,6 @@ except ImportError:
from unittest import TestCase, skipUnless
from test import support
from test.support import HOST, HOSTv6
threading = support.import_module('threading')
TIMEOUT = 3
# the dummy data returned by server over the data channel when

View File

@ -8,15 +8,12 @@ import pickle
from random import choice
import sys
from test import support
import threading
import time
import unittest
import unittest.mock
from weakref import proxy
import contextlib
try:
import threading
except ImportError:
threading = None
import functools
@ -1406,7 +1403,6 @@ class TestLRU:
for attr in self.module.WRAPPER_ASSIGNMENTS:
self.assertEqual(getattr(g, attr), getattr(f, attr))
@unittest.skipUnless(threading, 'This test requires threading.')
def test_lru_cache_threaded(self):
n, m = 5, 11
def orig(x, y):
@ -1455,7 +1451,6 @@ class TestLRU:
finally:
sys.setswitchinterval(orig_si)
@unittest.skipUnless(threading, 'This test requires threading.')
def test_lru_cache_threaded2(self):
# Simultaneous call with the same arguments
n, m = 5, 7
@ -1483,7 +1478,6 @@ class TestLRU:
pause.reset()
self.assertEqual(f.cache_info(), (0, (i+1)*n, m*n, i+1))
@unittest.skipUnless(threading, 'This test requires threading.')
def test_lru_cache_threaded3(self):
@self.module.lru_cache(maxsize=2)
def f(x):

View File

@ -8,11 +8,7 @@ import sys
import time
import gc
import weakref
try:
import threading
except ImportError:
threading = None
import threading
try:
from _testcapi import with_tp_del
@ -352,7 +348,6 @@ class GCTests(unittest.TestCase):
v = {1: v, 2: Ouch()}
gc.disable()
@unittest.skipUnless(threading, "test meaningless on builds without threads")
def test_trashcan_threads(self):
# Issue #13992: trashcan mechanism should be thread-safe
NESTING = 60

View File

@ -12,12 +12,6 @@ import sysconfig
import textwrap
import unittest
# Is this Python configured to support threads?
try:
import _thread
except ImportError:
_thread = None
from test import support
from test.support import run_unittest, findfile, python_is_optimized
@ -755,8 +749,6 @@ Traceback \(most recent call first\):
foo\(1, 2, 3\)
''')
@unittest.skipUnless(_thread,
"Python was compiled without thread support")
def test_threads(self):
'Verify that "py-bt" indicates threads that are waiting for the GIL'
cmd = '''
@ -794,8 +786,6 @@ id(42)
# Some older versions of gdb will fail with
# "Cannot find new threads: generic error"
# unless we add LD_PRELOAD=PATH-TO-libpthread.so.1 as a workaround
@unittest.skipUnless(_thread,
"Python was compiled without thread support")
def test_gc(self):
'Verify that "py-bt" indicates if a thread is garbage-collecting'
cmd = ('from gc import collect\n'
@ -822,8 +812,6 @@ id(42)
# Some older versions of gdb will fail with
# "Cannot find new threads: generic error"
# unless we add LD_PRELOAD=PATH-TO-libpthread.so.1 as a workaround
@unittest.skipUnless(_thread,
"Python was compiled without thread support")
def test_pycfunction(self):
'Verify that "py-bt" displays invocations of PyCFunction instances'
# Tested function must not be defined with METH_NOARGS or METH_O,

View File

@ -12,10 +12,7 @@ import hashlib
import itertools
import os
import sys
try:
import threading
except ImportError:
threading = None
import threading
import unittest
import warnings
from test import support
@ -738,7 +735,6 @@ class HashLibTestCase(unittest.TestCase):
m = hashlib.md5(b'x' * gil_minsize)
self.assertEqual(m.hexdigest(), 'cfb767f225d58469c5de3632a8803958')
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.reap_threads
def test_threaded_hashing(self):
# Updating the same hash object from several threads at once

View File

@ -22,12 +22,13 @@ import urllib.parse
import tempfile
import time
import datetime
import threading
from unittest import mock
from io import BytesIO
import unittest
from test import support
threading = support.import_module('threading')
class NoLogRequestHandler:
def log_message(self, *args):

View File

@ -3,7 +3,6 @@ from test.support import import_module
# Skip test if _thread or _tkinter wasn't built, if idlelib is missing,
# or if tcl/tk is not the 8.5+ needed for ttk widgets.
import_module('threading') # imported by PyShell, imports _thread
tk = import_module('tkinter') # imports _tkinter
if tk.TkVersion < 8.5:
raise unittest.SkipTest("IDLE requires tk 8.5 or later.")

View File

@ -1,8 +1,4 @@
from test import support
# If we end up with a significant number of tests that don't require
# threading, this test module should be split. Right now we skip
# them all if we don't have threading.
threading = support.import_module('threading')
from contextlib import contextmanager
import imaplib
@ -10,6 +6,7 @@ import os.path
import socketserver
import time
import calendar
import threading
from test.support import (reap_threads, verbose, transient_internet,
run_with_tz, run_with_locale, cpython_only)

View File

@ -1,7 +1,3 @@
try:
import _thread
except ImportError:
_thread = None
import importlib
import importlib.util
import os
@ -23,7 +19,6 @@ def requires_load_dynamic(meth):
'imp.load_dynamic() required')(meth)
@unittest.skipIf(_thread is None, '_thread module is required')
class LockTests(unittest.TestCase):
"""Very basic test of import lock functions."""

View File

@ -3,134 +3,112 @@ from . import util as test_util
init = test_util.import_importlib('importlib')

import sys
import threading
import unittest
import weakref

from test import support

try:
    import threading
except ImportError:
    threading = None
else:
    from test import lock_tests

if threading is not None:

class ModuleLockAsRLockTests:
    locktype = classmethod(lambda cls: cls.LockType("some_lock"))

    # _is_owned() unsupported
    test__is_owned = None
    # acquire(blocking=False) unsupported
    test_try_acquire = None
    test_try_acquire_contended = None
    # `with` unsupported
    test_with = None
    # acquire(timeout=...) unsupported
    test_timeout = None
    # _release_save() unsupported
    test_release_save_unacquired = None
    # lock status in repr unsupported
    test_repr = None
    test_locked_repr = None

LOCK_TYPES = {kind: splitinit._bootstrap._ModuleLock
              for kind, splitinit in init.items()}

(Frozen_ModuleLockAsRLockTests,
 Source_ModuleLockAsRLockTests
 ) = test_util.test_both(ModuleLockAsRLockTests, lock_tests.RLockTests,
                         LockType=LOCK_TYPES)

else:
    LOCK_TYPES = {}

    class Frozen_ModuleLockAsRLockTests(unittest.TestCase):
        pass

    class Source_ModuleLockAsRLockTests(unittest.TestCase):
        pass

from test import lock_tests

if threading is not None:

class DeadlockAvoidanceTests:

    def setUp(self):
        try:
            self.old_switchinterval = sys.getswitchinterval()
            support.setswitchinterval(0.000001)
        except AttributeError:
            self.old_switchinterval = None

    def tearDown(self):
        if self.old_switchinterval is not None:
            sys.setswitchinterval(self.old_switchinterval)

    def run_deadlock_avoidance_test(self, create_deadlock):
        NLOCKS = 10
        locks = [self.LockType(str(i)) for i in range(NLOCKS)]
        pairs = [(locks[i], locks[(i+1)%NLOCKS]) for i in range(NLOCKS)]
        if create_deadlock:
            NTHREADS = NLOCKS
        else:
            NTHREADS = NLOCKS - 1
        barrier = threading.Barrier(NTHREADS)
        results = []

        def _acquire(lock):
            """Try to acquire the lock. Return True on success,
            False on deadlock."""
            try:
                lock.acquire()
            except self.DeadlockError:
                return False
            else:
                return True

        def f():
            a, b = pairs.pop()
            ra = _acquire(a)
            barrier.wait()
            rb = _acquire(b)
            results.append((ra, rb))
            if rb:
                b.release()
            if ra:
                a.release()

        lock_tests.Bunch(f, NTHREADS).wait_for_finished()
        self.assertEqual(len(results), NTHREADS)
        return results

    def test_deadlock(self):
        results = self.run_deadlock_avoidance_test(True)
        # At least one of the threads detected a potential deadlock on its
        # second acquire() call. It may be several of them, because the
        # deadlock avoidance mechanism is conservative.
        nb_deadlocks = results.count((True, False))
        self.assertGreaterEqual(nb_deadlocks, 1)
        self.assertEqual(results.count((True, True)), len(results) - nb_deadlocks)

    def test_no_deadlock(self):
        results = self.run_deadlock_avoidance_test(False)
        self.assertEqual(results.count((True, False)), 0)
        self.assertEqual(results.count((True, True)), len(results))

DEADLOCK_ERRORS = {kind: splitinit._bootstrap._DeadlockError
                   for kind, splitinit in init.items()}

(Frozen_DeadlockAvoidanceTests,
 Source_DeadlockAvoidanceTests
 ) = test_util.test_both(DeadlockAvoidanceTests,
                         LockType=LOCK_TYPES,
                         DeadlockError=DEADLOCK_ERRORS)

else:
    DEADLOCK_ERRORS = {}

    class Frozen_DeadlockAvoidanceTests(unittest.TestCase):
        pass

    class Source_DeadlockAvoidanceTests(unittest.TestCase):
        pass

class LifetimeTests:

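As the comment in test_deadlock above notes, the per-module lock's deadlock avoidance is conservative: an acquire() that would close a lock cycle raises _DeadlockError instead of blocking. A minimal sketch of such a cycle, using the same private importlib._bootstrap helpers the tests parametrize over (illustrative only; which thread raises is timing-dependent):

import threading
from importlib import _bootstrap

lock_a = _bootstrap._ModuleLock("mod_a")
lock_b = _bootstrap._ModuleLock("mod_b")
barrier = threading.Barrier(2)
detected = []

def worker(first, second):
    first.acquire()                      # hold one module lock
    barrier.wait()                       # both threads now own their first lock
    try:
        second.acquire()                 # closing the cycle triggers detection
    except _bootstrap._DeadlockError:
        detected.append(threading.get_ident())
    else:
        second.release()
    first.release()

t1 = threading.Thread(target=worker, args=(lock_a, lock_b))
t2 = threading.Thread(target=worker, args=(lock_b, lock_a))
t1.start(); t2.start()
t1.join(); t2.join()
assert detected                          # at least one acquire() saw the cycle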
View File

@ -28,6 +28,7 @@ import pickle
import random
import signal
import sys
import threading
import time
import unittest
import warnings
@ -40,10 +41,6 @@ from test.support.script_helper import assert_python_ok, run_python_until_end
import codecs
import io # C implementation of io
import _pyio as pyio # Python implementation of io
try:
import threading
except ImportError:
threading = None
try:
import ctypes
@ -443,8 +440,6 @@ class IOTest(unittest.TestCase):
(self.BytesIO, "rws"), (self.StringIO, "rws"),
)
for [test, abilities] in tests:
if test is pipe_writer and not threading:
continue # Skip subtest that uses a background thread
with self.subTest(test), test() as obj:
readable = "r" in abilities
self.assertEqual(obj.readable(), readable)
@ -1337,7 +1332,6 @@ class BufferedReaderTest(unittest.TestCase, CommonBufferedTests):
self.assertEqual(b"abcdefg", bufio.read())
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.requires_resource('cpu')
def test_threads(self):
try:
@ -1664,7 +1658,6 @@ class BufferedWriterTest(unittest.TestCase, CommonBufferedTests):
with self.open(support.TESTFN, "rb", buffering=0) as f:
self.assertEqual(f.read(), b"abc")
@unittest.skipUnless(threading, 'Threading required for this test.')
@support.requires_resource('cpu')
def test_threads(self):
try:
@ -3053,7 +3046,6 @@ class TextIOWrapperTest(unittest.TestCase):
self.assertEqual(f.errors, "replace")
@support.no_tracing
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_threads_write(self):
# Issue6750: concurrent writes could duplicate data
event = threading.Event()
@ -3804,7 +3796,6 @@ class CMiscIOTest(MiscIOTest):
b = bytearray(2)
self.assertRaises(ValueError, bufio.readinto, b)
@unittest.skipUnless(threading, 'Threading required for this test.')
def check_daemon_threads_shutdown_deadlock(self, stream_name):
# Issue #23309: deadlocks at shutdown should be avoided when a
# daemon thread and the main thread both write to a file.
@ -3868,7 +3859,6 @@ class SignalsTest(unittest.TestCase):
def alarm_interrupt(self, sig, frame):
1/0
@unittest.skipUnless(threading, 'Threading required for this test.')
def check_interrupted_write(self, item, bytes, **fdopen_kwargs):
"""Check that a partial write, when it gets interrupted, properly
invokes the signal handler, and bubbles up the exception raised
@ -3990,7 +3980,6 @@ class SignalsTest(unittest.TestCase):
self.check_interrupted_read_retry(lambda x: x,
mode="r")
@unittest.skipUnless(threading, 'Threading required for this test.')
def check_interrupted_write_retry(self, item, **fdopen_kwargs):
"""Check that a buffered write, when it gets interrupted (either
returning a partial result or EINTR), properly invokes the signal

View File

@ -42,22 +42,19 @@ import tempfile
from test.support.script_helper import assert_python_ok
from test import support
import textwrap
import threading
import time
import unittest
import warnings
import weakref
try:
import threading
# The following imports are needed only for tests which
# require threading
import asyncore
from http.server import HTTPServer, BaseHTTPRequestHandler
import smtpd
from urllib.parse import urlparse, parse_qs
from socketserver import (ThreadingUDPServer, DatagramRequestHandler,
ThreadingTCPServer, StreamRequestHandler)
except ImportError:
threading = None
import asyncore
from http.server import HTTPServer, BaseHTTPRequestHandler
import smtpd
from urllib.parse import urlparse, parse_qs
from socketserver import (ThreadingUDPServer, DatagramRequestHandler,
ThreadingTCPServer, StreamRequestHandler)
try:
import win32evtlog, win32evtlogutil, pywintypes
except ImportError:
@ -625,7 +622,6 @@ class HandlerTest(BaseTest):
os.unlink(fn)
@unittest.skipIf(os.name == 'nt', 'WatchedFileHandler not appropriate for Windows.')
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_race(self):
# Issue #14632 refers.
def remove_loop(fname, tries):
@ -719,276 +715,274 @@ class StreamHandlerTest(BaseTest):
# -- The following section could be moved into a server_helper.py module
# -- if it proves to be of wider utility than just test_logging

if threading:

class TestSMTPServer(smtpd.SMTPServer):
    """
    This class implements a test SMTP server.

    :param addr: A (host, port) tuple which the server listens on.
                 You can specify a port value of zero: the server's
                 *port* attribute will hold the actual port number
                 used, which can be used in client connections.
    :param handler: A callable which will be called to process
                    incoming messages. The handler will be passed
                    the client address tuple, who the message is from,
                    a list of recipients and the message data.
    :param poll_interval: The interval, in seconds, used in the underlying
                          :func:`select` or :func:`poll` call by
                          :func:`asyncore.loop`.
    :param sockmap: A dictionary which will be used to hold
                    :class:`asyncore.dispatcher` instances used by
                    :func:`asyncore.loop`. This avoids changing the
                    :mod:`asyncore` module's global state.
    """

    def __init__(self, addr, handler, poll_interval, sockmap):
        smtpd.SMTPServer.__init__(self, addr, None, map=sockmap,
                                  decode_data=True)
        self.port = self.socket.getsockname()[1]
        self._handler = handler
        self._thread = None
        self.poll_interval = poll_interval

    def process_message(self, peer, mailfrom, rcpttos, data):
        """
        Delegates to the handler passed in to the server's constructor.

        Typically, this will be a test case method.
        :param peer: The client (host, port) tuple.
        :param mailfrom: The address of the sender.
        :param rcpttos: The addresses of the recipients.
        :param data: The message.
        """
        self._handler(peer, mailfrom, rcpttos, data)

    def start(self):
        """
        Start the server running on a separate daemon thread.
        """
        self._thread = t = threading.Thread(target=self.serve_forever,
                                            args=(self.poll_interval,))
        t.setDaemon(True)
        t.start()

    def serve_forever(self, poll_interval):
        """
        Run the :mod:`asyncore` loop until normal termination
        conditions arise.
        :param poll_interval: The interval, in seconds, used in the underlying
                              :func:`select` or :func:`poll` call by
                              :func:`asyncore.loop`.
        """
        try:
            asyncore.loop(poll_interval, map=self._map)
        except OSError:
            # On FreeBSD 8, closing the server repeatably
            # raises this error. We swallow it if the
            # server has been closed.
            if self.connected or self.accepting:
                raise

    def stop(self, timeout=None):
        """
        Stop the thread by closing the server instance.
        Wait for the server thread to terminate.

        :param timeout: How long to wait for the server thread
                        to terminate.
        """
        self.close()
        self._thread.join(timeout)
        asyncore.close_all(map=self._map, ignore_all=True)
        alive = self._thread.is_alive()
        self._thread = None
        if alive:
            self.fail("join() timed out")


class ControlMixin(object):
    """
    This mixin is used to start a server on a separate thread, and
    shut it down programmatically. Request handling is simplified - instead
    of needing to derive a suitable RequestHandler subclass, you just
    provide a callable which will be passed each received request to be
    processed.

    :param handler: A handler callable which will be called with a
                    single parameter - the request - in order to
                    process the request. This handler is called on the
                    server thread, effectively meaning that requests are
                    processed serially. While not quite Web scale ;-),
                    this should be fine for testing applications.
    :param poll_interval: The polling interval in seconds.
    """
    def __init__(self, handler, poll_interval):
        self._thread = None
        self.poll_interval = poll_interval
        self._handler = handler
        self.ready = threading.Event()

    def start(self):
        """
        Create a daemon thread to run the server, and start it.
        """
        self._thread = t = threading.Thread(target=self.serve_forever,
                                            args=(self.poll_interval,))
        t.setDaemon(True)
        t.start()

    def serve_forever(self, poll_interval):
        """
        Run the server. Set the ready flag before entering the
        service loop.
        """
        self.ready.set()
        super(ControlMixin, self).serve_forever(poll_interval)

    def stop(self, timeout=None):
        """
        Tell the server thread to stop, and wait for it to do so.

        :param timeout: How long to wait for the server thread
                        to terminate.
        """
        self.shutdown()
        if self._thread is not None:
            self._thread.join(timeout)
            alive = self._thread.is_alive()
            self._thread = None
            if alive:
                self.fail("join() timed out")
        self.server_close()
        self.ready.clear()


class TestHTTPServer(ControlMixin, HTTPServer):
    """
    An HTTP server which is controllable using :class:`ControlMixin`.

    :param addr: A tuple with the IP address and port to listen on.
    :param handler: A handler callable which will be called with a
                    single parameter - the request - in order to
                    process the request.
    :param poll_interval: The polling interval in seconds.
    :param log: Pass ``True`` to enable log messages.
    """
    def __init__(self, addr, handler, poll_interval=0.5,
                 log=False, sslctx=None):
        class DelegatingHTTPRequestHandler(BaseHTTPRequestHandler):
            def __getattr__(self, name, default=None):
                if name.startswith('do_'):
                    return self.process_request
                raise AttributeError(name)

            def process_request(self):
                self.server._handler(self)

            def log_message(self, format, *args):
                if log:
                    super(DelegatingHTTPRequestHandler,
                          self).log_message(format, *args)
        HTTPServer.__init__(self, addr, DelegatingHTTPRequestHandler)
        ControlMixin.__init__(self, handler, poll_interval)
        self.sslctx = sslctx

    def get_request(self):
        try:
            sock, addr = self.socket.accept()
            if self.sslctx:
                sock = self.sslctx.wrap_socket(sock, server_side=True)
        except OSError as e:
            # socket errors are silenced by the caller, print them here
            sys.stderr.write("Got an error:\n%s\n" % e)
            raise
        return sock, addr


class TestTCPServer(ControlMixin, ThreadingTCPServer):
    """
    A TCP server which is controllable using :class:`ControlMixin`.

    :param addr: A tuple with the IP address and port to listen on.
    :param handler: A handler callable which will be called with a single
                    parameter - the request - in order to process the request.
    :param poll_interval: The polling interval in seconds.
    :bind_and_activate: If True (the default), binds the server and starts it
                        listening. If False, you need to call
                        :meth:`server_bind` and :meth:`server_activate` at
                        some later time before calling :meth:`start`, so that
                        the server will set up the socket and listen on it.
    """

    allow_reuse_address = True

    def __init__(self, addr, handler, poll_interval=0.5,
                 bind_and_activate=True):
        class DelegatingTCPRequestHandler(StreamRequestHandler):

            def handle(self):
                self.server._handler(self)
        ThreadingTCPServer.__init__(self, addr, DelegatingTCPRequestHandler,
                                    bind_and_activate)
        ControlMixin.__init__(self, handler, poll_interval)

    def server_bind(self):
        super(TestTCPServer, self).server_bind()
        self.port = self.socket.getsockname()[1]


class TestUDPServer(ControlMixin, ThreadingUDPServer):
    """
    A UDP server which is controllable using :class:`ControlMixin`.

    :param addr: A tuple with the IP address and port to listen on.
    :param handler: A handler callable which will be called with a
                    single parameter - the request - in order to
                    process the request.
    :param poll_interval: The polling interval for shutdown requests,
                          in seconds.
    :bind_and_activate: If True (the default), binds the server and
                        starts it listening. If False, you need to
                        call :meth:`server_bind` and
                        :meth:`server_activate` at some later time
                        before calling :meth:`start`, so that the server will
                        set up the socket and listen on it.
    """
    def __init__(self, addr, handler, poll_interval=0.5,
                 bind_and_activate=True):
        class DelegatingUDPRequestHandler(DatagramRequestHandler):

            def handle(self):
                self.server._handler(self)

            def finish(self):
                data = self.wfile.getvalue()
                if data:
                    try:
                        super(DelegatingUDPRequestHandler, self).finish()
                    except OSError:
                        if not self.server._closed:
                            raise

        ThreadingUDPServer.__init__(self, addr,
                                    DelegatingUDPRequestHandler,
                                    bind_and_activate)
        ControlMixin.__init__(self, handler, poll_interval)
        self._closed = False

    def server_bind(self):
        super(TestUDPServer, self).server_bind()
        self.port = self.socket.getsockname()[1]

    def server_close(self):
        super(TestUDPServer, self).server_close()
        self._closed = True


if hasattr(socket, "AF_UNIX"):
    class TestUnixStreamServer(TestTCPServer):
        address_family = socket.AF_UNIX

    class TestUnixDatagramServer(TestUDPServer):
        address_family = socket.AF_UNIX

# - end of server_helper section
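A hypothetical usage sketch of the helpers above (not part of the patch): ControlMixin-based servers are driven through start()/ready/stop(), and the handler callable runs on the server thread.

received = []

def handler(request):
    # request is the DelegatingTCPRequestHandler created inside TestTCPServer
    received.append(request.rfile.readline())

server = TestTCPServer(('localhost', 0), handler, poll_interval=0.01)
server.start()
server.ready.wait()                      # set once serve_forever() is running
with socket.create_connection(('localhost', server.port)) as conn:
    conn.sendall(b'hello\n')
server.stop(timeout=2.0)                 # shuts the server down and joins its thread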
@unittest.skipUnless(threading, 'Threading required for this test.')
class SMTPHandlerTest(BaseTest):
TIMEOUT = 8.0
@ -1472,14 +1466,12 @@ class ConfigFileTest(BaseTest):
@unittest.skipIf(True, "FIXME: bpo-30830")
@unittest.skipUnless(threading, 'Threading required for this test.')
class SocketHandlerTest(BaseTest):
"""Test for SocketHandler objects."""
if threading:
server_class = TestTCPServer
address = ('localhost', 0)
def setUp(self):
"""Set up a TCP server to receive log messages, and a SocketHandler
@ -1573,12 +1565,11 @@ def _get_temp_domain_socket():
@unittest.skipIf(True, "FIXME: bpo-30830")
@unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required")
@unittest.skipUnless(threading, 'Threading required for this test.')
class UnixSocketHandlerTest(SocketHandlerTest):
"""Test for SocketHandler with unix sockets."""
if threading and hasattr(socket, "AF_UNIX"):
if hasattr(socket, "AF_UNIX"):
server_class = TestUnixStreamServer
def setUp(self):
@ -1591,14 +1582,12 @@ class UnixSocketHandlerTest(SocketHandlerTest):
support.unlink(self.address)
@unittest.skipIf(True, "FIXME: bpo-30830")
@unittest.skipUnless(threading, 'Threading required for this test.')
class DatagramHandlerTest(BaseTest):
"""Test for DatagramHandler."""
if threading:
server_class = TestUDPServer
address = ('localhost', 0)
def setUp(self):
"""Set up a UDP server to receive log messages, and a DatagramHandler
@ -1659,12 +1648,11 @@ class DatagramHandlerTest(BaseTest):
@unittest.skipIf(True, "FIXME: bpo-30830")
@unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required")
@unittest.skipUnless(threading, 'Threading required for this test.')
class UnixDatagramHandlerTest(DatagramHandlerTest):
"""Test for DatagramHandler using Unix sockets."""
if threading and hasattr(socket, "AF_UNIX"):
if hasattr(socket, "AF_UNIX"):
server_class = TestUnixDatagramServer
def setUp(self):
@ -1676,14 +1664,12 @@ class UnixDatagramHandlerTest(DatagramHandlerTest):
DatagramHandlerTest.tearDown(self)
support.unlink(self.address)
@unittest.skipUnless(threading, 'Threading required for this test.')
class SysLogHandlerTest(BaseTest):
"""Test for SysLogHandler using UDP."""
if threading:
server_class = TestUDPServer
address = ('localhost', 0)
def setUp(self):
"""Set up a UDP server to receive log messages, and a SysLogHandler
@ -1747,12 +1733,11 @@ class SysLogHandlerTest(BaseTest):
@unittest.skipIf(True, "FIXME: bpo-30830")
@unittest.skipUnless(hasattr(socket, "AF_UNIX"), "Unix sockets required")
@unittest.skipUnless(threading, 'Threading required for this test.')
class UnixSysLogHandlerTest(SysLogHandlerTest):
"""Test for SysLogHandler with Unix sockets."""
if threading and hasattr(socket, "AF_UNIX"):
if hasattr(socket, "AF_UNIX"):
server_class = TestUnixDatagramServer
def setUp(self):
@ -1767,7 +1752,6 @@ class UnixSysLogHandlerTest(SysLogHandlerTest):
@unittest.skipIf(True, "FIXME: bpo-30830")
@unittest.skipUnless(support.IPV6_ENABLED,
'IPv6 support required for this test.')
@unittest.skipUnless(threading, 'Threading required for this test.')
class IPv6SysLogHandlerTest(SysLogHandlerTest):
"""Test for SysLogHandler with IPv6 host."""
@ -1783,7 +1767,6 @@ class IPv6SysLogHandlerTest(SysLogHandlerTest):
self.server_class.address_family = socket.AF_INET
super(IPv6SysLogHandlerTest, self).tearDown()
@unittest.skipUnless(threading, 'Threading required for this test.')
class HTTPHandlerTest(BaseTest):
"""Test for HTTPHandler."""
@ -2892,7 +2875,6 @@ class ConfigDictTest(BaseTest):
# listen() uses ConfigSocketReceiver which is based
# on socketserver.ThreadingTCPServer
@unittest.skipIf(True, "FIXME: bpo-30830")
@unittest.skipUnless(threading, 'listen() needs threading to work')
def setup_via_listener(self, text, verify=None):
text = text.encode("utf-8")
# Ask for a randomly assigned port (by using port 0)
@ -2923,7 +2905,6 @@ class ConfigDictTest(BaseTest):
if t.is_alive():
self.fail("join() timed out")
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_listen_config_10_ok(self):
with support.captured_stdout() as output:
self.setup_via_listener(json.dumps(self.config10))
@ -2943,7 +2924,6 @@ class ConfigDictTest(BaseTest):
('ERROR', '4'),
], stream=output)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_listen_config_1_ok(self):
with support.captured_stdout() as output:
self.setup_via_listener(textwrap.dedent(ConfigFileTest.config1))
@ -2958,7 +2938,6 @@ class ConfigDictTest(BaseTest):
# Original logger output is empty.
self.assert_log_lines([])
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_listen_verify(self):
def verify_fail(stuff):
@ -3713,9 +3692,8 @@ class LogRecordTest(BaseTest):
def test_optional(self):
r = logging.makeLogRecord({})
NOT_NONE = self.assertIsNotNone
if threading:
NOT_NONE(r.thread)
NOT_NONE(r.threadName)
NOT_NONE(r.process)
NOT_NONE(r.processName)
log_threads = logging.logThreads

View File

@ -6,6 +6,8 @@ import unittest
import functools
import contextlib
import os.path
import threading
from test import support
from nntplib import NNTP, GroupInfo
import nntplib
@ -14,10 +16,7 @@ try:
import ssl
except ImportError:
ssl = None
try:
import threading
except ImportError:
threading = None
TIMEOUT = 30
certfile = os.path.join(os.path.dirname(__file__), 'keycert3.pem')
@ -1520,7 +1519,7 @@ class MockSslTests(MockSocketTests):
def nntp_class(*pos, **kw):
return nntplib.NNTP_SSL(*pos, ssl_context=bypass_context, **kw)
@unittest.skipUnless(threading, 'requires multithreading')
class LocalServerTests(unittest.TestCase):
def setUp(self):
sock = socket.socket()

View File

@ -22,15 +22,13 @@ import stat
import subprocess
import sys
import sysconfig
import threading
import time
import unittest
import uuid
import warnings
from test import support
try:
import threading
except ImportError:
threading = None
try:
import resource
except ImportError:
@ -2516,92 +2514,90 @@ class ProgramPriorityTests(unittest.TestCase):
raise
if threading is not None:

class SendfileTestServer(asyncore.dispatcher, threading.Thread):

    class Handler(asynchat.async_chat):

        def __init__(self, conn):
            asynchat.async_chat.__init__(self, conn)
            self.in_buffer = []
            self.closed = False
            self.push(b"220 ready\r\n")

        def handle_read(self):
            data = self.recv(4096)
            self.in_buffer.append(data)

        def get_data(self):
            return b''.join(self.in_buffer)

        def handle_close(self):
            self.close()
            self.closed = True

        def handle_error(self):
            raise

    def __init__(self, address):
        threading.Thread.__init__(self)
        asyncore.dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.bind(address)
        self.listen(5)
        self.host, self.port = self.socket.getsockname()[:2]
        self.handler_instance = None
        self._active = False
        self._active_lock = threading.Lock()

    # --- public API

    @property
    def running(self):
        return self._active

    def start(self):
        assert not self.running
        self.__flag = threading.Event()
        threading.Thread.start(self)
        self.__flag.wait()

    def stop(self):
        assert self.running
        self._active = False
        self.join()

    def wait(self):
        # wait for handler connection to be closed, then stop the server
        while not getattr(self.handler_instance, "closed", False):
            time.sleep(0.001)
        self.stop()

    # --- internals

    def run(self):
        self._active = True
        self.__flag.set()
        while self._active and asyncore.socket_map:
            self._active_lock.acquire()
            asyncore.loop(timeout=0.001, count=1)
            self._active_lock.release()
        asyncore.close_all()

    def handle_accept(self):
        conn, addr = self.accept()
        self.handler_instance = self.Handler(conn)

    def handle_connect(self):
        self.close()
    handle_read = handle_connect

    def writable(self):
        return 0

    def handle_error(self):
        raise
@unittest.skipUnless(threading is not None, "test needs threading module")
@unittest.skipUnless(hasattr(os, 'sendfile'), "test needs os.sendfile()")
class TestSendfile(unittest.TestCase):

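A hypothetical sketch (not from the patch) of how TestSendfile drives the SendfileTestServer helper above, on platforms that provide os.sendfile():

with open(support.TESTFN, 'wb') as f:
    f.write(b'x' * 4096)

server = SendfileTestServer((support.HOST, 0))
server.start()                                   # returns once run() is looping
client = socket.create_connection((server.host, server.port))
with open(support.TESTFN, 'rb') as f:
    os.sendfile(client.fileno(), f.fileno(), 0, 4096)
client.close()
server.wait()                                    # stops once the peer has closed
payload = server.handler_instance.get_data()     # bytes the server read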
View File

@ -1040,9 +1040,6 @@ class PdbTestCase(unittest.TestCase):
# invoking "continue" on a non-main thread triggered an exception
# inside signal.signal
# raises SkipTest if python was built without threads
support.import_module('threading')
with open(support.TESTFN, 'wb') as f:
f.write(textwrap.dedent("""
import threading

View File

@ -4,10 +4,7 @@ import os
import subprocess
import random
import select
try:
import threading
except ImportError:
threading = None
import threading
import time
import unittest
from test.support import TESTFN, run_unittest, reap_threads, cpython_only
@ -179,7 +176,6 @@ class PollTests(unittest.TestCase):
self.assertRaises(OverflowError, pollster.poll, INT_MAX + 1)
self.assertRaises(OverflowError, pollster.poll, UINT_MAX + 1)
@unittest.skipUnless(threading, 'Threading required for this test.')
@reap_threads
def test_threaded_poll(self):
r, w = os.pipe()

View File

@ -9,10 +9,10 @@ import asynchat
import socket
import os
import errno
import threading
from unittest import TestCase, skipUnless
from test import support as test_support
threading = test_support.import_module('threading')
HOST = test_support.HOST
PORT = 0

View File

@ -20,6 +20,7 @@ import urllib.parse
import xml.etree
import xml.etree.ElementTree
import textwrap
import threading
from io import StringIO
from collections import namedtuple
from test.support.script_helper import assert_python_ok
@ -30,10 +31,6 @@ from test.support import (
)
from test import pydoc_mod
try:
import threading
except ImportError:
threading = None
class nonascii:
'Це не латиниця'
@ -902,7 +899,6 @@ class TestDescriptions(unittest.TestCase):
"stat(path, *, dir_fd=None, follow_symlinks=True)")
@unittest.skipUnless(threading, 'Threading required for this test.')
class PydocServerTest(unittest.TestCase):
"""Tests for pydoc._start_server"""

View File

@ -1,10 +1,11 @@
# Some simple queue module tests, plus some failure conditions
# to ensure the Queue locks remain stable.
import queue
import threading
import time
import unittest
from test import support
threading = support.import_module('threading')
QUEUE_SIZE = 5

View File

@ -15,6 +15,7 @@ import sys
import sysconfig
import tempfile
import textwrap
import threading
import unittest
from test import libregrtest
from test import support
@ -741,12 +742,7 @@ class ArgsTestCase(BaseTestCase):
code = TEST_INTERRUPTED
test = self.create_test("sigint", code=code)
try:
import threading
tests = (False, True)
except ImportError:
tests = (False,)
for multiprocessing in tests:
for multiprocessing in (False, True):
if multiprocessing:
args = ("--slowest", "-j2", test)
else:

View File

@ -1,14 +1,11 @@
import io
import os
import threading
import unittest
import urllib.robotparser
from collections import namedtuple
from test import support
from http.server import BaseHTTPRequestHandler, HTTPServer
try:
import threading
except ImportError:
threading = None
class BaseRobotTest:
@ -255,7 +252,6 @@ class RobotHandler(BaseHTTPRequestHandler):
pass
@unittest.skipUnless(threading, 'threading required for this test')
class PasswordProtectedSiteTestCase(unittest.TestCase):
def setUp(self):

View File

@ -1,11 +1,9 @@
import queue
import sched
import threading
import time
import unittest
try:
import threading
except ImportError:
threading = None
TIMEOUT = 10
@ -58,7 +56,6 @@ class TestCase(unittest.TestCase):
scheduler.run()
self.assertEqual(l, [0.01, 0.02, 0.03, 0.04, 0.05])
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_enter_concurrent(self):
q = queue.Queue()
fun = q.put
@ -113,7 +110,6 @@ class TestCase(unittest.TestCase):
scheduler.run()
self.assertEqual(l, [0.02, 0.03, 0.04])
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_cancel_concurrent(self):
q = queue.Queue()
fun = q.put

View File

@ -5,14 +5,11 @@ import socket
import statistics
import subprocess
import sys
import threading
import time
import unittest
from test import support
from test.support.script_helper import assert_python_ok, spawn_python
try:
import threading
except ImportError:
threading = None
try:
import _testcapi
except ImportError:
@ -21,7 +18,6 @@ except ImportError:
class GenericTests(unittest.TestCase):
@unittest.skipIf(threading is None, "test needs threading module")
def test_enums(self):
for name in dir(signal):
sig = getattr(signal, name)
@ -807,7 +803,6 @@ class PendingSignalsTests(unittest.TestCase):
'need signal.sigwait()')
@unittest.skipUnless(hasattr(signal, 'pthread_sigmask'),
'need signal.pthread_sigmask()')
@unittest.skipIf(threading is None, "test needs threading module")
def test_sigwait_thread(self):
# Check that calling sigwait() from a thread doesn't suspend the whole
# process. A new interpreter is spawned to avoid problems when mixing

View File

@ -15,14 +15,11 @@ import time
import select
import errno
import textwrap
import threading
import unittest
from test import support, mock_socket
try:
import threading
except ImportError:
threading = None
HOST = support.HOST
@ -191,7 +188,6 @@ MSG_END = '------------ END MESSAGE ------------\n'
# test server times out, causing the test to fail.
# Test behavior of smtpd.DebuggingServer
@unittest.skipUnless(threading, 'Threading required for this test.')
class DebuggingServerTests(unittest.TestCase):
maxDiff = None
@ -570,7 +566,6 @@ class NonConnectingTests(unittest.TestCase):
# test response of client to a non-successful HELO message
@unittest.skipUnless(threading, 'Threading required for this test.')
class BadHELOServerTests(unittest.TestCase):
def setUp(self):
@ -590,7 +585,6 @@ class BadHELOServerTests(unittest.TestCase):
HOST, self.port, 'localhost', 3)
@unittest.skipUnless(threading, 'Threading required for this test.')
class TooLongLineTests(unittest.TestCase):
respdata = b'250 OK' + (b'.' * smtplib._MAXLINE * 2) + b'\n'
@ -835,7 +829,6 @@ class SimSMTPServer(smtpd.SMTPServer):
# Test various SMTP & ESMTP commands/behaviors that require a simulated server
# (i.e., something with more features than DebuggingServer)
@unittest.skipUnless(threading, 'Threading required for this test.')
class SMTPSimTests(unittest.TestCase):
def setUp(self):
@ -1091,7 +1084,6 @@ class SimSMTPUTF8Server(SimSMTPServer):
self.last_rcpt_options = rcpt_options
@unittest.skipUnless(threading, 'Threading required for this test.')
class SMTPUTF8SimTests(unittest.TestCase):
maxDiff = None
@ -1227,7 +1219,6 @@ class SimSMTPAUTHInitialResponseServer(SimSMTPServer):
channel_class = SimSMTPAUTHInitialResponseChannel
@unittest.skipUnless(threading, 'Threading required for this test.')
class SMTPAUTHInitialResponseSimTests(unittest.TestCase):
def setUp(self):
self.real_getfqdn = socket.getfqdn

View File

@ -21,6 +21,8 @@ import pickle
import struct
import random
import string
import _thread as thread
import threading
try:
import multiprocessing
except ImportError:
@ -35,12 +37,6 @@ MSG = 'Michael Gilfix was here\u1234\r\n'.encode('utf-8') ## test unicode string
VSOCKPORT = 1234
try:
import _thread as thread
import threading
except ImportError:
thread = None
threading = None
try:
import _socket
except ImportError:
@ -143,18 +139,17 @@ class ThreadSafeCleanupTestCase(unittest.TestCase):
with a recursive lock.
"""
if threading:

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._cleanup_lock = threading.RLock()

    def addCleanup(self, *args, **kwargs):
        with self._cleanup_lock:
            return super().addCleanup(*args, **kwargs)

    def doCleanups(self, *args, **kwargs):
        with self._cleanup_lock:
            return super().doCleanups(*args, **kwargs)
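An illustrative sketch (not in the patch) of why the recursive lock matters: worker threads spawned by a test can register cleanups concurrently with the main thread.

class ExampleThreadedTest(ThreadSafeCleanupTestCase):

    def test_cleanup_from_worker(self):
        def worker():
            sock = socket.socket()
            # addCleanup() runs under _cleanup_lock, so calling it from a
            # worker thread is serialized with the main thread's cleanups.
            self.addCleanup(sock.close)

        t = threading.Thread(target=worker)
        t.start()
        t.join()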
class SocketCANTest(unittest.TestCase):
@ -407,7 +402,6 @@ class ThreadedRDSSocketTest(SocketRDSTest, ThreadableTest):
ThreadableTest.clientTearDown(self)
@unittest.skipIf(fcntl is None, "need fcntl")
@unittest.skipUnless(thread, 'Threading required for this test.')
@unittest.skipUnless(HAVE_SOCKET_VSOCK,
'VSOCK sockets required for this test.')
@unittest.skipUnless(get_cid() != 2,
@ -1684,7 +1678,6 @@ class BasicCANTest(unittest.TestCase):
@unittest.skipUnless(HAVE_SOCKET_CAN, 'SocketCan required for this test.')
@unittest.skipUnless(thread, 'Threading required for this test.')
class CANTest(ThreadedCANSocketTest):
def __init__(self, methodName='runTest'):
@ -1838,7 +1831,6 @@ class BasicRDSTest(unittest.TestCase):
@unittest.skipUnless(HAVE_SOCKET_RDS, 'RDS sockets required for this test.')
@unittest.skipUnless(thread, 'Threading required for this test.')
class RDSTest(ThreadedRDSSocketTest):
def __init__(self, methodName='runTest'):
@ -1977,7 +1969,7 @@ class BasicVSOCKTest(unittest.TestCase):
s.getsockopt(socket.AF_VSOCK,
socket.SO_VM_SOCKETS_BUFFER_MIN_SIZE))
@unittest.skipUnless(thread, 'Threading required for this test.')
class BasicTCPTest(SocketConnectedTest):
def __init__(self, methodName='runTest'):
@ -2100,7 +2092,7 @@ class BasicTCPTest(SocketConnectedTest):
def _testDetach(self):
self.serv_conn.send(MSG)
@unittest.skipUnless(thread, 'Threading required for this test.')
class BasicUDPTest(ThreadedUDPSocketTest):
def __init__(self, methodName='runTest'):
@ -3697,17 +3689,14 @@ class SendrecvmsgUDPTestBase(SendrecvmsgDgramFlagsBase,
pass
@requireAttrs(socket.socket, "sendmsg")
@unittest.skipUnless(thread, 'Threading required for this test.')
class SendmsgUDPTest(SendmsgConnectionlessTests, SendrecvmsgUDPTestBase):
pass
@requireAttrs(socket.socket, "recvmsg")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgUDPTest(RecvmsgTests, SendrecvmsgUDPTestBase):
pass
@requireAttrs(socket.socket, "recvmsg_into")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgIntoUDPTest(RecvmsgIntoTests, SendrecvmsgUDPTestBase):
pass
@ -3724,21 +3713,18 @@ class SendrecvmsgUDP6TestBase(SendrecvmsgDgramFlagsBase,
@requireAttrs(socket.socket, "sendmsg")
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test.')
@requireSocket("AF_INET6", "SOCK_DGRAM")
@unittest.skipUnless(thread, 'Threading required for this test.')
class SendmsgUDP6Test(SendmsgConnectionlessTests, SendrecvmsgUDP6TestBase):
pass
@requireAttrs(socket.socket, "recvmsg")
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test.')
@requireSocket("AF_INET6", "SOCK_DGRAM")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgUDP6Test(RecvmsgTests, SendrecvmsgUDP6TestBase):
pass
@requireAttrs(socket.socket, "recvmsg_into")
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test.')
@requireSocket("AF_INET6", "SOCK_DGRAM")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgIntoUDP6Test(RecvmsgIntoTests, SendrecvmsgUDP6TestBase):
pass
@ -3746,7 +3732,6 @@ class RecvmsgIntoUDP6Test(RecvmsgIntoTests, SendrecvmsgUDP6TestBase):
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test.')
@requireAttrs(socket, "IPPROTO_IPV6")
@requireSocket("AF_INET6", "SOCK_DGRAM")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgRFC3542AncillaryUDP6Test(RFC3542AncillaryTest,
SendrecvmsgUDP6TestBase):
pass
@ -3755,7 +3740,6 @@ class RecvmsgRFC3542AncillaryUDP6Test(RFC3542AncillaryTest,
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 required for this test.')
@requireAttrs(socket, "IPPROTO_IPV6")
@requireSocket("AF_INET6", "SOCK_DGRAM")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgIntoRFC3542AncillaryUDP6Test(RecvmsgIntoMixin,
RFC3542AncillaryTest,
SendrecvmsgUDP6TestBase):
@ -3767,18 +3751,15 @@ class SendrecvmsgTCPTestBase(SendrecvmsgConnectedBase,
pass
@requireAttrs(socket.socket, "sendmsg")
@unittest.skipUnless(thread, 'Threading required for this test.')
class SendmsgTCPTest(SendmsgStreamTests, SendrecvmsgTCPTestBase):
pass
@requireAttrs(socket.socket, "recvmsg")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgTCPTest(RecvmsgTests, RecvmsgGenericStreamTests,
SendrecvmsgTCPTestBase):
pass
@requireAttrs(socket.socket, "recvmsg_into")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgIntoTCPTest(RecvmsgIntoTests, RecvmsgGenericStreamTests,
SendrecvmsgTCPTestBase):
pass
@ -3791,13 +3772,11 @@ class SendrecvmsgSCTPStreamTestBase(SendrecvmsgSCTPFlagsBase,
@requireAttrs(socket.socket, "sendmsg")
@requireSocket("AF_INET", "SOCK_STREAM", "IPPROTO_SCTP")
@unittest.skipUnless(thread, 'Threading required for this test.')
class SendmsgSCTPStreamTest(SendmsgStreamTests, SendrecvmsgSCTPStreamTestBase):
pass
@requireAttrs(socket.socket, "recvmsg")
@requireSocket("AF_INET", "SOCK_STREAM", "IPPROTO_SCTP")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgSCTPStreamTest(RecvmsgTests, RecvmsgGenericStreamTests,
SendrecvmsgSCTPStreamTestBase):
@ -3811,7 +3790,6 @@ class RecvmsgSCTPStreamTest(RecvmsgTests, RecvmsgGenericStreamTests,
@requireAttrs(socket.socket, "recvmsg_into")
@requireSocket("AF_INET", "SOCK_STREAM", "IPPROTO_SCTP")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgIntoSCTPStreamTest(RecvmsgIntoTests, RecvmsgGenericStreamTests,
SendrecvmsgSCTPStreamTestBase):
@ -3830,33 +3808,28 @@ class SendrecvmsgUnixStreamTestBase(SendrecvmsgConnectedBase,
@requireAttrs(socket.socket, "sendmsg")
@requireAttrs(socket, "AF_UNIX")
@unittest.skipUnless(thread, 'Threading required for this test.')
class SendmsgUnixStreamTest(SendmsgStreamTests, SendrecvmsgUnixStreamTestBase):
pass
@requireAttrs(socket.socket, "recvmsg")
@requireAttrs(socket, "AF_UNIX")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgUnixStreamTest(RecvmsgTests, RecvmsgGenericStreamTests,
SendrecvmsgUnixStreamTestBase):
pass
@requireAttrs(socket.socket, "recvmsg_into")
@requireAttrs(socket, "AF_UNIX")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgIntoUnixStreamTest(RecvmsgIntoTests, RecvmsgGenericStreamTests,
SendrecvmsgUnixStreamTestBase):
pass
@requireAttrs(socket.socket, "sendmsg", "recvmsg")
@requireAttrs(socket, "AF_UNIX", "SOL_SOCKET", "SCM_RIGHTS")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgSCMRightsStreamTest(SCMRightsTest, SendrecvmsgUnixStreamTestBase):
pass
@requireAttrs(socket.socket, "sendmsg", "recvmsg_into")
@requireAttrs(socket, "AF_UNIX", "SOL_SOCKET", "SCM_RIGHTS")
@unittest.skipUnless(thread, 'Threading required for this test.')
class RecvmsgIntoSCMRightsStreamTest(RecvmsgIntoMixin, SCMRightsTest,
SendrecvmsgUnixStreamTestBase):
pass
@ -3944,7 +3917,6 @@ class InterruptedRecvTimeoutTest(InterruptedTimeoutBase, UDPTestBase):
@requireAttrs(signal, "siginterrupt")
@unittest.skipUnless(hasattr(signal, "alarm") or hasattr(signal, "setitimer"),
"Don't have signal.alarm or signal.setitimer")
@unittest.skipUnless(thread, 'Threading required for this test.')
class InterruptedSendTimeoutTest(InterruptedTimeoutBase,
ThreadSafeCleanupTestCase,
SocketListeningTestMixin, TCPTestBase):
@ -3997,7 +3969,6 @@ class InterruptedSendTimeoutTest(InterruptedTimeoutBase,
self.checkInterruptedSend(self.serv_conn.sendmsg, [b"a"*512])
@unittest.skipUnless(thread, 'Threading required for this test.')
class TCPCloserTest(ThreadedTCPSocketTest):
def testClose(self):
@ -4017,7 +3988,7 @@ class TCPCloserTest(ThreadedTCPSocketTest):
self.cli.connect((HOST, self.port))
time.sleep(1.0)
@unittest.skipUnless(thread, 'Threading required for this test.')
class BasicSocketPairTest(SocketPairTest):
def __init__(self, methodName='runTest'):
@ -4052,7 +4023,7 @@ class BasicSocketPairTest(SocketPairTest):
msg = self.cli.recv(1024)
self.assertEqual(msg, MSG)
@unittest.skipUnless(thread, 'Threading required for this test.')
class NonBlockingTCPTests(ThreadedTCPSocketTest):
def __init__(self, methodName='runTest'):
@ -4180,7 +4151,7 @@ class NonBlockingTCPTests(ThreadedTCPSocketTest):
time.sleep(0.1)
self.cli.send(MSG)
@unittest.skipUnless(thread, 'Threading required for this test.')
class FileObjectClassTestCase(SocketConnectedTest):
"""Unit tests for the object returned by socket.makefile()
@ -4564,7 +4535,6 @@ class NetworkConnectionNoServer(unittest.TestCase):
socket.create_connection((HOST, 1234))
@unittest.skipUnless(thread, 'Threading required for this test.')
class NetworkConnectionAttributesTest(SocketTCPTest, ThreadableTest):
def __init__(self, methodName='runTest'):
@ -4633,7 +4603,7 @@ class NetworkConnectionAttributesTest(SocketTCPTest, ThreadableTest):
self.addCleanup(self.cli.close)
self.assertEqual(self.cli.gettimeout(), 30)
@unittest.skipUnless(thread, 'Threading required for this test.')
class NetworkConnectionBehaviourTest(SocketTCPTest, ThreadableTest):
def __init__(self, methodName='runTest'):
@ -4877,7 +4847,7 @@ class TestUnixDomain(unittest.TestCase):
self.addCleanup(support.unlink, path)
self.assertEqual(self.sock.getsockname(), path)
@unittest.skipUnless(thread, 'Threading required for this test.')
class BufferIOTest(SocketConnectedTest):
"""
Test the buffer versions of socket.recv() and socket.send().
@ -5050,7 +5020,6 @@ class TIPCThreadableTest(unittest.TestCase, ThreadableTest):
self.cli.close()
@unittest.skipUnless(thread, 'Threading required for this test.')
class ContextManagersTest(ThreadedTCPSocketTest):
def _testSocketClass(self):
@ -5312,7 +5281,6 @@ class TestSocketSharing(SocketTCPTest):
source.close()
@unittest.skipUnless(thread, 'Threading required for this test.')
class SendfileUsingSendTest(ThreadedTCPSocketTest):
"""
Test the send() implementation of socket.sendfile().
@ -5570,7 +5538,6 @@ class SendfileUsingSendTest(ThreadedTCPSocketTest):
meth, file, count=-1)
@unittest.skipUnless(thread, 'Threading required for this test.')
@unittest.skipUnless(hasattr(os, "sendfile"),
'os.sendfile() required for this test.')
class SendfileUsingSendfileTest(SendfileUsingSendTest):

View File

@ -9,15 +9,13 @@ import select
import signal
import socket
import tempfile
import threading
import unittest
import socketserver
import test.support
from test.support import reap_children, reap_threads, verbose
try:
import threading
except ImportError:
threading = None
test.support.requires("network")
@ -68,7 +66,6 @@ def simple_subprocess(testcase):
testcase.assertEqual(72 << 8, status)
@unittest.skipUnless(threading, 'Threading required for this test.')
class SocketServerTest(unittest.TestCase):
"""Test all socket servers."""
@ -306,12 +303,10 @@ class ErrorHandlerTest(unittest.TestCase):
BaseErrorTestServer(SystemExit)
self.check_result(handled=False)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_threading_handled(self):
ThreadingErrorTestServer(ValueError)
self.check_result(handled=True)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_threading_not_handled(self):
ThreadingErrorTestServer(SystemExit)
self.check_result(handled=False)
@ -396,7 +391,6 @@ class SocketWriterTest(unittest.TestCase):
self.assertIsInstance(server.wfile, io.BufferedIOBase)
self.assertEqual(server.wfile_fileno, server.request_fileno)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_write(self):
# Test that wfile.write() sends data immediately, and that it does
# not truncate sends when interrupted by a Unix signal

File diff suppressed because it is too large

View File

@ -14,6 +14,7 @@ import selectors
import sysconfig
import select
import shutil
import threading
import gc
import textwrap
@ -24,11 +25,6 @@ except ImportError:
else:
import ctypes.util
try:
import threading
except ImportError:
threading = None
try:
import _testcapi
except ImportError:
@ -1196,7 +1192,6 @@ class ProcessTestCase(BaseTestCase):
self.assertEqual(stderr, "")
self.assertEqual(proc.returncode, 0)
@unittest.skipIf(threading is None, "threading required")
def test_double_close_on_error(self):
# Issue #18851
fds = []
@ -1226,7 +1221,6 @@ class ProcessTestCase(BaseTestCase):
if exc is not None:
raise exc
@unittest.skipIf(threading is None, "threading required")
def test_threadsafe_wait(self):
"""Issue21291: Popen.wait() needs to be threadsafe for returncode."""
proc = subprocess.Popen([sys.executable, '-c',

View File

@ -10,15 +10,12 @@ import codecs
import gc
import sysconfig
import locale
import threading
# count the number of test runs, used to create unique
# strings to intern in test_intern()
numruns = 0
try:
import threading
except ImportError:
threading = None
class SysModuleTest(unittest.TestCase):
@ -172,7 +169,6 @@ class SysModuleTest(unittest.TestCase):
sys.setcheckinterval(n)
self.assertEqual(sys.getcheckinterval(), n)
@unittest.skipUnless(threading, 'Threading required for this test.')
def test_switchinterval(self):
self.assertRaises(TypeError, sys.setswitchinterval)
self.assertRaises(TypeError, sys.setswitchinterval, "a")
@ -348,21 +344,8 @@ class SysModuleTest(unittest.TestCase):
)
# sys._current_frames() is a CPython-only gimmick.
def test_current_frames(self):
have_threads = True
try:
import _thread
except ImportError:
have_threads = False
if have_threads:
self.current_frames_with_threads()
else:
self.current_frames_without_threads()
# Test sys._current_frames() in a WITH_THREADS build.
@test.support.reap_threads
def current_frames_with_threads(self):
def test_current_frames(self):
import threading
import traceback
@ -426,15 +409,6 @@ class SysModuleTest(unittest.TestCase):
leave_g.set()
t.join()
# Test sys._current_frames() when thread support doesn't exist.
def current_frames_without_threads(self):
# Not much happens here: there is only one thread, with artificial
# "thread id" 0.
d = sys._current_frames()
self.assertEqual(len(d), 1)
self.assertIn(0, d)
self.assertTrue(d[0] is sys._getframe())
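For reference, a minimal sketch (not from the patch) of the invariant the remaining test checks: every live thread, including the caller, shows up in sys._current_frames():

import sys
import threading

frames = sys._current_frames()
assert threading.get_ident() in frames           # the calling thread is listed
for ident, frame in frames.items():
    print(ident, frame.f_code.co_name)           # thread id -> its topmost frame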
def test_attributes(self):
self.assertIsInstance(sys.api_version, int)
self.assertIsInstance(sys.argv, list)
@ -516,8 +490,6 @@ class SysModuleTest(unittest.TestCase):
if not sys.platform.startswith('win'):
self.assertIsInstance(sys.abiflags, str)
@unittest.skipUnless(hasattr(sys, 'thread_info'),
'Threading required for this test.')
def test_thread_info(self):
info = sys.thread_info
self.assertEqual(len(info), 3)

View File

@ -1,11 +1,11 @@
import socket
import selectors
import telnetlib
import threading
import contextlib
from test import support
import unittest
threading = support.import_module('threading')
HOST = support.HOST

View File

@ -11,12 +11,12 @@ import importlib
import sys
import time
import shutil
import threading
import unittest
from unittest import mock
from test.support import (
verbose, import_module, run_unittest, TESTFN, reap_threads,
forget, unlink, rmtree, start_threads)
threading = import_module('threading')
def task(N, done, done_tasks, errors):
try:

View File

@ -19,9 +19,9 @@ FILES_PER_THREAD = 50
import tempfile
from test.support import start_threads, import_module
threading = import_module('threading')
import unittest
import io
import threading
from traceback import print_exc
startEvent = threading.Event()

View File

@ -9,8 +9,8 @@ from test.support.script_helper import assert_python_ok, assert_python_failure
import random
import sys
_thread = import_module('_thread')
threading = import_module('threading')
import _thread
import threading
import time
import unittest
import weakref

View File

@ -5,8 +5,8 @@ import weakref
import gc
# Modules under test
_thread = support.import_module('_thread')
threading = support.import_module('threading')
import _thread
import threading
import _threading_local

View File

@ -8,10 +8,6 @@ import sys
import sysconfig
import time
import unittest
try:
import threading
except ImportError:
threading = None
try:
import _testcapi
except ImportError:

View File

@ -7,10 +7,7 @@ from unittest.mock import patch
from test.support.script_helper import (assert_python_ok, assert_python_failure,
interpreter_requires_environment)
from test import support
try:
import threading
except ImportError:
threading = None
try:
import _testcapi
except ImportError:

View File

@ -4,13 +4,12 @@ import email
import urllib.parse
import urllib.request
import http.server
import threading
import unittest
import hashlib
from test import support
threading = support.import_module('threading')
try:
import ssl
except ImportError:
@ -276,7 +275,6 @@ class FakeProxyHandler(http.server.BaseHTTPRequestHandler):
# Test cases
@unittest.skipUnless(threading, "Threading required for this test.")
class BasicAuthTests(unittest.TestCase):
USER = "testUser"
PASSWD = "testPass"
@ -317,7 +315,6 @@ class BasicAuthTests(unittest.TestCase):
self.assertRaises(urllib.error.HTTPError, urllib.request.urlopen, self.server_url)
@unittest.skipUnless(threading, "Threading required for this test.")
class ProxyAuthTests(unittest.TestCase):
URL = "http://localhost"
@ -439,7 +436,6 @@ def GetRequestHandler(responses):
return FakeHTTPRequestHandler
@unittest.skipUnless(threading, "Threading required for this test.")
class TestUrlopen(unittest.TestCase):
"""Tests urllib.request.urlopen using the network.

View File

@ -15,15 +15,10 @@ import sys
import tempfile
from test.support import (captured_stdout, captured_stderr,
can_symlink, EnvironmentVarGuard, rmtree)
import threading
import unittest
import venv
try:
import threading
except ImportError:
threading = None
try:
import ctypes
except ImportError:
@ -420,8 +415,6 @@ class EnsurePipTest(BaseTest):
if not system_site_packages:
self.assert_pip_not_installed()
@unittest.skipUnless(threading, 'some dependencies of pip import threading'
' module unconditionally')
# Issue #26610: pip/pep425tags.py requires ctypes
@unittest.skipUnless(ctypes, 'pip requires ctypes')
def test_with_pip(self):

View File

@ -6,6 +6,7 @@ import weakref
import operator
import contextlib
import copy
import threading
import time
from test import support
@ -78,7 +79,6 @@ def collect_in_thread(period=0.0001):
"""
Ensure GC collections happen in a different thread, at a high frequency.
"""
threading = support.import_module('threading')
please_stop = False
def collect():

View File

@ -10,6 +10,7 @@ import xmlrpc.server
import http.client
import http, http.server
import socket
import threading
import re
import io
import contextlib
@ -19,10 +20,6 @@ try:
import gzip
except ImportError:
gzip = None
try:
import threading
except ImportError:
threading = None
alist = [{'astring': 'foo@bar.baz.spam',
'afloat': 7283.43,
@ -307,7 +304,6 @@ class XMLRPCTestCase(unittest.TestCase):
except OSError:
self.assertTrue(has_ssl)
@unittest.skipUnless(threading, "Threading required for this test.")
def test_keepalive_disconnect(self):
class RequestHandler(http.server.BaseHTTPRequestHandler):
protocol_version = "HTTP/1.1"
@ -747,7 +743,6 @@ def make_request_and_skipIf(condition, reason):
return make_request_and_skip
return decorator
@unittest.skipUnless(threading, 'Threading required for this test.')
class BaseServerTestCase(unittest.TestCase):
requestHandler = None
request_count = 1
@ -1206,7 +1201,6 @@ class FailingMessageClass(http.client.HTTPMessage):
return super().get(key, failobj)
@unittest.skipUnless(threading, 'Threading required for this test.')
class FailingServerTestCase(unittest.TestCase):
def setUp(self):
self.evt = threading.Event()

View File

@ -990,38 +990,12 @@ class Thread:
def _delete(self):
"Remove current thread from the dict of currently running threads."
# Notes about running with _dummy_thread:
#
# Must take care to not raise an exception if _dummy_thread is being
# used (and thus this module is being used as an instance of
# dummy_threading). _dummy_thread.get_ident() always returns 1 since
# there is only one thread if _dummy_thread is being used. Thus
# len(_active) is always <= 1 here, and any Thread instance created
# overwrites the (if any) thread currently registered in _active.
#
# An instance of _MainThread is always created by 'threading'. This
# gets overwritten the instant an instance of Thread is created; both
# threads return 1 from _dummy_thread.get_ident() and thus have the
# same key in the dict. So when the _MainThread instance created by
# 'threading' tries to clean itself up when atexit calls this method
# it gets a KeyError if another Thread instance was created.
#
# This all means that KeyError from trying to delete something from
# _active if dummy_threading is being used is a red herring. But
# since it isn't if dummy_threading is *not* being used then don't
# hide the exception.
try:
with _active_limbo_lock:
del _active[get_ident()]
# There must not be any python code between the previous line
# and after the lock is released. Otherwise a tracing function
# could try to acquire the lock again in the same thread, (in
# current_thread()), and would block.
except KeyError:
if 'dummy_threading' not in _sys.modules:
raise
with _active_limbo_lock:
del _active[get_ident()]
# There must not be any python code between the previous line
# and after the lock is released. Otherwise a tracing function
# could try to acquire the lock again in the same thread, (in
# current_thread()), and would block.
def join(self, timeout=None):
"""Wait until the thread terminates.

View File

@ -61,21 +61,15 @@ import dis
import pickle
from time import monotonic as _time
try:
import threading
except ImportError:
_settrace = sys.settrace
import threading
def _unsettrace():
sys.settrace(None)
else:
def _settrace(func):
threading.settrace(func)
sys.settrace(func)
def _settrace(func):
threading.settrace(func)
sys.settrace(func)
def _unsettrace():
sys.settrace(None)
threading.settrace(None)
def _unsettrace():
sys.settrace(None)
threading.settrace(None)
PRAGMA_NOCOVER = "#pragma NO COVER"

View File

@ -12,11 +12,7 @@ import stat
import shutil
import struct
import binascii
try:
import threading
except ImportError:
import dummy_threading as threading
import threading
try:
import zlib # We may need its compression method

View File

@ -220,7 +220,6 @@ LIBC= @LIBC@
SYSLIBS= $(LIBM) $(LIBC)
SHLIBS= @SHLIBS@
THREADOBJ= @THREADOBJ@
DLINCLDIR= @DLINCLDIR@
DYNLOADFILE= @DYNLOADFILE@
MACHDEP_OBJS= @MACHDEP_OBJS@
@ -354,6 +353,7 @@ PYTHON_OBJS= \
Python/structmember.o \
Python/symtable.o \
Python/sysmodule.o \
Python/thread.o \
Python/traceback.o \
Python/getopt.o \
Python/pystrcmp.o \
@ -365,7 +365,6 @@ PYTHON_OBJS= \
Python/$(DYNLOADFILE) \
$(LIBOBJS) \
$(MACHDEP_OBJS) \
$(THREADOBJ) \
$(DTRACE_OBJS)
@ -655,12 +654,10 @@ oldsharedmods: $(SHAREDMODS)
Makefile Modules/config.c: Makefile.pre \
$(srcdir)/Modules/config.c.in \
$(MAKESETUP) \
Modules/Setup.config \
Modules/Setup \
Modules/Setup.local
$(SHELL) $(MAKESETUP) -c $(srcdir)/Modules/config.c.in \
-s Modules \
Modules/Setup.config \
Modules/Setup.local \
Modules/Setup
@mv config.c Modules
@ -1421,7 +1418,6 @@ libainstall: @DEF_MAKE_RULE@ python-config
$(INSTALL_DATA) Makefile $(DESTDIR)$(LIBPL)/Makefile
$(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup
$(INSTALL_DATA) Modules/Setup.local $(DESTDIR)$(LIBPL)/Setup.local
$(INSTALL_DATA) Modules/Setup.config $(DESTDIR)$(LIBPL)/Setup.config
$(INSTALL_DATA) Misc/python.pc $(DESTDIR)$(LIBPC)/python-$(VERSION).pc
$(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup
$(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh
@ -1642,7 +1638,7 @@ distclean: clobber
if test "$$file" != "$(srcdir)/Lib/test/data/README"; then rm "$$file"; fi; \
done
-rm -f core Makefile Makefile.pre config.status \
Modules/Setup Modules/Setup.local Modules/Setup.config \
Modules/Setup Modules/Setup.local \
Modules/ld_so_aix Modules/python.exp Misc/python.pc \
Misc/python-config.sh
-rm -f python*-gdb.py

View File

@ -0,0 +1,5 @@
Remove support for building --without-threads.
This option is not really useful anymore in the 21st century. Removing lots
of conditional paths allows us to simplify the code base, including in
difficult-to-maintain low-level internal code.

View File

@ -1,10 +0,0 @@
# This file is transmogrified into Setup.config by config.status.
# The purpose of this file is to conditionally enable certain modules
# based on configure-time options.
# Threading
@USE_THREAD_MODULE@_thread _threadmodule.c
# The rest of the modules previously listed in this file are built
# by the setup.py script in Python 2.1 and later.

View File

@ -123,6 +123,7 @@ atexit atexitmodule.c # Register functions to be run at interpreter-shutdow
_signal signalmodule.c
_stat _stat.c # stat.h interface
time timemodule.c # -lm # time operations and variables
_thread _threadmodule.c # low-level threading interface
# access to ISO C locale support
_locale _localemodule.c # -lintl
@ -216,8 +217,6 @@ _symtable symtablemodule.c
# The crypt module is now disabled by default because it breaks builds
# on many systems (where -lcrypt is needed), e.g. Linux (I believe).
#
# First, look at Setup.config; configure may have set this for you.
#_crypt _cryptmodule.c # -lcrypt # crypt(3); needs -lcrypt on some systems
@ -308,8 +307,6 @@ _symtable symtablemodule.c
# Curses support, requiring the System V version of curses, often
# provided by the ncurses library. e.g. on Linux, link with -lncurses
# instead of -lcurses).
#
# First, look at Setup.config; configure may have set this for you.
#_curses _cursesmodule.c -lcurses -ltermcap
# Wrapper for the panel library that's part of ncurses and SYSV curses.
@ -323,18 +320,9 @@ _symtable symtablemodule.c
# implementation independent wrapper for these; dbm/dumb.py provides
# similar functionality (but slower of course) implemented in Python.
# The standard Unix dbm module has been moved to Setup.config so that
# it will be compiled as a shared library by default. Compiling it as
# a built-in module causes conflicts with the pybsddb3 module since it
# creates a static dependency on an out-of-date version of db.so.
#
# First, look at Setup.config; configure may have set this for you.
#_dbm _dbmmodule.c # dbm(3) may require -lndbm or similar
# Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm:
#
# First, look at Setup.config; configure may have set this for you.
#_gdbm _gdbmmodule.c -I/usr/local/include -L/usr/local/lib -lgdbm

View File

@ -15,9 +15,7 @@
#include "Python.h"
#include "pystrhex.h"
#ifdef WITH_THREAD
#include "pythread.h"
#endif
#include "../hashlib.h"
#include "blake2ns.h"
@ -41,9 +39,7 @@ typedef struct {
PyObject_HEAD
blake2b_param param;
blake2b_state state;
#ifdef WITH_THREAD
PyThread_type_lock lock;
#endif
} BLAKE2bObject;
#include "clinic/blake2b_impl.c.h"
@ -60,11 +56,9 @@ new_BLAKE2bObject(PyTypeObject *type)
{
BLAKE2bObject *self;
self = (BLAKE2bObject *)type->tp_alloc(type, 0);
#ifdef WITH_THREAD
if (self != NULL) {
self->lock = NULL;
}
#endif
return self;
}
@ -292,7 +286,6 @@ _blake2b_blake2b_update(BLAKE2bObject *self, PyObject *obj)
GET_BUFFER_VIEW_OR_ERROUT(obj, &buf);
#ifdef WITH_THREAD
if (self->lock == NULL && buf.len >= HASHLIB_GIL_MINSIZE)
self->lock = PyThread_allocate_lock();
@ -305,9 +298,6 @@ _blake2b_blake2b_update(BLAKE2bObject *self, PyObject *obj)
} else {
blake2b_update(&self->state, buf.buf, buf.len);
}
#else
blake2b_update(&self->state, buf.buf, buf.len);
#endif /* !WITH_THREAD */
PyBuffer_Release(&buf);
Py_RETURN_NONE;
@ -407,12 +397,10 @@ py_blake2b_dealloc(PyObject *self)
/* Try not to leave state in memory. */
secure_zero_memory(&obj->param, sizeof(obj->param));
secure_zero_memory(&obj->state, sizeof(obj->state));
#ifdef WITH_THREAD
if (obj->lock) {
PyThread_free_lock(obj->lock);
obj->lock = NULL;
}
#endif
PyObject_Del(self);
}
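For reference, the hashlib locking idiom that this file now compiles unconditionally looks roughly like the sketch below (not the module's exact code; BLAKE2bObject, blake2b_update() and HASHLIB_GIL_MINSIZE are assumed from the surrounding file): the per-object lock is created lazily once an update is large enough to be worth releasing the GIL for, and the hash core then runs without the GIL while holding that lock.

/* Sketch only: BLAKE2bObject, blake2b_update() and HASHLIB_GIL_MINSIZE
   are assumed from the file being patched. */
static void
blake2b_update_sketch(BLAKE2bObject *self, Py_buffer *buf)
{
    /* Create the lock lazily; if allocation fails we simply keep the GIL. */
    if (self->lock == NULL && buf->len >= HASHLIB_GIL_MINSIZE)
        self->lock = PyThread_allocate_lock();

    if (self->lock != NULL) {
        Py_BEGIN_ALLOW_THREADS
        PyThread_acquire_lock(self->lock, 1);
        blake2b_update(&self->state, buf->buf, buf->len);
        PyThread_release_lock(self->lock);
        Py_END_ALLOW_THREADS
    }
    else {
        /* Small update: hash while holding the GIL. */
        blake2b_update(&self->state, buf->buf, buf->len);
    }
}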

View File

@ -15,9 +15,7 @@
#include "Python.h"
#include "pystrhex.h"
#ifdef WITH_THREAD
#include "pythread.h"
#endif
#include "../hashlib.h"
#include "blake2ns.h"
@ -41,9 +39,7 @@ typedef struct {
PyObject_HEAD
blake2s_param param;
blake2s_state state;
#ifdef WITH_THREAD
PyThread_type_lock lock;
#endif
} BLAKE2sObject;
#include "clinic/blake2s_impl.c.h"
@ -60,11 +56,9 @@ new_BLAKE2sObject(PyTypeObject *type)
{
BLAKE2sObject *self;
self = (BLAKE2sObject *)type->tp_alloc(type, 0);
#ifdef WITH_THREAD
if (self != NULL) {
self->lock = NULL;
}
#endif
return self;
}
@ -292,7 +286,6 @@ _blake2s_blake2s_update(BLAKE2sObject *self, PyObject *obj)
GET_BUFFER_VIEW_OR_ERROUT(obj, &buf);
#ifdef WITH_THREAD
if (self->lock == NULL && buf.len >= HASHLIB_GIL_MINSIZE)
self->lock = PyThread_allocate_lock();
@ -305,9 +298,6 @@ _blake2s_blake2s_update(BLAKE2sObject *self, PyObject *obj)
} else {
blake2s_update(&self->state, buf.buf, buf.len);
}
#else
blake2s_update(&self->state, buf.buf, buf.len);
#endif /* !WITH_THREAD */
PyBuffer_Release(&buf);
Py_RETURN_NONE;
@ -407,12 +397,10 @@ py_blake2s_dealloc(PyObject *self)
/* Try not to leave state in memory. */
secure_zero_memory(&obj->param, sizeof(obj->param));
secure_zero_memory(&obj->state, sizeof(obj->state));
#ifdef WITH_THREAD
if (obj->lock) {
PyThread_free_lock(obj->lock);
obj->lock = NULL;
}
#endif
PyObject_Del(self);
}

View File

@ -5,9 +5,7 @@
#include "Python.h"
#include "structmember.h"
#ifdef WITH_THREAD
#include "pythread.h"
#endif
#include <bzlib.h>
#include <stdio.h>
@ -23,7 +21,6 @@
#endif /* ! BZ_CONFIG_ERROR */
#ifdef WITH_THREAD
#define ACQUIRE_LOCK(obj) do { \
if (!PyThread_acquire_lock((obj)->lock, 0)) { \
Py_BEGIN_ALLOW_THREADS \
@ -31,19 +28,13 @@
Py_END_ALLOW_THREADS \
} } while (0)
#define RELEASE_LOCK(obj) PyThread_release_lock((obj)->lock)
#else
#define ACQUIRE_LOCK(obj)
#define RELEASE_LOCK(obj)
#endif
typedef struct {
PyObject_HEAD
bz_stream bzs;
int flushed;
#ifdef WITH_THREAD
PyThread_type_lock lock;
#endif
} BZ2Compressor;
typedef struct {
@ -59,9 +50,7 @@ typedef struct {
separately. Conversion and looping is encapsulated in
decompress_buf() */
size_t bzs_avail_in_real;
#ifdef WITH_THREAD
PyThread_type_lock lock;
#endif
} BZ2Decompressor;
static PyTypeObject BZ2Compressor_Type;
@ -325,13 +314,11 @@ _bz2_BZ2Compressor___init___impl(BZ2Compressor *self, int compresslevel)
return -1;
}
#ifdef WITH_THREAD
self->lock = PyThread_allocate_lock();
if (self->lock == NULL) {
PyErr_SetString(PyExc_MemoryError, "Unable to allocate lock");
return -1;
}
#endif
self->bzs.opaque = NULL;
self->bzs.bzalloc = BZ2_Malloc;
@ -343,10 +330,8 @@ _bz2_BZ2Compressor___init___impl(BZ2Compressor *self, int compresslevel)
return 0;
error:
#ifdef WITH_THREAD
PyThread_free_lock(self->lock);
self->lock = NULL;
#endif
return -1;
}
@ -354,10 +339,8 @@ static void
BZ2Compressor_dealloc(BZ2Compressor *self)
{
BZ2_bzCompressEnd(&self->bzs);
#ifdef WITH_THREAD
if (self->lock != NULL)
PyThread_free_lock(self->lock);
#endif
Py_TYPE(self)->tp_free((PyObject *)self);
}
@ -651,13 +634,11 @@ _bz2_BZ2Decompressor___init___impl(BZ2Decompressor *self)
{
int bzerror;
#ifdef WITH_THREAD
self->lock = PyThread_allocate_lock();
if (self->lock == NULL) {
PyErr_SetString(PyExc_MemoryError, "Unable to allocate lock");
return -1;
}
#endif
self->needs_input = 1;
self->bzs_avail_in_real = 0;
@ -675,10 +656,8 @@ _bz2_BZ2Decompressor___init___impl(BZ2Decompressor *self)
error:
Py_CLEAR(self->unused_data);
#ifdef WITH_THREAD
PyThread_free_lock(self->lock);
self->lock = NULL;
#endif
return -1;
}
@ -689,10 +668,8 @@ BZ2Decompressor_dealloc(BZ2Decompressor *self)
PyMem_Free(self->input_buffer);
BZ2_bzDecompressEnd(&self->bzs);
Py_CLEAR(self->unused_data);
#ifdef WITH_THREAD
if (self->lock != NULL)
PyThread_free_lock(self->lock);
#endif
Py_TYPE(self)->tp_free((PyObject *)self);
}
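The ACQUIRE_LOCK/RELEASE_LOCK macros shown above are likewise kept and now compiled unconditionally. A minimal usage sketch (BZ2Compressor and the macros come from this file; do_compress_work() is a hypothetical placeholder for the libbz2 calls):

/* Sketch only: BZ2Compressor, ACQUIRE_LOCK and RELEASE_LOCK are from the
   file above; do_compress_work() is a placeholder. */
static PyObject *do_compress_work(BZ2Compressor *self);

static PyObject *
compressor_op_sketch(BZ2Compressor *self)
{
    PyObject *result;

    ACQUIRE_LOCK(self);   /* non-blocking try first; waits without the GIL if contended */
    result = do_compress_work(self);
    RELEASE_LOCK(self);
    return result;
}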

View File

@ -5410,9 +5410,7 @@ PyInit__ctypes(void)
ob_type is the metatype (the 'type'), defaults to PyType_Type,
tp_base is the base type, defaults to 'object' aka PyBaseObject_Type.
*/
#ifdef WITH_THREAD
PyEval_InitThreads();
#endif
m = PyModule_Create(&_ctypesmodule);
if (!m)
return NULL;
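Since threads are always available now, an extension module's init function can call PyEval_InitThreads() without a guard, as PyInit__ctypes does above. A self-contained sketch with a made-up module name:

#include "Python.h"

static struct PyModuleDef example_module = {
    PyModuleDef_HEAD_INIT, "example", NULL, -1, NULL
};

PyMODINIT_FUNC
PyInit_example(void)
{
    PyEval_InitThreads();   /* ensures the GIL exists; safe to call repeatedly */
    return PyModule_Create(&example_module);
}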

View File

@ -137,9 +137,7 @@ static void _CallPythonObject(void *mem,
Py_ssize_t nArgs;
PyObject *error_object = NULL;
int *space;
#ifdef WITH_THREAD
PyGILState_STATE state = PyGILState_Ensure();
#endif
nArgs = PySequence_Length(converters);
/* Hm. What to return in case of error?
@ -281,9 +279,7 @@ if (x == NULL) _PyTraceback_Add(what, "_ctypes/callbacks.c", __LINE__ - 1), PyEr
Py_XDECREF(result);
Done:
Py_XDECREF(arglist);
#ifdef WITH_THREAD
PyGILState_Release(state);
#endif
}
static void closure_fcn(ffi_cif *cif,
@ -347,7 +343,7 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable,
assert(CThunk_CheckExact((PyObject *)p));
p->pcl_write = ffi_closure_alloc(sizeof(ffi_closure),
&p->pcl_exec);
&p->pcl_exec);
if (p->pcl_write == NULL) {
PyErr_NoMemory();
goto error;
@ -397,8 +393,8 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable,
result = ffi_prep_closure(p->pcl_write, &p->cif, closure_fcn, p);
#else
result = ffi_prep_closure_loc(p->pcl_write, &p->cif, closure_fcn,
p,
p->pcl_exec);
p,
p->pcl_exec);
#endif
if (result != FFI_OK) {
PyErr_Format(PyExc_RuntimeError,
@ -422,9 +418,7 @@ CThunkObject *_ctypes_alloc_callback(PyObject *callable,
static void LoadPython(void)
{
if (!Py_IsInitialized()) {
#ifdef WITH_THREAD
PyEval_InitThreads();
#endif
Py_Initialize();
}
}
@ -495,18 +489,12 @@ STDAPI DllGetClassObject(REFCLSID rclsid,
LPVOID *ppv)
{
long result;
#ifdef WITH_THREAD
PyGILState_STATE state;
#endif
LoadPython();
#ifdef WITH_THREAD
state = PyGILState_Ensure();
#endif
result = Call_GetClassObject(rclsid, riid, ppv);
#ifdef WITH_THREAD
PyGILState_Release(state);
#endif
return result;
}
@ -558,13 +546,9 @@ long Call_CanUnloadNow(void)
STDAPI DllCanUnloadNow(void)
{
long result;
#ifdef WITH_THREAD
PyGILState_STATE state = PyGILState_Ensure();
#endif
result = Call_CanUnloadNow();
#ifdef WITH_THREAD
PyGILState_Release(state);
#endif
return result;
}
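The pattern whose #ifdef guards are removed above is the standard one for C callbacks that may fire on a thread the interpreter has never seen: bracket every use of the C API with PyGILState_Ensure()/PyGILState_Release(). A minimal, self-contained sketch (the callable is passed through the void pointer purely for illustration):

#include "Python.h"

static void
callback_sketch(void *arg)
{
    PyGILState_STATE state = PyGILState_Ensure();   /* attach to the interpreter, acquire the GIL */

    PyObject *callable = (PyObject *)arg;
    PyObject *result = PyObject_CallFunction(callable, NULL);
    if (result == NULL)
        PyErr_WriteUnraisable(callable);            /* exceptions cannot propagate out of here */
    Py_XDECREF(result);

    PyGILState_Release(state);                      /* restore the previous GIL state */
}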

View File

@ -745,9 +745,7 @@ static int _call_function_pointer(int flags,
void *resmem,
int argcount)
{
#ifdef WITH_THREAD
PyThreadState *_save = NULL; /* For Py_BLOCK_THREADS and Py_UNBLOCK_THREADS */
#endif
PyObject *error_object = NULL;
int *space;
ffi_cif cif;
@ -786,10 +784,8 @@ static int _call_function_pointer(int flags,
if (error_object == NULL)
return -1;
}
#ifdef WITH_THREAD
if ((flags & FUNCFLAG_PYTHONAPI) == 0)
Py_UNBLOCK_THREADS
#endif
if (flags & FUNCFLAG_USE_ERRNO) {
int temp = space[0];
space[0] = errno;
@ -826,10 +822,8 @@ static int _call_function_pointer(int flags,
space[0] = errno;
errno = temp;
}
#ifdef WITH_THREAD
if ((flags & FUNCFLAG_PYTHONAPI) == 0)
Py_BLOCK_THREADS
#endif
Py_XDECREF(error_object);
#ifdef MS_WIN32
#ifndef DONT_USE_SEH
@ -982,9 +976,7 @@ GetComError(HRESULT errcode, GUID *riid, IUnknown *pIunk)
/* We absolutely have to release the GIL during COM method calls,
otherwise we may get a deadlock!
*/
#ifdef WITH_THREAD
Py_BEGIN_ALLOW_THREADS
#endif
hr = pIunk->lpVtbl->QueryInterface(pIunk, &IID_ISupportErrorInfo, (void **)&psei);
if (FAILED(hr))
@ -1008,9 +1000,7 @@ GetComError(HRESULT errcode, GUID *riid, IUnknown *pIunk)
pei->lpVtbl->Release(pei);
failed:
#ifdef WITH_THREAD
Py_END_ALLOW_THREADS
#endif
progid = NULL;
ProgIDFromCLSID(&guid, &progid);
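For reference, the GIL-release idiom that _call_function_pointer() now uses unconditionally looks roughly like this sketch: save the thread state before calling into foreign code and restore it afterwards, skipping the release when the callee is expected to re-enter the C API. FUNCFLAG_PYTHONAPI is the ctypes flag used above; run_foreign_call() is a placeholder.

/* Sketch only: FUNCFLAG_PYTHONAPI comes from ctypes' headers;
   run_foreign_call() stands in for the ffi_call() invocation. */
static void run_foreign_call(void);

static void
call_with_gil_released_sketch(int flags)
{
    PyThreadState *_save = NULL;   /* used by Py_UNBLOCK_THREADS / Py_BLOCK_THREADS */

    if ((flags & FUNCFLAG_PYTHONAPI) == 0)
        Py_UNBLOCK_THREADS         /* expands to _save = PyEval_SaveThread(); */

    run_foreign_call();

    if ((flags & FUNCFLAG_PYTHONAPI) == 0)
        Py_BLOCK_THREADS           /* expands to PyEval_RestoreThread(_save); */
}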

View File

@ -53,9 +53,7 @@ typedef struct {
PyObject_HEAD
PyObject *name; /* name of this hash algorithm */
EVP_MD_CTX *ctx; /* OpenSSL message digest context */
#ifdef WITH_THREAD
PyThread_type_lock lock; /* OpenSSL context lock */
#endif
} EVPobject;
@ -122,9 +120,7 @@ newEVPobject(PyObject *name)
/* save the name for .name to return */
Py_INCREF(name);
retval->name = name;
#ifdef WITH_THREAD
retval->lock = NULL;
#endif
return retval;
}
@ -153,10 +149,8 @@ EVP_hash(EVPobject *self, const void *vp, Py_ssize_t len)
static void
EVP_dealloc(EVPobject *self)
{
#ifdef WITH_THREAD
if (self->lock != NULL)
PyThread_free_lock(self->lock);
#endif
EVP_MD_CTX_free(self->ctx);
Py_XDECREF(self->name);
PyObject_Del(self);
@ -267,7 +261,6 @@ EVP_update(EVPobject *self, PyObject *args)
GET_BUFFER_VIEW_OR_ERROUT(obj, &view);
#ifdef WITH_THREAD
if (self->lock == NULL && view.len >= HASHLIB_GIL_MINSIZE) {
self->lock = PyThread_allocate_lock();
/* fail? lock = NULL and we fail over to non-threaded code. */
@ -282,9 +275,6 @@ EVP_update(EVPobject *self, PyObject *args)
} else {
EVP_hash(self, view.buf, view.len);
}
#else
EVP_hash(self, view.buf, view.len);
#endif
PyBuffer_Release(&view);
Py_RETURN_NONE;

View File

@ -230,10 +230,8 @@ typedef struct {
isn't ready for writing. */
Py_off_t write_end;
#ifdef WITH_THREAD
PyThread_type_lock lock;
volatile unsigned long owner;
#endif
Py_ssize_t buffer_size;
Py_ssize_t buffer_mask;
@ -267,8 +265,6 @@ typedef struct {
/* These macros protect the buffered object against concurrent operations. */
#ifdef WITH_THREAD
static int
_enter_buffered_busy(buffered *self)
{
@ -315,11 +311,6 @@ _enter_buffered_busy(buffered *self)
PyThread_release_lock(self->lock); \
} while(0);
#else
#define ENTER_BUFFERED(self) 1
#define LEAVE_BUFFERED(self)
#endif
#define CHECK_INITIALIZED(self) \
if (self->ok <= 0) { \
if (self->detached) { \
@ -401,12 +392,10 @@ buffered_dealloc(buffered *self)
PyMem_Free(self->buffer);
self->buffer = NULL;
}
#ifdef WITH_THREAD
if (self->lock) {
PyThread_free_lock(self->lock);
self->lock = NULL;
}
#endif
Py_CLEAR(self->dict);
Py_TYPE(self)->tp_free((PyObject *)self);
}
@ -753,7 +742,6 @@ _buffered_init(buffered *self)
PyErr_NoMemory();
return -1;
}
#ifdef WITH_THREAD
if (self->lock)
PyThread_free_lock(self->lock);
self->lock = PyThread_allocate_lock();
@ -762,7 +750,6 @@ _buffered_init(buffered *self)
return -1;
}
self->owner = 0;
#endif
/* Find out whether buffer_size is a power of 2 */
/* XXX is this optimization useful? */
for (n = self->buffer_size - 1; n & 1; n >>= 1)
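The buffered-I/O lock handling above follows the same shape. A minimal sketch of the acquire path (the buffered struct is the one defined in this file): try a non-blocking acquire, fall back to a blocking acquire with the GIL released, then record the owning thread so reentrant use can be diagnosed.

/* Sketch only: "buffered" is the struct from the file above. */
static int
enter_buffered_sketch(buffered *self)
{
    if (!PyThread_acquire_lock(self->lock, 0)) {
        /* Contended: wait for the lock without holding the GIL. */
        Py_BEGIN_ALLOW_THREADS
        PyThread_acquire_lock(self->lock, 1);
        Py_END_ALLOW_THREADS
    }
    self->owner = PyThread_get_thread_ident();   /* recorded for reentrancy checks */
    return 1;
}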

View File

@ -9,16 +9,13 @@
#include "Python.h"
#include "structmember.h"
#ifdef WITH_THREAD
#include "pythread.h"
#endif
#include <stdarg.h>
#include <string.h>
#include <lzma.h>
#ifdef WITH_THREAD
#define ACQUIRE_LOCK(obj) do { \
if (!PyThread_acquire_lock((obj)->lock, 0)) { \
Py_BEGIN_ALLOW_THREADS \
@ -26,10 +23,6 @@
Py_END_ALLOW_THREADS \
} } while (0)
#define RELEASE_LOCK(obj) PyThread_release_lock((obj)->lock)
#else
#define ACQUIRE_LOCK(obj)
#define RELEASE_LOCK(obj)
#endif
/* Container formats: */
@ -48,9 +41,7 @@ typedef struct {
lzma_allocator alloc;
lzma_stream lzs;
int flushed;
#ifdef WITH_THREAD
PyThread_type_lock lock;
#endif
} Compressor;
typedef struct {
@ -63,9 +54,7 @@ typedef struct {
char needs_input;
uint8_t *input_buffer;
size_t input_buffer_size;
#ifdef WITH_THREAD
PyThread_type_lock lock;
#endif
} Decompressor;
/* LZMAError class object. */
@ -757,13 +746,11 @@ Compressor_init(Compressor *self, PyObject *args, PyObject *kwargs)
self->alloc.free = PyLzma_Free;
self->lzs.allocator = &self->alloc;
#ifdef WITH_THREAD
self->lock = PyThread_allocate_lock();
if (self->lock == NULL) {
PyErr_SetString(PyExc_MemoryError, "Unable to allocate lock");
return -1;
}
#endif
self->flushed = 0;
switch (format) {
@ -790,10 +777,8 @@ Compressor_init(Compressor *self, PyObject *args, PyObject *kwargs)
break;
}
#ifdef WITH_THREAD
PyThread_free_lock(self->lock);
self->lock = NULL;
#endif
return -1;
}
@ -801,10 +786,8 @@ static void
Compressor_dealloc(Compressor *self)
{
lzma_end(&self->lzs);
#ifdef WITH_THREAD
if (self->lock != NULL)
PyThread_free_lock(self->lock);
#endif
Py_TYPE(self)->tp_free((PyObject *)self);
}
@ -1180,13 +1163,11 @@ _lzma_LZMADecompressor___init___impl(Decompressor *self, int format,
self->lzs.allocator = &self->alloc;
self->lzs.next_in = NULL;
#ifdef WITH_THREAD
self->lock = PyThread_allocate_lock();
if (self->lock == NULL) {
PyErr_SetString(PyExc_MemoryError, "Unable to allocate lock");
return -1;
}
#endif
self->check = LZMA_CHECK_UNKNOWN;
self->needs_input = 1;
@ -1230,10 +1211,8 @@ _lzma_LZMADecompressor___init___impl(Decompressor *self, int format,
error:
Py_CLEAR(self->unused_data);
#ifdef WITH_THREAD
PyThread_free_lock(self->lock);
self->lock = NULL;
#endif
return -1;
}
@ -1245,10 +1224,8 @@ Decompressor_dealloc(Decompressor *self)
lzma_end(&self->lzs);
Py_CLEAR(self->unused_data);
#ifdef WITH_THREAD
if (self->lock != NULL)
PyThread_free_lock(self->lock);
#endif
Py_TYPE(self)->tp_free((PyObject *)self);
}

Some files were not shown because too many files have changed in this diff.