2003-05-04 18:15:27 -03:00
|
|
|
"""Supporting definitions for the Python regression tests."""
|
2000-07-24 03:55:00 -03:00
|
|
|
|
2002-07-30 20:27:12 -03:00
|
|
|
if __name__ != 'test.test_support':
|
2007-04-25 15:54:36 -03:00
|
|
|
raise ImportError('test_support must be imported from the test package')
|
2002-07-30 20:27:12 -03:00
|
|
|
|
2007-03-12 23:34:09 -03:00
|
|
|
import contextlib
|
|
|
|
import errno
|
|
|
|
import socket
|
2001-03-21 14:26:33 -04:00
|
|
|
import sys
|
2007-04-25 15:54:36 -03:00
|
|
|
import os
|
|
|
|
import os.path
|
2008-02-24 14:47:03 -04:00
|
|
|
import shutil
|
2006-12-13 19:09:53 -04:00
|
|
|
import warnings
|
2007-04-25 15:54:36 -03:00
|
|
|
import unittest
|
2001-03-21 14:26:33 -04:00
|
|
|
|
2000-07-24 03:55:00 -03:00
|
|
|
class Error(Exception):
    """Base class for regression test exceptions."""
|
2000-07-24 03:55:00 -03:00
|
|
|
|
|
|
|
class TestFailed(Error):
    """Test failed."""
|
2000-07-24 03:55:00 -03:00
|
|
|
|
|
|
|
class TestSkipped(Error):
    """Test skipped.

    This can be raised to indicate that a test was deliberately
    skipped, but not because a feature wasn't available.  For
    example, if some resource can't be used, such as the network
    appears to be unavailable, this should be raised instead of
    TestFailed.
    """
|
1992-01-27 13:00:37 -04:00
|
|
|
|
2003-02-03 11:19:30 -04:00
|
|
|
class ResourceDenied(TestSkipped):
    """Test skipped because it requested a disallowed resource.

    This is raised when a test calls requires() for a resource that
    has not been enabled.  It is used to distinguish between expected
    and unexpected skips.
    """
|
|
|
|
|
2001-08-20 19:29:23 -03:00
|
|
|
# Execution-mode flags; regrtest.py overwrites these at startup.
verbose = 1              # Flag set to 0 by regrtest.py
use_resources = None     # Flag set to [] by regrtest.py
max_memuse = 0           # Disable bigmem tests (they will still be run with
                         # small sizes, to make sure they work.)
|
1996-12-19 22:58:22 -04:00
|
|
|
|
2001-09-25 17:05:11 -03:00
|
|
|
# _original_stdout holds whatever stdout was when regrtest began.  That
# may be "the real" stdout, IDLE's emulation of stdout, or whatever --
# the point is to keep some flavor of stdout the user can actually see,
# even after tests have swapped sys.stdout out.
_original_stdout = None

def record_original_stdout(stdout):
    """Remember *stdout* as the stream regrtest started with."""
    global _original_stdout
    _original_stdout = stdout

def get_original_stdout():
    """Return the stream recorded at startup, or sys.stdout if none."""
    if _original_stdout:
        return _original_stdout
    return sys.stdout
|
|
|
|
|
1992-01-27 13:00:37 -04:00
|
|
|
def unload(name):
    """Drop module *name* from sys.modules; a missing entry is not an error."""
    if name in sys.modules:
        del sys.modules[name]
|
1992-01-27 13:00:37 -04:00
|
|
|
|
2006-01-23 03:51:27 -04:00
|
|
|
def unlink(filename):
    """Remove *filename* if possible; a missing file is silently ignored."""
    try:
        os.unlink(filename)
    except OSError:
        # Either the file never existed or it is already gone -- both fine.
        pass
|
|
|
|
|
2008-02-24 14:47:03 -04:00
|
|
|
def rmtree(path):
    """Remove the directory tree at *path*, ignoring 'already gone' errors."""
    try:
        shutil.rmtree(path)
    except OSError:
        e = sys.exc_info()[1]
        # Unix returns ENOENT, Windows returns ESRCH.
        if e.errno not in (errno.ENOENT, errno.ESRCH):
            raise
|
|
|
|
|
1992-01-27 13:00:37 -04:00
|
|
|
def forget(modname):
    '''"Forget" a module was ever imported: drop it from sys.modules and
    delete its byte-compiled files.

    The .pyc and the .pyo are unlinked independently for every directory
    on sys.path, since either file may exist without the other.
    '''
    unload(modname)
    for dirname in sys.path:
        for bytecode_ext in ('pyc', 'pyo'):
            unlink(os.path.join(dirname, modname + os.extsep + bytecode_ext))
|
1992-01-27 13:00:37 -04:00
|
|
|
|
2002-12-03 23:26:57 -04:00
|
|
|
def is_resource_enabled(resource):
    """Test whether a resource is enabled.  Known resources are set by
    regrtest.py."""
    if use_resources is None:
        return False
    return resource in use_resources
|
|
|
|
|
2001-08-20 19:29:23 -03:00
|
|
|
def requires(resource, msg=None):
    """Raise ResourceDenied if the specified resource is not available.

    If the caller's module is __main__ (i.e. the test was launched
    directly rather than via regrtest) the check is skipped entirely.
    """
    # Treat direct execution as implicit consent to use the resource.
    caller_globals = sys._getframe().f_back.f_globals
    if caller_globals.get("__name__") == "__main__":
        return
    if is_resource_enabled(resource):
        return
    if msg is None:
        msg = "Use of the `%s' resource not enabled" % resource
    raise ResourceDenied(msg)
|
2001-08-20 19:29:23 -03:00
|
|
|
|
2006-06-11 23:13:21 -03:00
|
|
|
def bind_port(sock, host='', preferred_port=54321):
|
|
|
|
"""Try to bind the sock to a port. If we are running multiple
|
2008-02-25 01:33:33 -04:00
|
|
|
tests and we don't try multiple ports, the test can fail. This
|
2006-06-11 23:13:21 -03:00
|
|
|
makes the test more robust."""
|
|
|
|
|
2007-10-13 21:18:40 -03:00
|
|
|
# Find some random ports that hopefully no one is listening on.
|
|
|
|
# Ideally each test would clean up after itself and not continue listening
|
|
|
|
# on any ports. However, this isn't the case. The last port (0) is
|
|
|
|
# a stop-gap that asks the O/S to assign a port. Whenever the warning
|
|
|
|
# message below is printed, the test that is listening on the port should
|
|
|
|
# be fixed to close the socket at the end of the test.
|
|
|
|
# Another reason why we can't use a port is another process (possibly
|
|
|
|
# another instance of the test suite) is using the same port.
|
|
|
|
for port in [preferred_port, 9907, 10243, 32999, 0]:
|
2006-06-11 23:13:21 -03:00
|
|
|
try:
|
|
|
|
sock.bind((host, port))
|
2007-10-13 21:18:40 -03:00
|
|
|
if port == 0:
|
|
|
|
port = sock.getsockname()[1]
|
2006-06-11 23:13:21 -03:00
|
|
|
return port
|
|
|
|
except socket.error, (err, msg):
|
|
|
|
if err != errno.EADDRINUSE:
|
|
|
|
raise
|
|
|
|
print >>sys.__stderr__, \
|
|
|
|
' WARNING: failed to listen on port %d, trying another' % port
|
2007-04-25 15:54:36 -03:00
|
|
|
raise TestFailed('unable to find port to listen on')
|
2006-06-11 23:13:21 -03:00
|
|
|
|
1993-01-26 09:04:43 -04:00
|
|
|
FUZZ = 1e-6   # relative tolerance used by fcmp() when floats are involved

def fcmp(x, y): # fuzzy comparison function
    """Compare x and y like cmp(), but treat floats as equal when they
    differ by no more than (|x| + |y|) * FUZZ.

    Tuples/lists of the same type are compared element-wise and, when
    every shared element matches, by length.
    """
    if type(x) == type(0.0) or type(y) == type(0.0):
        try:
            # coerce() (Py2 only) brings the pair to a common numeric type.
            x, y = coerce(x, y)
            fuzz = (abs(x) + abs(y)) * FUZZ
            if abs(x-y) <= fuzz:
                return 0
        except:
            # Non-numeric operand mixed with a float: fall back to cmp().
            pass
    elif type(x) == type(y) and type(x) in (type(()), type([])):
        for i in range(min(len(x), len(y))):
            outcome = fcmp(x[i], y[i])
            if outcome != 0:
                return outcome
        return cmp(len(x), len(y))
    return cmp(x, y)
|
1993-01-26 09:04:43 -04:00
|
|
|
|
2001-08-17 15:39:25 -03:00
|
|
|
# Detect whether this interpreter was built with unicode support; a
# NameError on the bare `unicode` lookup means it was not.
try:
    unicode
    have_unicode = True
except NameError:
    have_unicode = False

# True when running under Jython (the JVM implementation of Python).
is_jython = sys.platform.startswith('java')
|
|
|
|
|
2001-03-23 14:04:02 -04:00
|
|
|
# Filename used for testing
if os.name == 'java':
    # Jython disallows @ in module names
    TESTFN = '$test'
elif os.name == 'riscos':
    # RISC OS cannot use '@' in filenames either.
    TESTFN = 'testfile'
else:
    TESTFN = '@test'
    # Unicode name only used if TEST_FN_ENCODING exists for the platform.
    if have_unicode:
        # Assuming sys.getfilesystemencoding()!=sys.getdefaultencoding()
        # TESTFN_UNICODE is a filename that can be encoded using the
        # file system encoding, but *not* with the default (ascii) encoding
        if isinstance('', unicode):
            # python -U
            # XXX perhaps unicode() should accept Unicode strings?
            TESTFN_UNICODE = "@test-\xe0\xf2"
        else:
            # 2 latin characters.
            TESTFN_UNICODE = unicode("@test-\xe0\xf2", "latin-1")
        TESTFN_ENCODING = sys.getfilesystemencoding()
        # TESTFN_UNICODE_UNENCODEABLE is a filename that should *not* be
        # able to be encoded by *either* the default or filesystem encoding.
        # This test really only makes sense on Windows NT platforms
        # which have special Unicode support in posixmodule.
        if (not hasattr(sys, "getwindowsversion") or
                sys.getwindowsversion()[3] < 2): # 0=win32s or 1=9x/ME
            TESTFN_UNICODE_UNENCODEABLE = None
        else:
            # Japanese characters (I think - from bug 846133)
            TESTFN_UNICODE_UNENCODEABLE = eval('u"@test-\u5171\u6709\u3055\u308c\u308b"')
            try:
                # XXX - Note - should be using TESTFN_ENCODING here - but for
                # Windows, "mbcs" currently always operates as if in
                # errors=ignore' mode - hence we get '?' characters rather than
                # the exception. 'Latin1' operates as we expect - ie, fails.
                # See [ 850997 ] mbcs encoding ignores errors
                TESTFN_UNICODE_UNENCODEABLE.encode("Latin1")
            except UnicodeEncodeError:
                # Good: the name really is unencodeable, as intended.
                pass
            else:
                print \
                'WARNING: The filename %r CAN be encoded by the filesystem. ' \
                'Unicode filename tests may not be effective' \
                % TESTFN_UNICODE_UNENCODEABLE
|
2002-11-02 20:35:53 -04:00
|
|
|
|
|
|
|
# Make sure we can write to TESTFN, try in /tmp if we can't
# (e.g. the current directory may be read-only when tests are run from
# an installed tree).  The probe file is removed again immediately.
fp = None
try:
    fp = open(TESTFN, 'w+')
except IOError:
    TMP_TESTFN = os.path.join('/tmp', TESTFN)
    try:
        fp = open(TMP_TESTFN, 'w+')
        TESTFN = TMP_TESTFN
        del TMP_TESTFN
    except IOError:
        print ('WARNING: tests will fail, unable to write to: %s or %s' %
               (TESTFN, TMP_TESTFN))
if fp is not None:
    fp.close()
    unlink(TESTFN)
del fp
|
2001-03-13 05:31:07 -04:00
|
|
|
|
1998-04-23 17:13:30 -03:00
|
|
|
def findfile(file, here=__file__):
    """Try to find a file on sys.path and the working directory.  If it is not
    found the argument passed to the function is returned (this does not
    necessarily signal failure; could still be the legitimate path)."""
    if os.path.isabs(file):
        return file
    # Look next to this module first, then along sys.path.
    search_dirs = [os.path.dirname(here)] + sys.path
    for candidate_dir in search_dirs:
        candidate = os.path.join(candidate_dir, file)
        if os.path.exists(candidate):
            return candidate
    return file
|
2001-01-17 15:11:13 -04:00
|
|
|
|
|
|
|
def verify(condition, reason='test failed'):
    """Verify that condition is true. If not, raise TestFailed.

    The optional argument reason can be given to provide
    a better error text.
    """
    if condition:
        return
    raise TestFailed(reason)
|
2001-02-19 11:35:26 -04:00
|
|
|
|
2001-10-30 19:20:46 -04:00
|
|
|
def vereq(a, b):
    """Raise TestFailed if a == b is false.

    This is better than verify(a == b) because, in case of failure, the
    error message incorporates repr(a) and repr(b) so you can see the
    inputs.

    Note that "not (a == b)" isn't necessarily the same as "a != b"; the
    former is tested.
    """
    if a == b:
        return
    raise TestFailed("%r == %r" % (a, b))
|
2001-10-30 19:20:46 -04:00
|
|
|
|
Get rid of the superstitious "~" in dict hashing's "i = (~hash) & mask".
The comment following used to say:
/* We use ~hash instead of hash, as degenerate hash functions, such
as for ints <sigh>, can have lots of leading zeros. It's not
really a performance risk, but better safe than sorry.
12-Dec-00 tim: so ~hash produces lots of leading ones instead --
what's the gain? */
That is, there was never a good reason for doing it. And to the contrary,
as explained on Python-Dev last December, it tended to make the *sum*
(i + incr) & mask (which is the first table index examined in case of
collision) the same "too often" across distinct hashes.
Changing to the simpler "i = hash & mask" reduced the number of string-dict
collisions (== # number of times we go around the lookup for-loop) from about
6 million to 5 million during a full run of the test suite (these are
approximate because the test suite does some random stuff from run to run).
The number of collisions in non-string dicts also decreased, but not as
dramatically.
Note that this may, for a given dict, change the order (wrt previous
releases) of entries exposed by .keys(), .values() and .items(). A number
of std tests suffered bogus failures as a result. For dicts keyed by
small ints, or (less so) by characters, the order is much more likely to be
in increasing order of key now; e.g.,
>>> d = {}
>>> for i in range(10):
... d[i] = i
...
>>> d
{0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9}
>>>
Unfortunately, people may latch on to that in small examples and draw a
bogus conclusion.
test_support.py
Moved test_extcall's sortdict() into test_support, made it stronger,
and imported sortdict into other std tests that needed it.
test_unicode.py
Excluded cp875 from the "roundtrip over range(128)" test, because
cp875 doesn't have a well-defined inverse for unicode("?", "cp875").
See Python-Dev for excruciating details.
Cookie.py
Changed various output functions to sort dicts before building
strings from them.
test_extcall
Fiddled the expected-result file. This remains sensitive to native
dict ordering, because, e.g., if there are multiple errors in a
keyword-arg dict (and test_extcall sets up many cases like that), the
specific error Python complains about first depends on native dict
ordering.
2001-05-12 21:19:31 -03:00
|
|
|
def sortdict(dict):
    """Like repr(dict), but in sorted order.

    The result is independent of the mapping's internal hash order, so
    it is safe to compare against a literal expected string in a test.
    """
    # sorted() is the idiomatic replacement for the manual
    # items-list-then-in-place-sort dance, and also copes with mappings
    # whose items() does not return a plain list.
    items = sorted(dict.items())
    reprpairs = ["%r: %r" % pair for pair in items]
    withcommas = ", ".join(reprpairs)
    return "{%s}" % withcommas
|
|
|
|
|
2006-10-28 10:10:17 -03:00
|
|
|
def check_syntax_error(testcase, statement):
    """Use *testcase* to assert that compiling *statement* raises SyntaxError."""
    try:
        compile(statement, '<test string>', 'exec')
    except SyntaxError:
        # Expected outcome: the statement is indeed invalid.
        return
    testcase.fail('Missing SyntaxError: "%s"' % statement)
|
2001-03-21 14:26:33 -04:00
|
|
|
|
2005-12-10 13:44:27 -04:00
|
|
|
def open_urlresource(url):
    """Return an open file object for *url*, caching the download locally.

    Requires the 'urlfetch' resource.  A copy already present in the
    current or parent directory is reused instead of hitting the network.
    """
    import urllib, urlparse

    requires('urlfetch')
    filename = urlparse.urlparse(url)[2].split('/')[-1] # '/': it's URL!

    # Prefer a previously fetched local copy.
    for candidate_dir in (os.path.curdir, os.path.pardir):
        cached = os.path.join(candidate_dir, filename)
        if os.path.exists(cached):
            return open(cached)

    print >> get_original_stdout(), '\tfetching %s ...' % url
    fn, _ = urllib.urlretrieve(url, filename)
    return open(fn)
|
2007-01-29 23:03:46 -04:00
|
|
|
|
2007-01-03 20:23:49 -04:00
|
|
|
|
2007-04-03 15:33:29 -03:00
|
|
|
class WarningMessage(object):
    "Holds the result of the latest showwarning() call"

    def __init__(self):
        # No warning recorded yet.
        self.message = self.category = None
        self.filename = self.lineno = None

    def _showwarning(self, message, category, filename, lineno, file=None):
        # Mirrors the warnings.showwarning() signature; *file* is ignored.
        self.message = message
        self.category = category
        self.filename = filename
        self.lineno = lineno
|
|
|
|
|
|
|
|
@contextlib.contextmanager
def catch_warning():
    """
    Guard the warnings filter from being permanently changed and record the
    data of the last warning that has been issued.

    Use like this:

        with catch_warning() as w:
            warnings.warn("foo")
            assert str(w.message) == "foo"
    """
    recorder = WarningMessage()
    saved_filters = warnings.filters[:]
    saved_showwarning = warnings.showwarning
    warnings.showwarning = recorder._showwarning
    try:
        yield recorder
    finally:
        # Undo both hooks no matter how the body exits.
        warnings.showwarning = saved_showwarning
        warnings.filters = saved_filters
|
|
|
|
|
2007-01-03 20:23:49 -04:00
|
|
|
class EnvironmentVarGuard(object):

    """Class to help protect the environment variable properly.  Can be used as
    a context manager."""

    def __init__(self):
        # Mutates os.environ directly; bookkeeping below records the undo.
        self._environ = os.environ
        self._unset = set()      # names we created and must delete on exit
        self._reset = dict()     # names we changed, mapped to old values

    def set(self, envvar, value):
        """Set *envvar* to *value*, remembering how to restore it."""
        if envvar in self._environ:
            self._reset[envvar] = self._environ[envvar]
        else:
            self._unset.add(envvar)
        self._environ[envvar] = value

    def unset(self, envvar):
        """Remove *envvar*, remembering its old value if it had one."""
        if envvar in self._environ:
            self._reset[envvar] = self._environ[envvar]
            del self._environ[envvar]

    def __enter__(self):
        return self

    def __exit__(self, *ignore_exc):
        # Restore changed values first, then drop the ones we created.
        for envvar, value in self._reset.iteritems():
            self._environ[envvar] = value
        for unset in self._unset:
            del self._environ[unset]
|
|
|
|
|
2007-03-08 19:58:11 -04:00
|
|
|
class TransientResource(object):

    """Raise ResourceDenied if an exception is raised while the context manager
    is in effect that matches the specified exception and attributes."""

    def __init__(self, exc, **kwargs):
        self.exc = exc
        self.attrs = kwargs

    def __enter__(self):
        return self

    def __exit__(self, type_=None, value=None, traceback=None):
        """If type_ is a subclass of self.exc and value has attributes matching
        self.attrs, raise ResourceDenied.  Otherwise let the exception
        propagate (if any)."""
        # NOTE(review): the issubclass argument order (self.exc vs type_)
        # looks reversed, but is preserved exactly as written.
        if type_ is not None and issubclass(self.exc, type_):
            matches = True
            for attr, expected in self.attrs.iteritems():
                if not hasattr(value, attr) or getattr(value, attr) != expected:
                    matches = False
                    break
            if matches:
                raise ResourceDenied("an optional resource is not available")
|
|
|
|
|
2001-03-21 14:26:33 -04:00
|
|
|
|
2007-03-12 23:34:09 -03:00
|
|
|
def transient_internet():
    """Return a context manager that raises ResourceDenied when various issues
    with the Internet connection manifest themselves as exceptions."""
    transients = (
        TransientResource(IOError, errno=errno.ETIMEDOUT),        # timed out
        TransientResource(socket.error, errno=errno.ECONNRESET),  # peer reset
        TransientResource(IOError, errno=errno.ECONNRESET),       # peer reset
    )
    return contextlib.nested(*transients)
|
2007-03-12 23:34:09 -03:00
|
|
|
|
|
|
|
|
2007-08-24 15:22:54 -03:00
|
|
|
@contextlib.contextmanager
def captured_output(stream_name):
    """Run the 'with' statement body using a StringIO object in place of a
    specific attribute on the sys module.
    Example use (with 'stream_name=stdout')::

       with captured_stdout() as s:
           print "hello"
       assert s.getvalue() == "hello"
    """
    import StringIO
    orig_stdout = getattr(sys, stream_name)
    setattr(sys, stream_name, StringIO.StringIO())
    try:
        yield getattr(sys, stream_name)
    finally:
        # Restore the real stream even if the body raised; without the
        # try/finally an exception would leave sys.stdout/sys.stderr
        # permanently replaced for every later test.
        setattr(sys, stream_name, orig_stdout)


def captured_stdout():
    """Shorthand: capture sys.stdout specifically."""
    return captured_output("stdout")
|
2007-08-24 15:22:54 -03:00
|
|
|
|
|
|
|
|
2006-04-30 08:13:56 -03:00
|
|
|
#=======================================================================
|
|
|
|
# Decorator for running a function in a different locale, correctly resetting
|
|
|
|
# it afterwards.
|
|
|
|
|
|
|
|
def run_with_locale(catstr, *locales):
    """Decorator: run the wrapped test with the first settable locale from
    *locales* for the category named by *catstr* (e.g. 'LC_ALL'), restoring
    the original locale afterwards."""
    def decorator(func):
        def inner(*args, **kwds):
            try:
                import locale
                category = getattr(locale, catstr)
                orig_locale = locale.setlocale(category)
            except AttributeError:
                # if the test author gives us an invalid category string
                raise
            except:
                # cannot retrieve original locale, so do nothing
                locale = orig_locale = None
            else:
                # Try each candidate locale in order; stop at the first
                # one the platform accepts.
                for loc in locales:
                    try:
                        locale.setlocale(category, loc)
                        break
                    except:
                        pass

            # now run the function, resetting the locale on exceptions
            try:
                return func(*args, **kwds)
            finally:
                if locale and orig_locale:
                    locale.setlocale(category, orig_locale)
        # Preserve the wrapped function's identity for test reporting.
        inner.func_name = func.func_name
        inner.__doc__ = func.__doc__
        return inner
    return decorator
|
|
|
|
|
2006-04-26 12:53:30 -03:00
|
|
|
#=======================================================================
# Big-memory-test support. Separate from 'resources' because memory use should be configurable.

# Some handy shorthands. Note that these are used for byte-limits as well
# as size-limits, in the various bigmem tests
_1M = 1024*1024
_1G = 1024 * _1M
_2G = 2 * _1G
|
|
|
|
|
2006-08-09 12:37:26 -03:00
|
|
|
# Hack to get at the maximum value an internal index can take.
# Slicing with an omitted upper bound makes the interpreter substitute
# the largest legal index value, which __getslice__ then returns as j.
# (Py2 only -- __getslice__ is gone in Py3.)
class _Dummy:
    def __getslice__(self, i, j):
        return j
MAX_Py_ssize_t = _Dummy()[:]
|
|
|
|
|
2006-04-26 12:53:30 -03:00
|
|
|
def set_memlimit(limit):
    """Parse a human-readable memory limit such as '4G' or '0.5T' and
    store the resulting byte count in the global max_memuse.

    Raises ValueError for unparseable strings and for limits below
    roughly 2GB, which are too small for bigmem tests to be useful.
    """
    import re
    global max_memuse
    unit_sizes = {
        'k': 1024,
        'm': _1M,
        'g': _1G,
        't': 1024*_1G,
    }
    # NOTE(review): under re.VERBOSE the literal space in this pattern is
    # ignored, so 'number' and 'unit' match with no separator; preserved.
    match = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
                     re.IGNORECASE | re.VERBOSE)
    if match is None:
        raise ValueError('Invalid memory limit %r' % (limit,))
    memlimit = int(float(match.group(1)) * unit_sizes[match.group(3).lower()])
    if memlimit > MAX_Py_ssize_t:
        # Clamp to the largest index the interpreter can address.
        memlimit = MAX_Py_ssize_t
    if memlimit < _2G - 1:
        raise ValueError('Memory limit %r too low to be useful' % (limit,))
    max_memuse = memlimit
|
|
|
|
|
|
|
|
def bigmemtest(minsize, memuse, overhead=5*_1M):
    """Decorator for bigmem tests.

    'minsize' is the minimum useful size for the test (in arbitrary,
    test-interpreted units.) 'memuse' is the number of 'bytes per size' for
    the test, or a good estimate of it. 'overhead' specifies fixed overhead,
    independent of the testsize, and defaults to 5Mb.

    The decorator tries to guess a good value for 'size' and passes it to
    the decorated test function. If minsize * memuse is more than the
    allowed memory use (as defined by max_memuse), the test is skipped.
    Otherwise, minsize is adjusted upward to use up to max_memuse.
    """
    def decorator(f):
        def wrapper(self):
            if not max_memuse:
                # If max_memuse is 0 (the default),
                # we still want to run the tests with size set to a few kb,
                # to make sure they work. We still want to avoid using
                # too much memory, though, but we do that noisily.
                maxsize = 5147
                self.failIf(maxsize * memuse + overhead > 20 * _1M)
            else:
                maxsize = int((max_memuse - overhead) / memuse)
                if maxsize < minsize:
                    # Really ought to print 'test skipped' or something
                    if verbose:
                        sys.stderr.write("Skipping %s because of memory "
                                         "constraint\n" % (f.__name__,))
                    return
                # Try to keep some breathing room in memory use
                maxsize = max(maxsize - 50 * _1M, minsize)
            return f(self, maxsize)
        # Expose the parameters so regrtest's refleak machinery can see them.
        wrapper.minsize = minsize
        wrapper.memuse = memuse
        wrapper.overhead = overhead
        return wrapper
    return decorator
|
|
|
|
|
2006-08-09 12:37:26 -03:00
|
|
|
def bigaddrspacetest(f):
    """Decorator for tests that fill the address space."""
    def wrapper(self):
        if max_memuse >= MAX_Py_ssize_t:
            return f(self)
        # Not enough memory allowed for an address-space-filling test.
        if verbose:
            sys.stderr.write("Skipping %s because of memory "
                             "constraint\n" % (f.__name__,))
    return wrapper
|
|
|
|
|
2001-03-21 14:26:33 -04:00
|
|
|
#=======================================================================
|
2007-04-25 15:54:36 -03:00
|
|
|
# unittest integration.
|
2001-03-21 14:26:33 -04:00
|
|
|
|
2001-03-22 04:45:36 -04:00
|
|
|
class BasicTestRunner:
|
2001-03-21 14:26:33 -04:00
|
|
|
def run(self, test):
|
2001-03-22 04:45:36 -04:00
|
|
|
result = unittest.TestResult()
|
2001-03-21 14:26:33 -04:00
|
|
|
test(result)
|
|
|
|
return result
|
|
|
|
|
|
|
|
|
2007-04-25 14:29:52 -03:00
|
|
|
def _run_suite(suite):
    """Run tests from a unittest.TestSuite-derived class."""
    if verbose:
        runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
    else:
        runner = BasicTestRunner()

    result = runner.run(suite)
    if result.wasSuccessful():
        return
    # With exactly one error or failure, surface its traceback verbatim;
    # otherwise point the user at verbose mode.
    if len(result.errors) == 1 and not result.failures:
        err = result.errors[0][1]
    elif len(result.failures) == 1 and not result.errors:
        err = result.failures[0][1]
    else:
        err = "errors occurred; run in verbose mode for details"
    raise TestFailed(err)
|
2001-09-09 03:12:01 -03:00
|
|
|
|
2001-09-20 03:30:41 -03:00
|
|
|
|
2003-05-01 14:45:56 -03:00
|
|
|
def run_unittest(*classes):
    """Run tests from unittest.TestCase-derived classes."""
    valid_types = (unittest.TestSuite, unittest.TestCase)
    suite = unittest.TestSuite()
    for cls in classes:
        if isinstance(cls, str):
            # A string names an already-imported module to scan for tests.
            if cls not in sys.modules:
                raise ValueError("str arguments must be keys in sys.modules")
            suite.addTest(unittest.findTestCases(sys.modules[cls]))
        elif isinstance(cls, valid_types):
            suite.addTest(cls)
        else:
            suite.addTest(unittest.makeSuite(cls))
    _run_suite(suite)
|
2003-04-27 04:54:23 -03:00
|
|
|
|
2001-09-20 03:30:41 -03:00
|
|
|
|
2001-09-09 03:12:01 -03:00
|
|
|
#=======================================================================
|
|
|
|
# doctest driver.
|
|
|
|
|
|
|
|
def run_doctest(module, verbosity=None):
    """Run doctest on the given module.  Return (#failures, #tests).

    If optional argument verbosity is not specified (or is None), pass
    test_support's belief about verbosity on to doctest.  Else doctest's
    usual behavior is used (it searches sys.argv for -v).
    """

    import doctest

    if verbosity is None:
        verbosity = verbose
    else:
        # Any explicit value maps to None so doctest falls back to its
        # own sys.argv '-v' detection.
        verbosity = None

    # Direct doctest output (normally just errors) to real stdout; doctest
    # output shouldn't be compared by regrtest.
    save_stdout = sys.stdout
    sys.stdout = get_original_stdout()
    try:
        f, t = doctest.testmod(module, verbose=verbosity)
        if f:
            raise TestFailed("%d of %d doctests failed" % (f, t))
    finally:
        sys.stdout = save_stdout
    if verbose:
        print 'doctest (%s) ... %d tests with zero failures' % (module.__name__, t)
    return f, t
|
2006-06-18 16:35:01 -03:00
|
|
|
|
|
|
|
#=======================================================================
|
|
|
|
# Threading support to prevent reporting refleaks when running regrtest.py -R
|
|
|
|
|
|
|
|
def threading_setup():
    """Snapshot the number of active and limbo threads.

    The counts are later handed to threading_cleanup() so that
    regrtest.py -R does not mistake lingering threads for refleaks.
    """
    import threading
    active_count = len(threading._active)
    limbo_count = len(threading._limbo)
    return active_count, limbo_count
|
|
|
|
|
|
|
|
def threading_cleanup(num_active, num_limbo):
    """Wait briefly for thread counts to return to the given snapshot.

    Each of the active and limbo counts is polled up to ten times with a
    0.1s sleep in between; if they never settle, give up silently.
    """
    import threading
    import time

    _MAX_COUNT = 10
    for registry, expected in ((threading._active, num_active),
                               (threading._limbo, num_limbo)):
        attempts = 0
        while len(registry) != expected and attempts < _MAX_COUNT:
            attempts += 1
            time.sleep(0.1)
|
2006-06-29 01:10:08 -03:00
|
|
|
|
|
|
|
def reap_children():
    """Use this function at the end of test_main() whenever sub-processes
    are started.  This will help ensure that no extra children (zombies)
    stick around to hog resources and create problems when looking
    for refleaks.
    """
    # Reap all our dead child processes so we don't leave zombies around.
    # These hog resources and might be causing some of the buildbots to die.
    if not hasattr(os, 'waitpid'):
        return
    any_process = -1
    while True:
        try:
            # This will raise an exception on Windows.  That's ok.
            pid, status = os.waitpid(any_process, os.WNOHANG)
        except:
            break
        if pid == 0:
            # No more dead children to collect.
            break
|