Merged revisions 78982,78986 via svnmerge from

svn+ssh://pythondev@svn.python.org/python/trunk

........
  r78982 | florent.xicluna | 2010-03-15 15:00:58 +0100 (lun, 15 mar 2010) | 2 lines

  Remove py3k deprecation warnings from these Unicode tools.
........
  r78986 | florent.xicluna | 2010-03-15 19:08:58 +0100 (lun, 15 mar 2010) | 3 lines

  Issue #7783 and #7787: open_urlresource invalidates the outdated files from the local cache.
  Use this feature to fix test_normalization.
........
Florent Xicluna 2010-03-19 14:25:03 +00:00
parent faa663f03d
commit f089fd67fc
5 changed files with 40 additions and 20 deletions

View File

@@ -33,6 +33,7 @@ __all__ = ["Error", "TestFailed", "ResourceDenied", "import_module",
           "reap_children", "cpython_only", "check_impl_detail", "get_attribute",
           "swap_item", "swap_attr"]

class Error(Exception):
    """Base class for regression test exceptions."""
@@ -444,12 +445,29 @@ def check_syntax_error(testcase, statement):
 def open_urlresource(url, *args, **kw):
     import urllib.request, urllib.parse
-    requires('urlfetch')
+    check = kw.pop('check', None)
     filename = urllib.parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!
     fn = os.path.join(os.path.dirname(__file__), "data", filename)
+    def check_valid_file(fn):
+        f = open(fn, *args, **kw)
+        if check is None:
+            return f
+        elif check(f):
+            f.seek(0)
+            return f
+        f.close()
     if os.path.exists(fn):
-        return open(fn, *args, **kw)
+        f = check_valid_file(fn)
+        if f is not None:
+            return f
+        unlink(fn)
+    # Verify the requirement before downloading the file
+    requires('urlfetch')
     print('\tfetching %s ...' % url, file=get_original_stdout())
     f = urllib.request.urlopen(url, timeout=15)
@@ -461,7 +479,12 @@ def open_urlresource(url, *args, **kw):
                 s = f.read()
     finally:
         f.close()
-    return open(fn, *args, **kw)
+    f = check_valid_file(fn)
+    if f is not None:
+        return f
+    raise TestFailed('invalid resource "%s"' % fn)
 class WarningsRecorder(object):
     """Convenience wrapper for the warnings list returned on

View File

@@ -9,14 +9,9 @@ from unicodedata import normalize, unidata_version
 TESTDATAFILE = "NormalizationTest.txt"
 TESTDATAURL = "http://www.unicode.org/Public/" + unidata_version + "/ucd/" + TESTDATAFILE
-# Verify we have the correct version of the test data file.
-TESTDATAPATH = os.path.join(os.path.dirname(__file__), "data", TESTDATAFILE)
-if os.path.exists(TESTDATAPATH):
-    f = open(TESTDATAPATH, encoding='utf-8')
-    l = f.readline()
-    f.close()
-    if not unidata_version in l:
-        os.unlink(testdatafile)
+def check_version(testfile):
+    hdr = testfile.readline()
+    return unidata_version in hdr
 class RangeError(Exception):
     pass
@@ -42,13 +37,15 @@ def unistr(data):
 class NormalizationTest(unittest.TestCase):
     def test_main(self):
         part = None
         part1_data = {}
+        # Hit the exception early
         try:
-            open_urlresource(TESTDATAURL, encoding="utf-8")
+            testdata = open_urlresource(TESTDATAURL, encoding="utf-8",
+                                        check=check_version)
         except (IOError, HTTPException):
             self.skipTest("Could not retrieve " + TESTDATAURL)
-        for line in open_urlresource(TESTDATAURL, encoding="utf-8"):
+        for line in testdata:
             if '#' in line:
                 line = line.split('#')[0]
             line = line.strip()

View File

@@ -903,13 +903,16 @@ Documentation
 - Issue #6556: Fixed the Distutils configuration files location explanation
   for Windows.
 - Update python manual page (options -B, -O0, -s, environment variables
   PYTHONDONTWRITEBYTECODE, PYTHONNOUSERSITE).
 Tests
 -----
+- Issue #7783: test.test_support.open_urlresource invalidates the outdated
+  files from the local cache.
 - Issue #7849: Now the utility ``check_warnings`` verifies if the warnings are
   effectively raised.
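
A small sketch of the check_warnings behaviour described in the #7849 entry above, assuming the py3k spelling test.support.check_warnings; the warning message is arbitrary:

import warnings
from test import support

# Passes: a matching DeprecationWarning is effectively raised in the block.
with support.check_warnings(("spam is deprecated", DeprecationWarning)):
    warnings.warn("spam is deprecated", DeprecationWarning)
# With the #7849 change, the context manager fails the test at exit
# if no warning matching the filter was raised inside the block.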

View File

@@ -40,8 +40,7 @@ mapRE = re.compile('((?:0x[0-9a-fA-F]+\+?)+)'
                    '\s*'
                    '(#.+)?')
-def parsecodes(codes,
-               len=len, filter=filter,range=range):
+def parsecodes(codes, len=len, range=range):
     """ Converts code combinations to either a single code integer
         or a tuple of integers.

View File

@@ -517,8 +517,7 @@ def makeunicodetype(unicode, trace):
     haswide = False
     hasnonewide = False
-    spaces.sort()
-    for codepoint in spaces:
+    for codepoint in sorted(spaces):
         if codepoint < 0x10000:
             hasnonewide = True
         if codepoint >= 0x10000 and not haswide:
@@ -546,8 +545,7 @@ def makeunicodetype(unicode, trace):
     print(' switch (ch) {', file=fp)
     haswide = False
     hasnonewide = False
-    linebreaks.sort()
-    for codepoint in linebreaks:
+    for codepoint in sorted(linebreaks):
         if codepoint < 0x10000:
             hasnonewide = True
         if codepoint >= 0x10000 and not haswide:
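
A side note on the two hunks above: sorted(x) returns a new sorted list and accepts any iterable, while list.sort() mutates in place and exists only on lists, so the rewrite drops the separate sort call without changing the iteration order. A tiny self-contained sketch with arbitrary sample code points:

spaces = {0x20, 0xA0, 0x1680}      # sample code points in a set

# Old pattern: needs a list and mutates it before iterating.
as_list = list(spaces)
as_list.sort()

# New pattern: no mutation, works directly on any iterable.
for codepoint in sorted(spaces):
    print(hex(codepoint))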