#14490, #14491: add 'sundry'-style import tests for Tools/scripts.

This patch changes a few of the scripts to have __name__=='__main__'
clauses so that they are importable without running.  Also fixes the
syntax errors revealed by the tests.
R David Murray 2012-04-04 21:28:14 -04:00
parent b6046301ef
commit 54ac832a24
7 changed files with 128 additions and 82 deletions
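The commit message boils down to one pattern: anything a script does at import time (argument parsing, reading stdin, writing output) moves under an if __name__ == '__main__': guard, so the new test can import each module and catch syntax errors without actually running it. A minimal sketch of that shape, using a hypothetical example_script.py rather than any of the files touched here:

    # example_script.py -- hypothetical; illustrates the guard pattern this patch applies.
    import sys

    def main(argv):
        # Side effects (option parsing, I/O, sys.exit) live here, not at module
        # level, so "import example_script" parses and binds names but runs nothing.
        print("running with", argv)
        return 0

    if __name__ == '__main__':
        sys.exit(main(sys.argv[1:]))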

View File

@@ -5,6 +5,7 @@ Tools directory of a Python checkout or tarball, such as reindent.py.
"""
import os
import sys
import unittest
import sysconfig
from test import support
@@ -17,10 +18,11 @@ if not sysconfig.is_python_build():
srcdir = sysconfig.get_config_var('projectbase')
basepath = os.path.join(os.getcwd(), srcdir, 'Tools')
scriptsdir = os.path.join(basepath, 'scripts')
class ReindentTests(unittest.TestCase):
script = os.path.join(basepath, 'scripts', 'reindent.py')
script = os.path.join(scriptsdir, 'reindent.py')
def test_noargs(self):
assert_python_ok(self.script)
@@ -31,8 +33,47 @@ class ReindentTests(unittest.TestCase):
self.assertGreater(err, b'')
class TestSundryScripts(unittest.TestCase):
# At least make sure the rest don't have syntax errors. When tests are
# added for a script it should be added to the whitelist below.
# scripts that have independent tests.
whitelist = ['reindent.py']
# scripts that can't be imported without running
blacklist = ['make_ctype.py']
# scripts that use windows-only modules
windows_only = ['win_add2path.py']
# blacklisted for other reasons
other = ['analyze_dxp.py']
skiplist = blacklist + whitelist + windows_only + other
def setUp(self):
cm = support.DirsOnSysPath(scriptsdir)
cm.__enter__()
self.addCleanup(cm.__exit__)
def test_sundry(self):
for fn in os.listdir(scriptsdir):
if fn.endswith('.py') and fn not in self.skiplist:
__import__(fn[:-3])
@unittest.skipIf(sys.platform != "win32", "Windows-only test")
def test_sundry_windows(self):
for fn in self.windows_only:
__import__(fn[:-3])
def test_analyze_dxp_import(self):
if hasattr(sys, 'getdxp'):
import analyze_dxp
else:
with self.assertRaises(RuntimeError):
import analyze_dxp
def test_main():
support.run_unittest(ReindentTests)
support.run_unittest(*[obj for obj in globals().values()
if isinstance(obj, type)])
if __name__ == '__main__':

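For context, the check that TestSundryScripts.test_sundry performs amounts to this: put the scripts directory on sys.path and import each module by name, which succeeds only if the file parses and its top level is safe to execute. A standalone sketch under that assumption (the helper name and directory argument are mine, not part of the test file):

    import importlib
    import sys

    def import_script(scripts_dir, filename):
        # Import a script (e.g. 'reindent.py') without running its main code.
        sys.path.insert(0, scripts_dir)
        try:
            # Strip the '.py' suffix; a broken script raises SyntaxError or ImportError here.
            return importlib.import_module(filename[:-3])
        finally:
            sys.path.remove(scripts_dir)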
View File

@@ -3,34 +3,6 @@
# Usage: abitype.py < old_code > new_code
import re, sys
############ Simplistic C scanner ##################################
tokenizer = re.compile(
r"(?P<preproc>#.*\n)"
r"|(?P<comment>/\*.*?\*/)"
r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
r"|(?P<ws>[ \t\n]+)"
r"|(?P<other>.)",
re.MULTILINE)
tokens = []
source = sys.stdin.read()
pos = 0
while pos != len(source):
m = tokenizer.match(source, pos)
tokens.append([m.lastgroup, m.group()])
pos += len(tokens[-1][1])
if tokens[-1][0] == 'preproc':
# continuation lines are considered
# only in preprocess statements
while tokens[-1][1].endswith('\\\n'):
nl = source.find('\n', pos)
if nl == -1:
line = source[pos:]
else:
line = source[pos:nl+1]
tokens[-1][1] += line
pos += len(line)
###### Replacement of PyTypeObject static instances ##############
# classify each token, giving it a one-letter code:
@@ -79,7 +51,7 @@ def get_fields(start, real_end):
while tokens[pos][0] in ('ws', 'comment'):
pos += 1
if tokens[pos][1] != 'PyVarObject_HEAD_INIT':
raise Exception, '%s has no PyVarObject_HEAD_INIT' % name
raise Exception('%s has no PyVarObject_HEAD_INIT' % name)
while tokens[pos][1] != ')':
pos += 1
pos += 1
@@ -183,18 +155,48 @@ def make_slots(name, fields):
return '\n'.join(res)
# Main loop: replace all static PyTypeObjects until
# there are none left.
while 1:
c = classify()
m = re.search('(SW)?TWIW?=W?{.*?};', c)
if not m:
break
start = m.start()
end = m.end()
name, fields = get_fields(start, m)
tokens[start:end] = [('',make_slots(name, fields))]
if __name__ == '__main__':
# Output result to stdout
for t, v in tokens:
sys.stdout.write(v)
############ Simplistic C scanner ##################################
tokenizer = re.compile(
r"(?P<preproc>#.*\n)"
r"|(?P<comment>/\*.*?\*/)"
r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
r"|(?P<ws>[ \t\n]+)"
r"|(?P<other>.)",
re.MULTILINE)
tokens = []
source = sys.stdin.read()
pos = 0
while pos != len(source):
m = tokenizer.match(source, pos)
tokens.append([m.lastgroup, m.group()])
pos += len(tokens[-1][1])
if tokens[-1][0] == 'preproc':
# continuation lines are considered
# only in preprocess statements
while tokens[-1][1].endswith('\\\n'):
nl = source.find('\n', pos)
if nl == -1:
line = source[pos:]
else:
line = source[pos:nl+1]
tokens[-1][1] += line
pos += len(line)
# Main loop: replace all static PyTypeObjects until
# there are none left.
while 1:
c = classify()
m = re.search('(SW)?TWIW?=W?{.*?};', c)
if not m:
break
start = m.start()
end = m.end()
name, fields = get_fields(start, m)
tokens[start:end] = [('',make_slots(name, fields))]
# Output result to stdout
for t, v in tokens:
sys.stdout.write(v)

View File

@@ -106,14 +106,16 @@ def check_limit(n, test_func_name):
else:
print("Yikes!")
limit = 1000
while 1:
check_limit(limit, "test_recurse")
check_limit(limit, "test_add")
check_limit(limit, "test_repr")
check_limit(limit, "test_init")
check_limit(limit, "test_getattr")
check_limit(limit, "test_getitem")
check_limit(limit, "test_cpickle")
print("Limit of %d is fine" % limit)
limit = limit + 100
if __name__ == '__main__':
limit = 1000
while 1:
check_limit(limit, "test_recurse")
check_limit(limit, "test_add")
check_limit(limit, "test_repr")
check_limit(limit, "test_init")
check_limit(limit, "test_getattr")
check_limit(limit, "test_getitem")
check_limit(limit, "test_cpickle")
print("Limit of %d is fine" % limit)
limit = limit + 100

View File

@@ -76,29 +76,31 @@ usage = """Usage: %s [-cd] paths...
-c: recognize Python source files trying to compile them
-d: debug output""" % sys.argv[0]
try:
opts, args = getopt.getopt(sys.argv[1:], 'cd')
except getopt.error as msg:
print(msg, file=sys.stderr)
print(usage, file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
is_python = pysource.looks_like_python
debug = False
try:
opts, args = getopt.getopt(sys.argv[1:], 'cd')
except getopt.error as msg:
print(msg, file=sys.stderr)
print(usage, file=sys.stderr)
sys.exit(1)
for o, a in opts:
if o == '-c':
is_python = pysource.can_be_compiled
elif o == '-d':
debug = True
is_python = pysource.looks_like_python
debug = False
if not args:
print(usage, file=sys.stderr)
sys.exit(1)
for o, a in opts:
if o == '-c':
is_python = pysource.can_be_compiled
elif o == '-d':
debug = True
for fullpath in pysource.walk_python_files(args, is_python):
if debug:
print("Testing for coding: %s" % fullpath)
result = needs_declaration(fullpath)
if result:
print(fullpath)
if not args:
print(usage, file=sys.stderr)
sys.exit(1)
for fullpath in pysource.walk_python_files(args, is_python):
if debug:
print("Testing for coding: %s" % fullpath)
result = needs_declaration(fullpath)
if result:
print(fullpath)

View File

@@ -292,7 +292,7 @@ def addsubst(substfile):
if not words: continue
if len(words) == 3 and words[0] == 'struct':
words[:2] = [words[0] + ' ' + words[1]]
elif len(words) <> 2:
elif len(words) != 2:
err(substfile + '%s:%r: warning: bad line: %r' % (substfile, lineno, line))
continue
if Reverse:

View File

@@ -20,7 +20,7 @@ file ... : files to sum; '-' or no files means stdin
import sys
import os
import getopt
import md5
from hashlib import md5
def sum(*files):
sts = 0
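The md5sum.py hunk above replaces the removed Python 2 md5 module with the hashlib equivalent. A minimal standalone sketch of that replacement (the function name is mine, not from the script):

    from hashlib import md5

    def file_md5_hex(path):
        # Hash in fixed-size chunks so large files never need to fit in memory.
        h = md5()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(8192), b''):
                h.update(chunk)
        return h.hexdigest()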

View File

@@ -13,7 +13,6 @@
"""
import re,sys
import TextTools
entityRE = re.compile('<!ENTITY +(\w+) +CDATA +"([^"]+)" +-- +((?:.|\n)+?) *-->')
@@ -45,7 +44,7 @@ def writefile(f,defs):
charcode = repr(charcode)
else:
charcode = repr(charcode)
comment = TextTools.collapse(comment)
comment = ' '.join(comment.split())
f.write(" '%s':\t%s, \t# %s\n" % (name,charcode,comment))
f.write('\n}\n')
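The last hunk drops the third-party TextTools dependency: the whitespace normalization it provided is done instead with ' '.join(comment.split()), which collapses any run of whitespace (including newlines and tabs) to a single space. A quick illustration of the stdlib idiom (the sample string is made up):

    def collapse(text):
        # str.split() with no argument splits on any whitespace run and drops
        # leading/trailing whitespace, so rejoining with single spaces normalizes it.
        return ' '.join(text.split())

    assert collapse('  SMALL LETTER\n\tA ') == 'SMALL LETTER A'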