merge heads

Benjamin Peterson 2012-04-06 13:20:39 -04:00
commit 403bb39d1d
15 changed files with 183 additions and 92 deletions

View File

@ -735,7 +735,7 @@ The p1.stdout.close() call after starting the p2 is important in order for p1
to receive a SIGPIPE if p2 exits before p1.
Alternatively, for trusted input, the shell's own pipeline support may still
be used directly:
be used directly::
output=`dmesg | grep hda`
# becomes
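For orientation (not part of the commit): the Popen-based replacement that the surrounding documentation describes looks roughly like the sketch below, using the same dmesg/grep commands as the shell example.

from subprocess import Popen, PIPE

p1 = Popen(["dmesg"], stdout=PIPE)
p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
p1.stdout.close()              # allow p1 to receive SIGPIPE if p2 exits first
output = p2.communicate()[0]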

View File

@ -430,7 +430,7 @@ All methods are executed atomically.
are blocked waiting for the lock to become unlocked, allow exactly one of them
to proceed.
Do not call this method when the lock is unlocked.
When invoked on an unlocked lock, a :exc:`ThreadError` is raised.
There is no return value.
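A short sketch of the documented behaviour (not part of the commit; in current CPython :exc:`ThreadError` is an alias of :exc:`RuntimeError`, which is what the lock actually raises):

import threading

lock = threading.Lock()
lock.acquire()
# ... critical section ...
lock.release()                 # returns None

try:
    lock.release()             # lock is already unlocked
except RuntimeError as exc:    # ThreadError in the documentation's wording
    print(exc)                 # e.g. "release unlocked lock"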

View File

@ -78,7 +78,7 @@ class TabSet(Frame):
def remove_tab(self, tab_name):
"""Remove the tab named <tab_name>"""
if not tab_name in self._tab_names:
raise KeyError("No such Tab: '%s" % page_name)
raise KeyError("No such Tab: '%s" % tab_name)
self._tab_names.remove(tab_name)
self._arrange_tabs()
@ -88,7 +88,7 @@ class TabSet(Frame):
if tab_name == self._selected_tab:
return
if tab_name is not None and tab_name not in self._tabs:
raise KeyError("No such Tab: '%s" % page_name)
raise KeyError("No such Tab: '%s" % tab_name)
# deselect the current selected tab
if self._selected_tab is not None:

View File

@ -101,6 +101,10 @@ def _validate_family(family):
if sys.platform != 'win32' and family == 'AF_PIPE':
raise ValueError('Family %s is not recognized.' % family)
if sys.platform == 'win32' and family == 'AF_UNIX':
# double check
if not hasattr(socket, family):
raise ValueError('Family %s is not recognized.' % family)
def address_type(address):
'''
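Behaviour sketch (not part of the commit), assuming a non-Windows host as in the test added in the next file: an address whose deduced family the platform cannot support is now rejected with a ValueError.

from multiprocessing.connection import Listener

try:
    Listener(r'\\.\pipe\demo')        # hypothetical AF_PIPE-style address
except ValueError as exc:
    print(exc)                        # "Family AF_PIPE is not recognized."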

View File

@ -2331,6 +2331,12 @@ class TestInvalidFamily(unittest.TestCase):
with self.assertRaises(ValueError):
multiprocessing.connection.Listener(r'\\.\test')
@unittest.skipUnless(WIN32, "skipped on non-Windows platforms")
def test_invalid_family_win32(self):
with self.assertRaises(ValueError):
multiprocessing.connection.Listener('/var/test.pipe')
testcases_other = [OtherTest, TestInvalidHandle, TestInitializers,
TestStdinBadfiledescriptor, TestInvalidFamily]

View File

@ -5,8 +5,11 @@ Tools directory of a Python checkout or tarball, such as reindent.py.
"""
import os
import sys
import imp
import unittest
import sysconfig
import tempfile
from test import support
from test.script_helper import assert_python_ok
@ -17,10 +20,11 @@ if not sysconfig.is_python_build():
srcdir = sysconfig.get_config_var('projectbase')
basepath = os.path.join(os.getcwd(), srcdir, 'Tools')
scriptsdir = os.path.join(basepath, 'scripts')
class ReindentTests(unittest.TestCase):
script = os.path.join(basepath, 'scripts', 'reindent.py')
script = os.path.join(scriptsdir, 'reindent.py')
def test_noargs(self):
assert_python_ok(self.script)
@ -31,8 +35,73 @@ class ReindentTests(unittest.TestCase):
self.assertGreater(err, b'')
class TestSundryScripts(unittest.TestCase):
# At least make sure the rest don't have syntax errors. When tests are
# added for a script it should be added to the whitelist below.
# scripts that have independent tests.
whitelist = ['reindent.py']
# scripts that can't be imported without running
blacklist = ['make_ctype.py']
# scripts that use windows-only modules
windows_only = ['win_add2path.py']
# blacklisted for other reasons
other = ['analyze_dxp.py']
skiplist = blacklist + whitelist + windows_only + other
def setUp(self):
cm = support.DirsOnSysPath(scriptsdir)
cm.__enter__()
self.addCleanup(cm.__exit__)
def test_sundry(self):
for fn in os.listdir(scriptsdir):
if fn.endswith('.py') and fn not in self.skiplist:
__import__(fn[:-3])
@unittest.skipIf(sys.platform != "win32", "Windows-only test")
def test_sundry_windows(self):
for fn in self.windows_only:
__import__(fn[:-3])
@unittest.skipIf(not support.threading, "test requires _thread module")
def test_analyze_dxp_import(self):
if hasattr(sys, 'getdxp'):
import analyze_dxp
else:
with self.assertRaises(RuntimeError):
import analyze_dxp
class PdepsTests(unittest.TestCase):
@classmethod
def setUpClass(self):
path = os.path.join(scriptsdir, 'pdeps.py')
self.pdeps = imp.load_source('pdeps', path)
@classmethod
def tearDownClass(self):
if 'pdeps' in sys.modules:
del sys.modules['pdeps']
def test_process_errors(self):
# Issue #14492: m_import.match(line) can be None.
with tempfile.TemporaryDirectory() as tmpdir:
fn = os.path.join(tmpdir, 'foo')
with open(fn, 'w') as stream:
stream.write("#!/this/will/fail")
self.pdeps.process(fn, {})
def test_inverse_attribute_error(self):
# Issue #14492: this used to fail with an AttributeError.
self.pdeps.inverse({'a': []})
def test_main():
support.run_unittest(ReindentTests)
support.run_unittest(*[obj for obj in globals().values()
if isinstance(obj, type)])
if __name__ == '__main__':
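A self-contained sketch of the setUp pattern used above (not part of the commit; the directory name is made up), using the same test.support helper the diff imports: the context manager is entered by hand and its __exit__ registered as a cleanup, so sys.path is restored even if the test errors out. DirsOnSysPath.__exit__ ignores its arguments, so calling it with none at cleanup time is fine.

import sys
import unittest
from test import support

class DirsOnSysPathExample(unittest.TestCase):
    def setUp(self):
        cm = support.DirsOnSysPath('/tmp/example-scripts')   # placeholder path
        cm.__enter__()
        self.addCleanup(cm.__exit__)

    def test_path_is_extended(self):
        self.assertIn('/tmp/example-scripts', sys.path)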

View File

@ -1253,7 +1253,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView):
def exists(self, item):
"""Returns True if the specified item is present in the three,
"""Returns True if the specified item is present in the tree,
False otherwise."""
return bool(self.tk.call(self._w, "exists", item))

View File

@ -39,6 +39,13 @@ Core and Builtins
Library
-------
- Issue #14496: Fix wrong name in idlelib/tabbedpages.py.
Patch by Popa Claudiu.
- Issue #14482: Raise a ValueError, not a NameError, when trying to create
a multiprocessing Client or Listener with an AF_UNIX type address under
Windows. Patch by Popa Claudiu.
- Issue #14151: Raise a ValueError, not a NameError, when trying to create
a multiprocessing Client or Listener with an AF_PIPE type address under
non-Windows platforms. Patch by Popa Claudiu.

View File

@ -3,34 +3,6 @@
# Usage: abitype.py < old_code > new_code
import re, sys
############ Simplistic C scanner ##################################
tokenizer = re.compile(
r"(?P<preproc>#.*\n)"
r"|(?P<comment>/\*.*?\*/)"
r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
r"|(?P<ws>[ \t\n]+)"
r"|(?P<other>.)",
re.MULTILINE)
tokens = []
source = sys.stdin.read()
pos = 0
while pos != len(source):
m = tokenizer.match(source, pos)
tokens.append([m.lastgroup, m.group()])
pos += len(tokens[-1][1])
if tokens[-1][0] == 'preproc':
# continuation lines are considered
# only in preprocess statements
while tokens[-1][1].endswith('\\\n'):
nl = source.find('\n', pos)
if nl == -1:
line = source[pos:]
else:
line = source[pos:nl+1]
tokens[-1][1] += line
pos += len(line)
###### Replacement of PyTypeObject static instances ##############
# classify each token, giving it a one-letter code:
@ -79,7 +51,7 @@ def get_fields(start, real_end):
while tokens[pos][0] in ('ws', 'comment'):
pos += 1
if tokens[pos][1] != 'PyVarObject_HEAD_INIT':
raise Exception, '%s has no PyVarObject_HEAD_INIT' % name
raise Exception('%s has no PyVarObject_HEAD_INIT' % name)
while tokens[pos][1] != ')':
pos += 1
pos += 1
@ -183,18 +155,48 @@ def make_slots(name, fields):
return '\n'.join(res)
# Main loop: replace all static PyTypeObjects until
# there are none left.
while 1:
c = classify()
m = re.search('(SW)?TWIW?=W?{.*?};', c)
if not m:
break
start = m.start()
end = m.end()
name, fields = get_fields(start, m)
tokens[start:end] = [('',make_slots(name, fields))]
if __name__ == '__main__':
# Output result to stdout
for t, v in tokens:
sys.stdout.write(v)
############ Simplistic C scanner ##################################
tokenizer = re.compile(
r"(?P<preproc>#.*\n)"
r"|(?P<comment>/\*.*?\*/)"
r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
r"|(?P<ws>[ \t\n]+)"
r"|(?P<other>.)",
re.MULTILINE)
tokens = []
source = sys.stdin.read()
pos = 0
while pos != len(source):
m = tokenizer.match(source, pos)
tokens.append([m.lastgroup, m.group()])
pos += len(tokens[-1][1])
if tokens[-1][0] == 'preproc':
# continuation lines are considered
# only in preprocess statements
while tokens[-1][1].endswith('\\\n'):
nl = source.find('\n', pos)
if nl == -1:
line = source[pos:]
else:
line = source[pos:nl+1]
tokens[-1][1] += line
pos += len(line)
# Main loop: replace all static PyTypeObjects until
# there are none left.
while 1:
c = classify()
m = re.search('(SW)?TWIW?=W?{.*?};', c)
if not m:
break
start = m.start()
end = m.end()
name, fields = get_fields(start, m)
tokens[start:end] = [('',make_slots(name, fields))]
# Output result to stdout
for t, v in tokens:
sys.stdout.write(v)
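For reference (not part of the commit), the scanner above can be exercised on its own; each match's lastgroup names the alternative that fired:

import re

tokenizer = re.compile(
    r"(?P<preproc>#.*\n)"
    r"|(?P<comment>/\*.*?\*/)"
    r"|(?P<ident>[a-zA-Z_][a-zA-Z0-9_]*)"
    r"|(?P<ws>[ \t\n]+)"
    r"|(?P<other>.)",
    re.MULTILINE)

source = "static int x; /* demo */\n"     # made-up one-line C snippet
pos = 0
while pos != len(source):
    m = tokenizer.match(source, pos)
    print(m.lastgroup, repr(m.group()))   # ident 'static', ws ' ', ...
    pos += len(m.group())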

View File

@ -106,14 +106,16 @@ def check_limit(n, test_func_name):
else:
print("Yikes!")
limit = 1000
while 1:
check_limit(limit, "test_recurse")
check_limit(limit, "test_add")
check_limit(limit, "test_repr")
check_limit(limit, "test_init")
check_limit(limit, "test_getattr")
check_limit(limit, "test_getitem")
check_limit(limit, "test_cpickle")
print("Limit of %d is fine" % limit)
limit = limit + 100
if __name__ == '__main__':
limit = 1000
while 1:
check_limit(limit, "test_recurse")
check_limit(limit, "test_add")
check_limit(limit, "test_repr")
check_limit(limit, "test_init")
check_limit(limit, "test_getattr")
check_limit(limit, "test_getitem")
check_limit(limit, "test_cpickle")
print("Limit of %d is fine" % limit)
limit = limit + 100
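The point of this hunk and the next (not stated in the diff itself, but implied by the blacklist comments in test_tools.py above): moving module-level work under a __main__ guard lets the new TestSundryScripts import the script without side effects. The pattern, in miniature:

def main():
    print("expensive, script-only work goes here")   # placeholder body

if __name__ == '__main__':      # skipped when the module is merely imported
    main()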

View File

@ -76,29 +76,31 @@ usage = """Usage: %s [-cd] paths...
-c: recognize Python source files trying to compile them
-d: debug output""" % sys.argv[0]
try:
opts, args = getopt.getopt(sys.argv[1:], 'cd')
except getopt.error as msg:
print(msg, file=sys.stderr)
print(usage, file=sys.stderr)
sys.exit(1)
if __name__ == '__main__':
is_python = pysource.looks_like_python
debug = False
try:
opts, args = getopt.getopt(sys.argv[1:], 'cd')
except getopt.error as msg:
print(msg, file=sys.stderr)
print(usage, file=sys.stderr)
sys.exit(1)
for o, a in opts:
if o == '-c':
is_python = pysource.can_be_compiled
elif o == '-d':
debug = True
is_python = pysource.looks_like_python
debug = False
if not args:
print(usage, file=sys.stderr)
sys.exit(1)
for o, a in opts:
if o == '-c':
is_python = pysource.can_be_compiled
elif o == '-d':
debug = True
for fullpath in pysource.walk_python_files(args, is_python):
if debug:
print("Testing for coding: %s" % fullpath)
result = needs_declaration(fullpath)
if result:
print(fullpath)
if not args:
print(usage, file=sys.stderr)
sys.exit(1)
for fullpath in pysource.walk_python_files(args, is_python):
if debug:
print("Testing for coding: %s" % fullpath)
result = needs_declaration(fullpath)
if result:
print(fullpath)

View File

@ -292,7 +292,7 @@ def addsubst(substfile):
if not words: continue
if len(words) == 3 and words[0] == 'struct':
words[:2] = [words[0] + ' ' + words[1]]
elif len(words) <> 2:
elif len(words) != 2:
err(substfile + '%s:%r: warning: bad line: %r' % (substfile, lineno, line))
continue
if Reverse:

View File

@ -20,7 +20,7 @@ file ... : files to sum; '-' or no files means stdin
import sys
import os
import getopt
import md5
from hashlib import md5
def sum(*files):
sts = 0
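Quick check of the replacement import (not part of the commit): hashlib's md5 objects provide the same update()/hexdigest() interface the old md5 module exposed.

from hashlib import md5

h = md5()
h.update(b"hello world")
print(h.hexdigest())            # 32-character hex digest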

View File

@ -13,7 +13,6 @@
"""
import re,sys
import TextTools
entityRE = re.compile('<!ENTITY +(\w+) +CDATA +"([^"]+)" +-- +((?:.|\n)+?) *-->')
@ -45,7 +44,7 @@ def writefile(f,defs):
charcode = repr(charcode)
else:
charcode = repr(charcode)
comment = TextTools.collapse(comment)
comment = ' '.join(comment.split())
f.write(" '%s':\t%s, \t# %s\n" % (name,charcode,comment))
f.write('\n}\n')
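What the replacement does (not part of the commit): ' '.join(text.split()) folds any run of whitespace, including tabs and newlines, into single spaces, removing the dependency on the third-party TextTools module.

comment = "spread\n  over \t several   lines"
print(' '.join(comment.split()))    # -> 'spread over several lines'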

View File

@ -76,10 +76,9 @@ def process(filename, table):
nextline = fp.readline()
if not nextline: break
line = line[:-1] + nextline
if m_import.match(line) >= 0:
(a, b), (a1, b1) = m_import.regs[:2]
elif m_from.match(line) >= 0:
(a, b), (a1, b1) = m_from.regs[:2]
m_found = m_import.match(line) or m_from.match(line)
if m_found:
(a, b), (a1, b1) = m_found.regs[:2]
else: continue
words = line[a1:b1].split(',')
# print '#', line, words
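Why the rewrite above works (not part of the commit; the patterns below are illustrative, not the ones pdeps.py defines): with the re module a failed match returns None rather than -1, so testing the match object's truth value is the correct guard before touching its regs/groups.

import re

m_import = re.compile(r'import\s+(\w[\w.]*)')
m_from = re.compile(r'from\s+(\w[\w.]*)\s+import')

for line in ("import os", "from sys import path", "x = 1"):
    m = m_import.match(line) or m_from.match(line)
    if m:
        print(line, "->", m.group(1))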
@ -87,6 +86,7 @@ def process(filename, table):
word = word.strip()
if word not in list:
list.append(word)
fp.close()
# Compute closure (this is in fact totally general)
@ -123,7 +123,7 @@ def closure(table):
def inverse(table):
inv = {}
for key in table.keys():
if not inv.has_key(key):
if key not in inv:
inv[key] = []
for item in table[key]:
store(inv, item, key)