Merged revisions 66887,66891,66902-66903,66905-66906,66911-66913,66922,66927-66928,66936,66939-66940,66962,66964,66973 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

................
r66887 | benjamin.peterson | 2008-10-13 16:51:40 -0500 (Mon, 13 Oct 2008) | 1 line
document how to disable fixers
................
r66891 | amaury.forgeotdarc | 2008-10-14 16:47:22 -0500 (Tue, 14 Oct 2008) | 5 lines
#4122: On Windows, Py_UNICODE_ISSPACE cannot be used in an extension module:
compilation fails with "undefined reference to _Py_ascii_whitespace"
Will backport to 2.6.
................
r66902 | skip.montanaro | 2008-10-15 06:49:10 -0500 (Wed, 15 Oct 2008) | 1 line
easter egg
................
r66903 | benjamin.peterson | 2008-10-15 15:34:09 -0500 (Wed, 15 Oct 2008) | 1 line
don't recurse into directories that start with '.'
................
r66905 | benjamin.peterson | 2008-10-15 16:05:55 -0500 (Wed, 15 Oct 2008) | 1 line
support the optional line argument for idle
................
r66906 | benjamin.peterson | 2008-10-15 16:58:46 -0500 (Wed, 15 Oct 2008) | 1 line
add a much requested newline
................
r66911 | benjamin.peterson | 2008-10-15 18:10:28 -0500 (Wed, 15 Oct 2008) | 41 lines
Merged revisions 66805,66841,66860,66884-66886,66893,66907,66910 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3
........
  r66805 | benjamin.peterson | 2008-10-04 20:11:02 -0500 (Sat, 04 Oct 2008) | 1 line
  mention what the fixes directory is for
........
  r66841 | benjamin.peterson | 2008-10-07 17:48:12 -0500 (Tue, 07 Oct 2008) | 1 line
  use assertFalse and assertTrue
........
  r66860 | benjamin.peterson | 2008-10-08 16:05:07 -0500 (Wed, 08 Oct 2008) | 1 line
  instead of abusing the pattern matcher, use start_tree to find a next binding
........
  r66884 | benjamin.peterson | 2008-10-13 15:50:30 -0500 (Mon, 13 Oct 2008) | 1 line
  don't print tokens to stdout when -v is given
........
  r66885 | benjamin.peterson | 2008-10-13 16:28:57 -0500 (Mon, 13 Oct 2008) | 1 line
  add the -x option to disable fixers
........
  r66886 | benjamin.peterson | 2008-10-13 16:33:53 -0500 (Mon, 13 Oct 2008) | 1 line
  cut down on some crud
........
  r66893 | benjamin.peterson | 2008-10-14 17:16:54 -0500 (Tue, 14 Oct 2008) | 1 line
  add an optional set literal fixer
........
  r66907 | benjamin.peterson | 2008-10-15 16:59:41 -0500 (Wed, 15 Oct 2008) | 1 line
  don't write backup files by default
........
  r66910 | benjamin.peterson | 2008-10-15 17:43:10 -0500 (Wed, 15 Oct 2008) | 1 line
  add the -n option; it stops backupfiles from being written
........
................
r66912 | hirokazu.yamamoto | 2008-10-16 01:25:25 -0500 (Thu, 16 Oct 2008) | 2 lines
removed unused _PyUnicode_FromFileSystemEncodedObject. made win32_chdir, win32_wchdir static.
................
r66913 | benjamin.peterson | 2008-10-16 13:52:14 -0500 (Thu, 16 Oct 2008) | 1 line
document that deque indexing is O(n) #4123
................
r66922 | benjamin.peterson | 2008-10-16 14:40:14 -0500 (Thu, 16 Oct 2008) | 1 line
use new showwarnings signature for idle #3391
................
r66927 | andrew.kuchling | 2008-10-16 15:15:47 -0500 (Thu, 16 Oct 2008) | 1 line
Fix wording (2.6.1 backport candidate)
................
r66928 | georg.brandl | 2008-10-16 15:20:56 -0500 (Thu, 16 Oct 2008) | 2 lines
Add more TOC to the whatsnew index page.
................
r66936 | georg.brandl | 2008-10-16 16:20:15 -0500 (Thu, 16 Oct 2008) | 2 lines
#4131: FF3 doesn't write cookies.txt files.
................
r66939 | georg.brandl | 2008-10-16 16:36:39 -0500 (Thu, 16 Oct 2008) | 2 lines
part of #4012: kill off old name "processing".
................
r66940 | georg.brandl | 2008-10-16 16:38:48 -0500 (Thu, 16 Oct 2008) | 2 lines
#4083: add "as" to except handler grammar as per PEP 3110.
................
r66962 | benjamin.peterson | 2008-10-17 15:01:01 -0500 (Fri, 17 Oct 2008) | 1 line
clarify CALL_FUNCTION #4141
................
r66964 | georg.brandl | 2008-10-17 16:41:49 -0500 (Fri, 17 Oct 2008) | 2 lines
Fix duplicate word.
................
r66973 | armin.ronacher | 2008-10-19 03:27:43 -0500 (Sun, 19 Oct 2008) | 3 lines
Fixed #4067 by implementing _attributes and _fields for the AST root node.
................
This commit is contained in: parent 5f3b63ad6f, commit 206e3074d3
@@ -190,6 +190,7 @@ docs@python.org), and we'll be glad to correct the problem.
 * Reuben Sumner
 * Kalle Svensson
 * Jim Tittsler
 * David Turner
 * Ville Vainio
 * Martijn Vries
 * Charles G. Waldman
@@ -53,13 +53,17 @@ After transformation, :file:`example.py` looks like this::
 Comments and exact indentation are preserved throughout the translation
 process.
 
-By default, 2to3 runs a set of predefined fixers. The :option:`-l` flag
-lists all avaible fixers. An explicit set of fixers to run can be given by use
-of the :option:`-f` flag. The following example runs only the ``imports`` and
-``has_key`` fixers::
+By default, 2to3 runs a set of predefined fixers. The :option:`-l` flag lists
+all avaible fixers. An explicit set of fixers to run can be given with
+:option:`-f`. Likewise the :option:`-x` explicitly disables a fixer. The
+following example runs only the ``imports`` and ``has_key`` fixers::
 
    $ 2to3 -f imports -f has_key example.py
 
+This command runs every fixer except the ``apply`` fixer::
+
+   $ 2to3 -x apply example.py
+
 Some fixers are *explicit*, meaning they aren't run by default and must be
 listed on the command line to be run. Here, in addition to the default fixers,
 the ``idioms`` fixer is run::
@@ -78,8 +82,8 @@ flag. Note that *only* doctests will be refactored. This also doesn't require
 the module to be valid Python. For example, doctest like examples in a reST
 document could also be refactored with this option.
 
-The :option:`-v` option enables the output of more information on the
-translation process.
+The :option:`-v` option enables output of more information on the translation
+process.
 
 When the :option:`-p` is passed, 2to3 treats ``print`` as a function instead of
 a statement. This is useful when ``from __future__ import print_function`` is
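
The same fixer selection can also be driven from Python through the
main(fixer_pkg, args) entry point that this commit extends in lib2to3/main.py.
A minimal sketch, assuming lib2to3 is importable and that an example.py file
exists (file name and fixer choices are illustrative, mirroring the shell
examples above):

    from lib2to3 import main

    # Equivalent to: 2to3 -f imports -f has_key example.py
    main.main("lib2to3.fixes", args=["-f", "imports", "-f", "has_key", "example.py"])

    # Equivalent to: 2to3 -x apply example.py
    main.main("lib2to3.fixes", args=["-x", "apply", "example.py"])
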
@@ -228,7 +228,9 @@ Notes on using :class:`Set` and :class:`MutableSet` as a mixin:
 
 In addition to the above, deques support iteration, pickling, ``len(d)``,
 ``reversed(d)``, ``copy.copy(d)``, ``copy.deepcopy(d)``, membership testing with
-the :keyword:`in` operator, and subscript references such as ``d[-1]``.
+the :keyword:`in` operator, and subscript references such as ``d[-1]``. Indexed
+access is O(1) at both ends but slows to O(n) in the middle. For fast random
+access, use lists instead.
 
 Example:
 
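
A short sketch of the complexity note added above: deque access is O(1) at
either end, while positions near the middle cost O(n):

    from collections import deque

    d = deque(range(10))
    d.append(10)         # O(1) at the right end
    d.appendleft(-1)     # O(1) at the left end
    print(d[0], d[-1])   # -1 10 -- indexing is supported ...
    print(d[6])          # ... but reaching the middle is O(n); prefer a list there
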
@@ -677,7 +677,8 @@ the more significant byte last.
 opcode finds the keyword parameters first. For each keyword argument, the value
 is on top of the key. Below the keyword parameters, the positional parameters
 are on the stack, with the right-most parameter on top. Below the parameters,
-the function object to call is on the stack.
+the function object to call is on the stack. Pops all function arguments, and
+the function itself off the stack, and pushes the return value.
 
 
 .. opcode:: MAKE_FUNCTION (argc)
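
The stack behaviour described above can be observed with the dis module; a
minimal sketch (the exact listing varies by interpreter version, and the
CALL_FUNCTION opcode was later renamed in newer CPython releases):

    import dis

    def example():
        return divmod(7, 3)

    # On interpreters of this era the listing shows the callable pushed first,
    # then the positional arguments, then a CALL_FUNCTION that pops all of them
    # and pushes the return value.
    dis.dis(example)
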
@@ -889,7 +889,8 @@ are always available. They are listed here in alphabetical order.
 best explained with an example::
 
    class C(object):
-       def __init__(self): self._x = None
+       def __init__(self):
+           self._x = None
 
        @property
        def x(self):
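
For reference, a compact runnable version of the kind of property the excerpt
above is building up to (the getter and setter bodies are illustrative, not
quoted from the docs):

    class C(object):
        def __init__(self):
            self._x = None

        @property
        def x(self):
            """I'm the 'x' property."""
            return self._x

        @x.setter
        def x(self, value):
            self._x = value

    c = C()
    c.x = 42
    print(c.x)   # 42
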
@@ -93,7 +93,7 @@ The module also offers three general purpose functions based on heaps.
 
    Merge multiple sorted inputs into a single sorted output (for example, merge
    timestamped entries from multiple log files). Returns an :term:`iterator`
-   over over the sorted values.
+   over the sorted values.
 
    Similar to ``sorted(itertools.chain(*iterables))`` but returns an iterable, does
    not pull the data into memory all at once, and assumes that each of the input
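
A quick sketch of the corrected description: heapq.merge consumes its
already-sorted inputs lazily and yields a single sorted stream:

    import heapq

    a, b, c = [1, 4, 7], [2, 5, 8], [3, 6, 9]
    print(list(heapq.merge(a, b, c)))   # [1, 2, 3, 4, 5, 6, 7, 8, 9]
    # Unlike sorted(itertools.chain(a, b, c)), the inputs are never pulled into
    # memory all at once; each input only needs to be sorted on its own.
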
@@ -376,8 +376,8 @@ The :mod:`multiprocessing` package mostly replicates the API of the
 
 Example usage of some of the methods of :class:`Process`::
 
-   >>> import processing, time, signal
-   >>> p = processing.Process(target=time.sleep, args=(1000,))
+   >>> import multiprocessing, time, signal
+   >>> p = multiprocessing.Process(target=time.sleep, args=(1000,))
    >>> print(p, p.is_alive())
    <Process(Process-1, initial)> False
    >>> p.start()
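
A small runnable sketch in the same spirit as the doctest above, using only
documented Process methods (the worker function and sleep duration are
illustrative):

    import multiprocessing
    import time

    def worker(seconds):
        time.sleep(seconds)

    if __name__ == "__main__":
        p = multiprocessing.Process(target=worker, args=(0.1,))
        p.start()
        print(p.is_alive())   # True while the child is still running
        p.join()
        print(p.exitcode)     # 0 once the child exits cleanly
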
@@ -1779,12 +1779,12 @@ handler type) for messages from different processes to get mixed up.
 
 Below is an example session with logging turned on::
 
-   >>> import processing, logging
-   >>> logger = processing.getLogger()
+   >>> import multiprocessing, logging
+   >>> logger = multiprocessing.getLogger()
    >>> logger.setLevel(logging.INFO)
    >>> logger.warning('doomed')
    [WARNING/MainProcess] doomed
-   >>> m = processing.Manager()
+   >>> m = multiprocessing.Manager()
    [INFO/SyncManager-1] child process calling self.run()
    [INFO/SyncManager-1] manager bound to '\\\\.\\pipe\\pyc-2776-0-lj0tfa'
    >>> del m
@@ -179,7 +179,7 @@ of Stellenbosch, South Africa. Martin von Loewis put a
 lot of effort into importing existing bugs and patches from
 SourceForge; his scripts for this import operation are at
 http://svn.python.org/view/tracker/importer/ and may be useful to
-other projects wished to move from SourceForge to Roundup.
+other projects wishing to move from SourceForge to Roundup.
 
 .. seealso::
 
@@ -3282,5 +3282,6 @@ Acknowledgements
 The author would like to thank the following people for offering
 suggestions, corrections and assistance with various drafts of this
 article: Georg Brandl, Steve Brown, Nick Coghlan, Ralph Corderoy,
-Jim Jewett, Kent Johnson, Chris Lambacher, Antoine Pitrou, Brian Warner.
+Jim Jewett, Kent Johnson, Chris Lambacher, Martin Michlmayr,
+Antoine Pitrou, Brian Warner.
 
@@ -9,7 +9,7 @@ important changes between major Python versions. They are a "must read" for
 anyone wishing to stay up-to-date after a new release.
 
 .. toctree::
-   :maxdepth: 1
+   :maxdepth: 2
 
    3.0.rst
    2.7.rst
@@ -373,7 +373,7 @@ typedef PY_UNICODE_TYPE Py_UNICODE;
    in most situations is solely ASCII whitespace, we optimize for the common
    case by using a quick look-up table with an inlined check.
  */
-extern const unsigned char _Py_ascii_whitespace[];
+PyAPI_DATA(const unsigned char) _Py_ascii_whitespace[];
 
 #define Py_UNICODE_ISSPACE(ch) \
     ((ch) < 128U ? _Py_ascii_whitespace[(ch)] : _PyUnicode_IsWhitespace(ch))
@@ -0,0 +1,4 @@
+
+import webbrowser
+
+webbrowser.open("http://xkcd.com/353/")
@@ -52,18 +52,22 @@ try:
 except ImportError:
     pass
 else:
-    def idle_showwarning(message, category, filename, lineno):
+    def idle_showwarning(message, category, filename, lineno,
+                         file=None, line=None):
         file = warning_stream
         try:
-            file.write(warnings.formatwarning(message, category, filename, lineno))
+            file.write(warnings.formatwarning(message, category, filename,\
+                                              lineno, file=file, line=line))
         except IOError:
            pass ## file (probably __stderr__) is invalid, warning dropped.
     warnings.showwarning = idle_showwarning
-    def idle_formatwarning(message, category, filename, lineno):
+    def idle_formatwarning(message, category, filename, lineno,
+                           file=None, line=None):
         """Format warnings the IDLE way"""
         s = "\nWarning (from warnings module):\n"
         s += ' File \"%s\", line %s\n' % (filename, lineno)
-        line = linecache.getline(filename, lineno).strip()
+        line = linecache.getline(filename, lineno).strip() \
+               if line is None else line
         if line:
             s += " %s\n" % line
         s += "%s: %s\n>>> " % (category.__name__, message)
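
The pattern IDLE adopts here is the general one for hooking the warnings
module once the six-argument showwarning signature is in place; a minimal
sketch using only public warnings APIs (the hook name is illustrative):

    import sys
    import warnings

    def my_showwarning(message, category, filename, lineno, file=None, line=None):
        target = file if file is not None else sys.stderr
        target.write(warnings.formatwarning(message, category, filename, lineno, line))

    warnings.showwarning = my_showwarning
    warnings.warn("something looks off")   # now routed through my_showwarning
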
@@ -24,11 +24,13 @@ try:
 except ImportError:
     pass
 else:
-    def idle_formatwarning_subproc(message, category, filename, lineno):
+    def idle_formatwarning_subproc(message, category, filename, lineno,
+                                   file=None, line=None):
         """Format warnings the IDLE way"""
         s = "\nWarning (from warnings module):\n"
         s += ' File \"%s\", line %s\n' % (filename, lineno)
-        line = linecache.getline(filename, lineno).strip()
+        line = linecache.getline(filename, lineno).strip() \
+               if line is None else line
         if line:
             s += " %s\n" % line
         s += "%s: %s\n" % (category.__name__, message)
@@ -28,15 +28,19 @@ class FixNext(fixer_base.BaseFix):
                      any* > >
     |
     global=global_stmt< 'global' any* 'next' any* >
-    |
-    mod=file_input< any+ >
     """
 
     order = "pre" # Pre-order tree traversal
 
     def start_tree(self, tree, filename):
         super(FixNext, self).start_tree(tree, filename)
-        self.shadowed_next = False
+        n = find_binding('next', tree)
+        if n:
+            self.warning(n, bind_warning)
+            self.shadowed_next = True
+        else:
+            self.shadowed_next = False
 
     def transform(self, node, results):
         assert results
@@ -69,11 +73,6 @@ class FixNext(fixer_base.BaseFix):
         elif "global" in results:
             self.warning(node, bind_warning)
             self.shadowed_next = True
-        elif mod:
-            n = find_binding('next', mod)
-            if n:
-                self.warning(n, bind_warning)
-                self.shadowed_next = True
 
 
 ### The following functions help test if node is part of an assignment
@@ -0,0 +1,52 @@
+"""
+Optional fixer to transform set() calls to set literals.
+"""
+
+# Author: Benjamin Peterson
+
+from lib2to3 import fixer_base, pytree
+from lib2to3.fixer_util import token, syms
+
+
+
+class FixSetLiteral(fixer_base.BaseFix):
+
+    explicit = True
+
+    PATTERN = """power< 'set' trailer< '('
+                     (atom=atom< '[' (items=listmaker< any ((',' any)* [',']) >
+                                |
+                                single=any) ']' >
+                      |
+                      atom< '(' items=testlist_gexp< any ((',' any)* [',']) > ')' >
+                     )
+                     ')' > >
+              """
+
+    def transform(self, node, results):
+        single = results.get("single")
+        if single:
+            # Make a fake listmaker
+            fake = pytree.Node(syms.listmaker, [single.clone()])
+            single.replace(fake)
+            items = fake
+        else:
+            items = results["items"]
+
+        # Build the contents of the literal
+        literal = [pytree.Leaf(token.LBRACE, "{")]
+        literal.extend(n.clone() for n in items.children)
+        literal.append(pytree.Leaf(token.RBRACE, "}"))
+        # Set the prefix of the right brace to that of the ')' or ']'
+        literal[-1].set_prefix(items.get_next_sibling().get_prefix())
+        maker = pytree.Node(syms.dictsetmaker, literal)
+        maker.set_prefix(node.get_prefix())
+
+        # If the original was a one tuple, we need to remove the extra comma.
+        if len(maker.children) == 4:
+            n = maker.children[2]
+            n.remove()
+            maker.children[-1].set_prefix(n.get_prefix())
+
+        # Finally, replace the set call with our shiny new literal.
+        return maker
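
The rewrite this optional fixer performs is purely syntactic; both spellings
below build the same object, the literal form simply avoids a name lookup and
a call:

    before = set([1, 2, 3])   # what 2.x code typically writes
    after = {1, 2, 3}         # what the set_literal fixer emits
    assert before == after
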
@@ -15,10 +15,31 @@ class StdoutRefactoringTool(refactor.RefactoringTool):
     Prints output to stdout.
     """
 
+    def __init__(self, fixers, options, explicit, nobackups):
+        self.nobackups = nobackups
+        super(StdoutRefactoringTool, self).__init__(fixers, options, explicit)
+
     def log_error(self, msg, *args, **kwargs):
         self.errors.append((msg, args, kwargs))
         self.logger.error(msg, *args, **kwargs)
 
+    def write_file(self, new_text, filename, old_text):
+        if not self.nobackups:
+            # Make backup
+            backup = filename + ".bak"
+            if os.path.lexists(backup):
+                try:
+                    os.remove(backup)
+                except os.error as err:
+                    self.log_message("Can't remove backup %s", backup)
+            try:
+                os.rename(filename, backup)
+            except os.error as err:
+                self.log_message("Can't rename %s to %s", filename, backup)
+        # Actually write the new file
+        super(StdoutRefactoringTool, self).write_file(new_text,
+                                                      filename, old_text)
+
     def print_output(self, lines):
         for line in lines:
             print(line)
@@ -39,7 +60,9 @@ def main(fixer_pkg, args=None):
     parser.add_option("-d", "--doctests_only", action="store_true",
                       help="Fix up doctests only")
     parser.add_option("-f", "--fix", action="append", default=[],
-                      help="Each FIX specifies a transformation; default all")
+                      help="Each FIX specifies a transformation; default: all")
+    parser.add_option("-x", "--nofix", action="append", default=[],
+                      help="Prevent a fixer from being run.")
     parser.add_option("-l", "--list-fixes", action="store_true",
                       help="List available transformations (fixes/fix_*.py)")
     parser.add_option("-p", "--print-function", action="store_true",
@@ -48,10 +71,14 @@ def main(fixer_pkg, args=None):
                       help="More verbose logging")
     parser.add_option("-w", "--write", action="store_true",
                       help="Write back modified files")
+    parser.add_option("-n", "--nobackups", action="store_true", default=False,
+                      help="Don't write backups for modified files.")
 
     # Parse command line arguments
     refactor_stdin = False
     options, args = parser.parse_args(args)
+    if not options.write and options.nobackups:
+        parser.error("Can't use -n without -w")
     if options.list_fixes:
         print("Available transformations for the -f/--fix option:")
         for fixname in refactor.get_all_fix_names(fixer_pkg):
@@ -74,15 +101,22 @@ def main(fixer_pkg, args=None):
 
     # Initialize the refactoring tool
     rt_opts = {"print_function" : options.print_function}
-    avail_names = refactor.get_fixers_from_package(fixer_pkg)
-    explicit = []
+    avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))
+    unwanted_fixes = set(fixer_pkg + ".fix_" + fix for fix in options.nofix)
+    explicit = set()
     if options.fix:
-        explicit = [fixer_pkg + ".fix_" + fix
-                    for fix in options.fix if fix != "all"]
-        fixer_names = avail_names if "all" in options.fix else explicit
+        all_present = False
+        for fix in options.fix:
+            if fix == "all":
+                all_present = True
+            else:
+                explicit.add(fixer_pkg + ".fix_" + fix)
+        requested = avail_fixes.union(explicit) if all_present else explicit
     else:
-        fixer_names = avail_names
-    rt = StdoutRefactoringTool(fixer_names, rt_opts, explicit=explicit)
+        requested = avail_fixes.union(explicit)
+    fixer_names = requested.difference(unwanted_fixes)
+    rt = StdoutRefactoringTool(sorted(fixer_names), rt_opts, sorted(explicit),
+                               options.nobackups)
 
     # Refactor all files and directories passed as arguments
     if not rt.errors:
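
The option handling above reduces to a little set arithmetic; a sketch with
plain strings standing in for fixer module names (the names themselves are
illustrative):

    avail_fixes = {"lib2to3.fixes.fix_imports",
                   "lib2to3.fixes.fix_has_key",
                   "lib2to3.fixes.fix_apply"}
    explicit = {"lib2to3.fixes.fix_set_literal"}   # collected from repeated -f options
    unwanted = {"lib2to3.fixes.fix_apply"}         # collected from repeated -x options

    requested = avail_fixes.union(explicit)        # "-f all" plus any explicit fixers
    fixer_names = requested.difference(unwanted)   # minus everything passed to -x
    print(sorted(fixer_names))
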
@@ -36,9 +36,7 @@ def get_all_fix_names(fixer_pkg, remove_prefix=True):
     pkg = __import__(fixer_pkg, [], [], ["*"])
     fixer_dir = os.path.dirname(pkg.__file__)
     fix_names = []
-    names = os.listdir(fixer_dir)
-    names.sort()
-    for name in names:
+    for name in sorted(os.listdir(fixer_dir)):
         if name.startswith("fix_") and name.endswith(".py"):
             if remove_prefix:
                 name = name[4:]
@@ -253,7 +251,7 @@ class RefactoringTool(object):
            there were errors during the parse.
         """
         try:
-            tree = self.driver.parse_string(data,1)
+            tree = self.driver.parse_string(data)
         except Exception as err:
             self.log_error("Can't parse %s: %s: %s",
                            name, err.__class__.__name__, err)
@@ -352,23 +350,13 @@ class RefactoringTool(object):
         else:
             self.log_debug("Not writing changes to %s", filename)
 
-    def write_file(self, new_text, filename, old_text=None):
+    def write_file(self, new_text, filename, old_text):
         """Writes a string to a file.
 
         It first shows a unified diff between the old text and the new text, and
         then rewrites the file; the latter is only done if the write option is
         set.
         """
-        backup = filename + ".bak"
-        if os.path.lexists(backup):
-            try:
-                os.remove(backup)
-            except os.error as err:
-                self.log_message("Can't remove backup %s", backup)
-        try:
-            os.rename(filename, backup)
-        except os.error as err:
-            self.log_message("Can't rename %s to %s", filename, backup)
         try:
             f = open(filename, "w")
         except os.error as err:
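
The backup handling that moves from RefactoringTool.write_file into
StdoutRefactoringTool boils down to a remove-stale-backup, rename, rewrite
sequence; a self-contained sketch of that pattern (the helper name is made up
here):

    import os

    def replace_with_backup(filename, new_text):
        backup = filename + ".bak"
        if os.path.lexists(backup):
            os.remove(backup)           # drop a stale backup if one is present
        os.rename(filename, backup)     # keep the original contents as .bak
        with open(filename, "w") as f:  # then write the new text in its place
            f.write(new_text)
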
@@ -1,5 +1,6 @@
-Files in this directory:
+In this directory:
 - py2_test_grammar.py -- test file that exercises most/all of Python 2.x's grammar.
 - py3_test_grammar.py -- test file that exercises most/all of Python 3.x's grammar.
 - infinite_recursion.py -- test file that causes lib2to3's faster recursive pattern matching
   scheme to fail, but passes when lib2to3 falls back to iterative pattern matching.
+- fixes/ -- for use by test_refactor.py
@@ -3385,6 +3385,134 @@ class Test_import(FixerTestCase):
         """
         self.check_both(b, a)
 
 
+class Test_set_literal(FixerTestCase):
+
+    fixer = "set_literal"
+
+    def test_basic(self):
+        b = """set([1, 2, 3])"""
+        a = """{1, 2, 3}"""
+        self.check(b, a)
+
+        b = """set((1, 2, 3))"""
+        a = """{1, 2, 3}"""
+        self.check(b, a)
+
+        b = """set((1,))"""
+        a = """{1}"""
+        self.check(b, a)
+
+        b = """set([1])"""
+        self.check(b, a)
+
+        b = """set((a, b))"""
+        a = """{a, b}"""
+        self.check(b, a)
+
+        b = """set([a, b])"""
+        self.check(b, a)
+
+        b = """set((a*234, f(args=23)))"""
+        a = """{a*234, f(args=23)}"""
+        self.check(b, a)
+
+        b = """set([a*23, f(23)])"""
+        a = """{a*23, f(23)}"""
+        self.check(b, a)
+
+        b = """set([a-234**23])"""
+        a = """{a-234**23}"""
+        self.check(b, a)
+
+    def test_listcomps(self):
+        b = """set([x for x in y])"""
+        a = """{x for x in y}"""
+        self.check(b, a)
+
+        b = """set([x for x in y if x == m])"""
+        a = """{x for x in y if x == m}"""
+        self.check(b, a)
+
+        b = """set([x for x in y for a in b])"""
+        a = """{x for x in y for a in b}"""
+        self.check(b, a)
+
+        b = """set([f(x) - 23 for x in y])"""
+        a = """{f(x) - 23 for x in y}"""
+        self.check(b, a)
+
+    def test_whitespace(self):
+        b = """set( [1, 2])"""
+        a = """{1, 2}"""
+        self.check(b, a)
+
+        b = """set([1 , 2])"""
+        a = """{1 , 2}"""
+        self.check(b, a)
+
+        b = """set([ 1 ])"""
+        a = """{ 1 }"""
+        self.check(b, a)
+
+        b = """set( [1] )"""
+        a = """{1}"""
+        self.check(b, a)
+
+        b = """set([ 1, 2 ])"""
+        a = """{ 1, 2 }"""
+        self.check(b, a)
+
+        b = """set([x for x in y ])"""
+        a = """{x for x in y }"""
+        self.check(b, a)
+
+        b = """set(
+            [1, 2]
+        )
+        """
+        a = """{1, 2}\n"""
+        self.check(b, a)
+
+    def test_comments(self):
+        b = """set((1, 2)) # Hi"""
+        a = """{1, 2} # Hi"""
+        self.check(b, a)
+
+        # This isn't optimal behavior, but the fixer is optional.
+        b = """
+        # Foo
+        set( # Bar
+           (1, 2)
+        )
+        """
+        a = """
+        # Foo
+        {1, 2}
+        """
+        self.check(b, a)
+
+    def test_unchanged(self):
+        s = """set()"""
+        self.unchanged(s)
+
+        s = """set(a)"""
+        self.unchanged(s)
+
+        s = """set(a, b, c)"""
+        self.unchanged(s)
+
+        # Don't transform generators because they might have to be lazy.
+        s = """set(x for x in y)"""
+        self.unchanged(s)
+
+        s = """set(x for x in y if z)"""
+        self.unchanged(s)
+
+        s = """set(a*823-23**2 + f(23))"""
+        self.unchanged(s)
+
+
 class Test_sys_exc(FixerTestCase):
     fixer = "sys_exc"
 
@@ -353,29 +353,29 @@ class TestPatterns(support.TestCase):
         # Build a pattern matching a leaf
         pl = pytree.LeafPattern(100, "foo", name="pl")
         r = {}
-        self.assertEqual(pl.match(root, results=r), False)
+        self.assertFalse(pl.match(root, results=r))
         self.assertEqual(r, {})
-        self.assertEqual(pl.match(n1, results=r), False)
+        self.assertFalse(pl.match(n1, results=r))
         self.assertEqual(r, {})
-        self.assertEqual(pl.match(n2, results=r), False)
+        self.assertFalse(pl.match(n2, results=r))
         self.assertEqual(r, {})
-        self.assertEqual(pl.match(l1, results=r), True)
+        self.assertTrue(pl.match(l1, results=r))
         self.assertEqual(r, {"pl": l1})
         r = {}
-        self.assertEqual(pl.match(l2, results=r), False)
+        self.assertFalse(pl.match(l2, results=r))
         self.assertEqual(r, {})
         # Build a pattern matching a node
         pn = pytree.NodePattern(1000, [pl], name="pn")
-        self.assertEqual(pn.match(root, results=r), False)
+        self.assertFalse(pn.match(root, results=r))
         self.assertEqual(r, {})
-        self.assertEqual(pn.match(n1, results=r), False)
+        self.assertFalse(pn.match(n1, results=r))
         self.assertEqual(r, {})
-        self.assertEqual(pn.match(n2, results=r), True)
+        self.assertTrue(pn.match(n2, results=r))
         self.assertEqual(r, {"pn": n2, "pl": l3})
         r = {}
-        self.assertEqual(pn.match(l1, results=r), False)
+        self.assertFalse(pn.match(l1, results=r))
         self.assertEqual(r, {})
-        self.assertEqual(pn.match(l2, results=r), False)
+        self.assertFalse(pn.match(l2, results=r))
         self.assertEqual(r, {})
 
     def testWildcardPatterns(self):
@@ -391,11 +391,11 @@ class TestPatterns(support.TestCase):
         pn = pytree.NodePattern(1000, [pl], name="pn")
         pw = pytree.WildcardPattern([[pn], [pl, pl]], name="pw")
         r = {}
-        self.assertEqual(pw.match_seq([root], r), False)
+        self.assertFalse(pw.match_seq([root], r))
         self.assertEqual(r, {})
-        self.assertEqual(pw.match_seq([n1], r), False)
+        self.assertFalse(pw.match_seq([n1], r))
         self.assertEqual(r, {})
-        self.assertEqual(pw.match_seq([n2], r), True)
+        self.assertTrue(pw.match_seq([n2], r))
         # These are easier to debug
         self.assertEqual(sorted(r.keys()), ["pl", "pn", "pw"])
         self.assertEqual(r["pl"], l1)
@@ -404,7 +404,7 @@ class TestPatterns(support.TestCase):
         # But this is equivalent
         self.assertEqual(r, {"pl": l1, "pn": n2, "pw": [n2]})
         r = {}
-        self.assertEqual(pw.match_seq([l1, l3], r), True)
+        self.assertTrue(pw.match_seq([l1, l3], r))
         self.assertEqual(r, {"pl": l3, "pw": [l1, l3]})
         self.assert_(r["pl"] is l3)
         r = {}
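
The test_pytree changes above follow one mechanical pattern; a minimal sketch
of the before-and-after assertion style:

    import unittest

    class Example(unittest.TestCase):
        def test_style(self):
            matched = False
            self.assertEqual(matched, False)   # old spelling seen on the removed lines
            self.assertFalse(matched)          # preferred spelling introduced here

    if __name__ == "__main__":
        unittest.main()
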
@@ -123,7 +123,6 @@ class TestRefactoringTool(unittest.TestCase):
 
     def test_refactor_file(self):
         test_file = os.path.join(FIXER_DIR, "parrot_example.py")
-        backup = test_file + ".bak"
         old_contents = open(test_file, "r").read()
         rt = self.rt()
 
@@ -133,14 +132,8 @@ class TestRefactoringTool(unittest.TestCase):
         rt.refactor_file(test_file, True)
         try:
             self.assertNotEqual(old_contents, open(test_file, "r").read())
-            self.assertTrue(os.path.exists(backup))
-            self.assertEqual(old_contents, open(backup, "r").read())
         finally:
             open(test_file, "w").write(old_contents)
-            try:
-                os.unlink(backup)
-            except OSError:
-                pass
 
     def test_refactor_docstring(self):
         rt = self.rt()
@@ -518,6 +518,10 @@ test_u_code(PyObject *self)
     Py_UNICODE *value;
     Py_ssize_t len;
 
+    /* issue4122: Undefined reference to _Py_ascii_whitespace on Windows */
+    /* Just use the macro and check that it compiles */
+    int x = Py_UNICODE_ISSPACE(25);
+
     tuple = PyTuple_New(1);
     if (tuple == NULL)
         return NULL;
@@ -499,10 +499,6 @@ win32_error_unicode(char* function, Py_UNICODE* filename)
     return PyErr_SetFromWindowsErr(errno);
 }
 
-static PyObject *_PyUnicode_FromFileSystemEncodedObject(register PyObject *obj)
-{
-}
-
 static int
 convert_to_unicode(PyObject **param)
 {
@@ -713,7 +709,7 @@ win32_1str(PyObject* args, char* func,
    chdir is essentially a wrapper around SetCurrentDirectory; however,
    it also needs to set "magic" environment variables indicating
    the per-drive current directory, which are of the form =<drive>: */
-BOOL __stdcall
+static BOOL __stdcall
 win32_chdir(LPCSTR path)
 {
     char new_path[MAX_PATH+1];
@@ -738,7 +734,7 @@ win32_chdir(LPCSTR path)
 
 /* The Unicode version differs from the ANSI version
    since the current directory might exceed MAX_PATH characters */
-BOOL __stdcall
+static BOOL __stdcall
 win32_wchdir(LPCWSTR path)
 {
     wchar_t _new_path[MAX_PATH+1], *new_path = _new_path;
@@ -804,12 +804,30 @@ static int obj2ast_int(PyObject* obj, int* out, PyArena* arena)
     return 0;
 }
 
+static int add_ast_fields()
+{
+    PyObject *empty_tuple, *d;
+    if (PyType_Ready(&AST_type) < 0)
+        return -1;
+    d = AST_type.tp_dict;
+    empty_tuple = PyTuple_New(0);
+    if (!empty_tuple ||
+        PyDict_SetItemString(d, "_fields", empty_tuple) < 0 ||
+        PyDict_SetItemString(d, "_attributes", empty_tuple) < 0) {
+        Py_XDECREF(empty_tuple);
+        return -1;
+    }
+    Py_DECREF(empty_tuple);
+    return 0;
+}
+
 """, 0, reflow=False)
 
         self.emit("static int init_types(void)",0)
         self.emit("{", 0)
         self.emit("static int initialized;", 1)
         self.emit("if (initialized) return 1;", 1)
+        self.emit("if (add_ast_fields() < 0) return 0;", 1)
         for dfn in mod.dfns:
             self.visit(dfn)
         self.emit("initialized = 1;", 1)
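
The effect of the generated add_ast_fields() helper is visible from Python; a
small sketch (the Module._fields value shown is typical but varies between
CPython versions):

    import ast

    print(ast.AST._fields)        # () -- the root node type now has the attribute
    print(ast.AST._attributes)    # ()
    module = ast.parse("x = 1")
    print(module._fields)         # e.g. ('body',); newer versions add more fields
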
@@ -99,7 +99,7 @@ def check(file):
     for name in names:
         fullname = os.path.join(file, name)
         if ((recurse and os.path.isdir(fullname) and
-             not os.path.islink(fullname))
+             not os.path.islink(fullname) and
+             not os.path.split(fullname)[1].startswith("."))
             or name.lower().endswith(".py")):
             check(fullname)
     return
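
The directory-walking rule the last hunk adds (skip subdirectories whose name
starts with a dot while collecting .py files) looks like this in a standalone
form (the function name and starting directory are illustrative):

    import os

    def iter_python_files(root):
        for dirpath, dirnames, filenames in os.walk(root):
            # prune hidden directories such as .svn before descending into them
            dirnames[:] = [d for d in dirnames if not d.startswith(".")]
            for name in filenames:
                if name.lower().endswith(".py"):
                    yield os.path.join(dirpath, name)

    for path in iter_python_files("."):
        print(path)
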