Merged revisions 68503,68507,68694,69054,69673,69679-69681,70991,70999,71003,71695 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3
........
  r68503 | benjamin.peterson | 2009-01-10 14:14:49 -0600 (Sat, 10 Jan 2009) | 1 line
  use variable
........
  r68507 | benjamin.peterson | 2009-01-10 15:13:16 -0600 (Sat, 10 Jan 2009) | 1 line
  rewrap
........
  r68694 | benjamin.peterson | 2009-01-17 17:55:59 -0600 (Sat, 17 Jan 2009) | 1 line
  test for specific node type
........
  r69054 | guilherme.polo | 2009-01-28 10:01:54 -0600 (Wed, 28 Jan 2009) | 2 lines
  Added mapping for the ttk module.
........
  r69673 | benjamin.peterson | 2009-02-16 09:38:22 -0600 (Mon, 16 Feb 2009) | 1 line
  fix handling of as imports #5279
........
  r69679 | benjamin.peterson | 2009-02-16 11:36:06 -0600 (Mon, 16 Feb 2009) | 1 line
  make Base.get_next_sibling() and Base.get_prev_sibling() properties
........
  r69680 | benjamin.peterson | 2009-02-16 11:41:48 -0600 (Mon, 16 Feb 2009) | 1 line
  normalize docstrings in pytree according to PEP 11
........
  r69681 | benjamin.peterson | 2009-02-16 11:43:09 -0600 (Mon, 16 Feb 2009) | 1 line
  use a set
........
  r70991 | benjamin.peterson | 2009-04-01 15:54:50 -0500 (Wed, 01 Apr 2009) | 1 line
  map urllib.urlopen to urllib.request.open #5637
........
  r70999 | benjamin.peterson | 2009-04-01 17:36:47 -0500 (Wed, 01 Apr 2009) | 1 line
  add very alpha support to 2to3 for running concurrently with multiprocessing
........
  r71003 | benjamin.peterson | 2009-04-01 18:10:43 -0500 (Wed, 01 Apr 2009) | 1 line
  fix when multiprocessing is not available or used
........
  r71695 | benjamin.peterson | 2009-04-17 22:21:29 -0500 (Fri, 17 Apr 2009) | 1 line
  refactor multiprocessing support, so it's less hacky to employ and only loads mp when needed
........
This commit is contained in:
parent c6c1f96420
commit eaeb4c695e
@@ -226,7 +226,7 @@ def is_probably_builtin(node):
     """
     Check that something isn't an attribute or function name etc.
     """
-    prev = node.get_prev_sibling()
+    prev = node.prev_sibling
     if prev is not None and prev.type == token.DOT:
         # Attribute lookup.
         return False
@@ -25,11 +25,11 @@ The following cases will be converted:
 from .. import pytree
 from ..pgen2 import token
 from .. import fixer_base
-from ..fixer_util import Assign, Attr, Name, is_tuple, is_list
+from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, syms

 def find_excepts(nodes):
     for i, n in enumerate(nodes):
-        if isinstance(n, pytree.Node):
+        if n.type == syms.except_clause:
             if n.children[0].value == 'except':
                 yield (n, nodes[i+2])

@@ -27,6 +27,7 @@ MAPPING = {'StringIO': 'io',
            'ScrolledText': 'tkinter.scrolledtext',
            'Tkconstants': 'tkinter.constants',
            'Tix': 'tkinter.tix',
+           'ttk': 'tkinter.ttk',
            'Tkinter': 'tkinter',
            'markupbase': '_markupbase',
            '_winreg': 'winreg',
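For context on the new 'ttk' entry, a rough before/after of what the imports fixer produces with this mapping (illustrative only, not part of the diff; the widget code is made up):

    # Before (Python 2)
    import ttk
    toolbar = ttk.Frame(root)

    # After running the imports fixer (roughly)
    import tkinter.ttk
    toolbar = tkinter.ttk.Frame(root)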
@@ -121,17 +122,18 @@ class FixImports(fixer_base.BaseFix):
     def transform(self, node, results):
         import_mod = results.get("module_name")
         if import_mod:
-            new_name = self.mapping[import_mod.value]
+            mod_name = import_mod.value
+            new_name = self.mapping[mod_name]
             import_mod.replace(Name(new_name, prefix=import_mod.get_prefix()))
             if "name_import" in results:
                 # If it's not a "from x import x, y" or "import x as y" import,
                 # marked its usage to be replaced.
-                self.replace[import_mod.value] = new_name
+                self.replace[mod_name] = new_name
             if "multiple_imports" in results:
-                # This is a nasty hack to fix multiple imports on a
-                # line (e.g., "import StringIO, urlparse"). The problem is that I
-                # can't figure out an easy way to make a pattern recognize the
-                # keys of MAPPING randomly sprinkled in an import statement.
+                # This is a nasty hack to fix multiple imports on a line (e.g.,
+                # "import StringIO, urlparse"). The problem is that I can't
+                # figure out an easy way to make a pattern recognize the keys of
+                # MAPPING randomly sprinkled in an import statement.
                 results = self.match(node)
                 if results:
                     self.transform(node, results)
@@ -1,8 +1,9 @@
 """ Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """

 # Local imports
-from .. import fixer_base
-from ..fixer_util import BlankLine
+from lib2to3 import fixer_base
+from lib2to3.fixer_util import BlankLine, syms, token

+
 class FixItertoolsImports(fixer_base.BaseFix):
     PATTERN = """
@@ -11,34 +12,40 @@ class FixItertoolsImports(fixer_base.BaseFix):

     def transform(self, node, results):
         imports = results['imports']
-        children = imports.children[:] or [imports]
-        for child in children:
-            if not hasattr(child, 'value'):
-                # Handle 'import ... as ...'
-                continue
-            if child.value in ('imap', 'izip', 'ifilter'):
-                # The value must be set to none in case child == import,
-                # so that the test for empty imports will work out
+        if imports.type == syms.import_as_name or not imports.children:
+            children = [imports]
+        else:
+            children = imports.children
+        for child in children[::2]:
+            if child.type == token.NAME:
+                member = child.value
+                name_node = child
+            else:
+                assert child.type == syms.import_as_name
+                name_node = child.children[0]
+            member_name = name_node.value
+            if member_name in ('imap', 'izip', 'ifilter'):
                 child.value = None
                 child.remove()
-            elif child.value == 'ifilterfalse':
+            elif member_name == 'ifilterfalse':
                 node.changed()
-                child.value = 'filterfalse'
+                name_node.value = 'filterfalse'

         # Make sure the import statement is still sane
         children = imports.children[:] or [imports]
         remove_comma = True
         for child in children:
-            if remove_comma and getattr(child, 'value', None) == ',':
+            if remove_comma and child.type == token.COMMA:
                 child.remove()
             else:
                 remove_comma ^= True

-        if unicode(children[-1]) == ',':
+        if children[-1].type == token.COMMA:
             children[-1].remove()

         # If there are no imports left, just get rid of the entire statement
-        if not (imports.children or getattr(imports, 'value', None)):
+        if not (imports.children or getattr(imports, 'value', None)) or \
+                imports.parent is None:
             p = node.get_prefix()
             node = BlankLine()
             node.prefix = p
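A note on why the new loop walks children[::2] (this sketch is illustrative and not part of the change): in a statement such as "from itertools import imap as _map, izip", the matched imports node's children alternate between NAME/import_as_name nodes and COMMA leaves, so every second child is a candidate member. One way to see the shape, assuming a stock lib2to3 checkout:

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("from itertools import imap as _map, izip\n")
    # Print every node; the import list shows the alternating
    # name/COMMA pattern that children[::2] relies on.
    for node in tree.pre_order():
        print repr(node)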
@@ -38,7 +38,7 @@ class FixSetLiteral(fixer_base.BaseFix):
         literal.extend(n.clone() for n in items.children)
         literal.append(pytree.Leaf(token.RBRACE, "}"))
         # Set the prefix of the right brace to that of the ')' or ']'
-        literal[-1].set_prefix(items.get_next_sibling().get_prefix())
+        literal[-1].set_prefix(items.next_sibling.get_prefix())
         maker = pytree.Node(syms.dictsetmaker, literal)
         maker.set_prefix(node.get_prefix())
@@ -12,7 +12,7 @@ from ..fixer_util import Name, Comma, FromImport, Newline, attr_chain
 MAPPING = {'urllib': [
                ('urllib.request',
                    ['URLOpener', 'FancyURLOpener', 'urlretrieve',
-                    '_urlopener', 'urlcleanup']),
+                    '_urlopener', 'urlopen', 'urlcleanup']),
                ('urllib.parse',
                    ['quote', 'quote_plus', 'unquote', 'unquote_plus',
                     'urlencode', 'pathname2url', 'url2pathname', 'splitattr',
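For reviewers, the intended effect of adding 'urlopen' to the urllib.request member list (a rough sketch, not part of the diff):

    # Before (Python 2)
    import urllib
    data = urllib.urlopen("http://example.com/").read()

    # After the urllib fixer runs (roughly)
    import urllib.request
    data = urllib.request.urlopen("http://example.com/").read()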
@@ -10,8 +10,7 @@ import optparse

 from . import refactor

-
-class StdoutRefactoringTool(refactor.RefactoringTool):
+class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
     """
     Prints output to stdout.
     """
@@ -64,6 +63,8 @@ def main(fixer_pkg, args=None):
                       help="Fix up doctests only")
     parser.add_option("-f", "--fix", action="append", default=[],
                       help="Each FIX specifies a transformation; default: all")
+    parser.add_option("-j", "--processes", action="store", default=1,
+                      type="int", help="Run 2to3 concurrently")
     parser.add_option("-x", "--nofix", action="append", default=[],
                       help="Prevent a fixer from being run.")
     parser.add_option("-l", "--list-fixes", action="store_true",
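With this option, an invocation along the lines of "2to3 -j 4 mypackage/" asks the tool to spread files across four worker processes, while the default of 1 keeps the old single-process behaviour; the wiring for this is in the main.py and refactor.py hunks that follow.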
@@ -126,7 +127,14 @@ def main(fixer_pkg, args=None):
         if refactor_stdin:
             rt.refactor_stdin()
         else:
-            rt.refactor(args, options.write, options.doctests_only)
+            try:
+                rt.refactor(args, options.write, options.doctests_only,
+                            options.processes)
+            except refactor.MultiprocessingUnsupported:
+                assert options.processes > 1
+                print >> sys.stderr, "Sorry, -j isn't " \
+                    "supported on this platform."
+                return 1
         rt.summarize()

     # Return error status (0 if rt.errors is zero)
@@ -30,7 +30,7 @@ _PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),

 def tokenize_wrapper(input):
     """Tokenizes a string suppressing significant whitespace."""
-    skip = (token.NEWLINE, token.INDENT, token.DEDENT)
+    skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
     tokens = tokenize.generate_tokens(driver.generate_lines(input).next)
     for quintuple in tokens:
         type, value, start, end, line_text = quintuple
@@ -1,7 +1,8 @@
 # Copyright 2006 Google, Inc. All Rights Reserved.
 # Licensed to PSF under a Contributor Agreement.

-"""Python parse tree definitions.
+"""
+Python parse tree definitions.

 This is a very concrete parse tree; we need to keep every token and
 even the comments and whitespace between tokens.
@@ -31,7 +32,8 @@ def type_repr(type_num):

 class Base(object):

-    """Abstract base class for Node and Leaf.
+    """
+    Abstract base class for Node and Leaf.

     This provides some default functionality and boilerplate using the
     template pattern.
@@ -51,7 +53,8 @@ class Base(object):
         return object.__new__(cls)

     def __eq__(self, other):
-        """Compares two nodes for equality.
+        """
+        Compare two nodes for equality.

         This calls the method _eq().
         """
@@ -60,7 +63,8 @@ class Base(object):
         return self._eq(other)

     def __ne__(self, other):
-        """Compares two nodes for inequality.
+        """
+        Compare two nodes for inequality.

         This calls the method _eq().
         """
@@ -69,53 +73,58 @@ class Base(object):
         return not self._eq(other)

     def _eq(self, other):
-        """Compares two nodes for equality.
+        """
+        Compare two nodes for equality.

-        This is called by __eq__ and __ne__. It is only called if the
-        two nodes have the same type. This must be implemented by the
-        concrete subclass. Nodes should be considered equal if they
-        have the same structure, ignoring the prefix string and other
-        context information.
+        This is called by __eq__ and __ne__. It is only called if the two nodes
+        have the same type. This must be implemented by the concrete subclass.
+        Nodes should be considered equal if they have the same structure,
+        ignoring the prefix string and other context information.
         """
         raise NotImplementedError

     def clone(self):
-        """Returns a cloned (deep) copy of self.
+        """
+        Return a cloned (deep) copy of self.

         This must be implemented by the concrete subclass.
         """
         raise NotImplementedError

     def post_order(self):
-        """Returns a post-order iterator for the tree.
+        """
+        Return a post-order iterator for the tree.

         This must be implemented by the concrete subclass.
         """
         raise NotImplementedError

     def pre_order(self):
-        """Returns a pre-order iterator for the tree.
+        """
+        Return a pre-order iterator for the tree.

         This must be implemented by the concrete subclass.
         """
         raise NotImplementedError

     def set_prefix(self, prefix):
-        """Sets the prefix for the node (see Leaf class).
+        """
+        Set the prefix for the node (see Leaf class).

         This must be implemented by the concrete subclass.
         """
         raise NotImplementedError

     def get_prefix(self):
-        """Returns the prefix for the node (see Leaf class).
+        """
+        Return the prefix for the node (see Leaf class).

         This must be implemented by the concrete subclass.
         """
         raise NotImplementedError

     def replace(self, new):
-        """Replaces this node with a new one in the parent."""
+        """Replace this node with a new one in the parent."""
         assert self.parent is not None, str(self)
         assert new is not None
         if not isinstance(new, list):
@@ -138,7 +147,7 @@ class Base(object):
         self.parent = None

     def get_lineno(self):
-        """Returns the line number which generated the invocant node."""
+        """Return the line number which generated the invocant node."""
         node = self
         while not isinstance(node, Leaf):
             if not node.children:
@@ -152,8 +161,10 @@ class Base(object):
         self.was_changed = True

     def remove(self):
-        """Remove the node from the tree. Returns the position of the node
-        in its parent's children before it was removed."""
+        """
+        Remove the node from the tree. Returns the position of the node in its
+        parent's children before it was removed.
+        """
         if self.parent:
             for i, node in enumerate(self.parent.children):
                 if node is self:
@@ -162,10 +173,12 @@ class Base(object):
             self.parent = None
             return i

-    def get_next_sibling(self):
-        """Return the node immediately following the invocant in their
-        parent's children list. If the invocant does not have a next
-        sibling, return None."""
+    @property
+    def next_sibling(self):
+        """
+        The node immediately following the invocant in their parent's children
+        list. If the invocant does not have a next sibling, it is None
+        """
         if self.parent is None:
             return None

@@ -177,10 +190,12 @@ class Base(object):
         except IndexError:
             return None

-    def get_prev_sibling(self):
-        """Return the node immediately preceding the invocant in their
-        parent's children list. If the invocant does not have a previous
-        sibling, return None."""
+    @property
+    def prev_sibling(self):
+        """
+        The node immediately preceding the invocant in their parent's children
+        list. If the invocant does not have a previous sibling, it is None.
+        """
         if self.parent is None:
             return None
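A minimal sketch of how call sites change with the new properties (this mirrors the test updates further down and is illustrative only):

    from lib2to3 import pytree

    n1 = pytree.Node(1000, [])
    n2 = pytree.Node(1000, [])
    p1 = pytree.Node(1000, [n1, n2])

    # Previously: n1.get_next_sibling(), n2.get_prev_sibling()
    assert n1.next_sibling is n2
    assert n2.prev_sibling is n1
    assert p1.next_sibling is None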
@@ -192,9 +207,11 @@ class Base(object):
         return self.parent.children[i-1]

     def get_suffix(self):
-        """Return the string immediately following the invocant node. This
-        is effectively equivalent to node.get_next_sibling().get_prefix()"""
-        next_sib = self.get_next_sibling()
+        """
+        Return the string immediately following the invocant node. This is
+        effectively equivalent to node.next_sibling.get_prefix()
+        """
+        next_sib = self.next_sibling
         if next_sib is None:
             return ""
         return next_sib.get_prefix()
@@ -205,7 +222,8 @@ class Node(Base):
     """Concrete implementation for interior nodes."""

     def __init__(self, type, children, context=None, prefix=None):
-        """Initializer.
+        """
+        Initializer.

         Takes a type constant (a symbol number >= 256), a sequence of
         child nodes, and an optional context keyword argument.
@@ -222,42 +240,44 @@ class Node(Base):
             self.set_prefix(prefix)

     def __repr__(self):
-        """Returns a canonical string representation."""
+        """Return a canonical string representation."""
         return "%s(%s, %r)" % (self.__class__.__name__,
                                type_repr(self.type),
                                self.children)

     def __str__(self):
-        """Returns a pretty string representation.
+        """
+        Return a pretty string representation.

         This reproduces the input source exactly.
         """
         return "".join(map(str, self.children))

     def _eq(self, other):
-        """Compares two nodes for equality."""
+        """Compare two nodes for equality."""
         return (self.type, self.children) == (other.type, other.children)

     def clone(self):
-        """Returns a cloned (deep) copy of self."""
+        """Return a cloned (deep) copy of self."""
         return Node(self.type, [ch.clone() for ch in self.children])

     def post_order(self):
-        """Returns a post-order iterator for the tree."""
+        """Return a post-order iterator for the tree."""
         for child in self.children:
             for node in child.post_order():
                 yield node
         yield self

     def pre_order(self):
-        """Returns a pre-order iterator for the tree."""
+        """Return a pre-order iterator for the tree."""
         yield self
         for child in self.children:
             for node in child.post_order():
                 yield node

     def set_prefix(self, prefix):
-        """Sets the prefix for the node.
+        """
+        Set the prefix for the node.

         This passes the responsibility on to the first child.
         """
@@ -265,7 +285,8 @@ class Node(Base):
             self.children[0].set_prefix(prefix)

     def get_prefix(self):
-        """Returns the prefix for the node.
+        """
+        Return the prefix for the node.

         This passes the call on to the first child.
         """
@@ -274,23 +295,29 @@ class Node(Base):
         return self.children[0].get_prefix()

     def set_child(self, i, child):
-        """Equivalent to 'node.children[i] = child'. This method also sets the
-        child's parent attribute appropriately."""
+        """
+        Equivalent to 'node.children[i] = child'. This method also sets the
+        child's parent attribute appropriately.
+        """
         child.parent = self
         self.children[i].parent = None
         self.children[i] = child
         self.changed()

     def insert_child(self, i, child):
-        """Equivalent to 'node.children.insert(i, child)'. This method also
-        sets the child's parent attribute appropriately."""
+        """
+        Equivalent to 'node.children.insert(i, child)'. This method also sets
+        the child's parent attribute appropriately.
+        """
         child.parent = self
         self.children.insert(i, child)
         self.changed()

     def append_child(self, child):
-        """Equivalent to 'node.children.append(child)'. This method also
-        sets the child's parent attribute appropriately."""
+        """
+        Equivalent to 'node.children.append(child)'. This method also sets the
+        child's parent attribute appropriately.
+        """
         child.parent = self
         self.children.append(child)
         self.changed()
@@ -306,10 +333,11 @@ class Leaf(Base):
     column = 0  # Column where this token tarts in the input

     def __init__(self, type, value, context=None, prefix=None):
-        """Initializer.
+        """
+        Initializer.

-        Takes a type constant (a token number < 256), a string value,
-        and an optional context keyword argument.
+        Takes a type constant (a token number < 256), a string value, and an
+        optional context keyword argument.
         """
         assert 0 <= type < 256, type
         if context is not None:
@@ -320,51 +348,53 @@ class Leaf(Base):
             self.prefix = prefix

     def __repr__(self):
-        """Returns a canonical string representation."""
+        """Return a canonical string representation."""
         return "%s(%r, %r)" % (self.__class__.__name__,
                                self.type,
                                self.value)

     def __str__(self):
-        """Returns a pretty string representation.
+        """
+        Return a pretty string representation.

         This reproduces the input source exactly.
         """
         return self.prefix + str(self.value)

     def _eq(self, other):
-        """Compares two nodes for equality."""
+        """Compare two nodes for equality."""
         return (self.type, self.value) == (other.type, other.value)

     def clone(self):
-        """Returns a cloned (deep) copy of self."""
+        """Return a cloned (deep) copy of self."""
         return Leaf(self.type, self.value,
                     (self.prefix, (self.lineno, self.column)))

     def post_order(self):
-        """Returns a post-order iterator for the tree."""
+        """Return a post-order iterator for the tree."""
         yield self

     def pre_order(self):
-        """Returns a pre-order iterator for the tree."""
+        """Return a pre-order iterator for the tree."""
         yield self

     def set_prefix(self, prefix):
-        """Sets the prefix for the node."""
+        """Set the prefix for the node."""
         self.changed()
         self.prefix = prefix

     def get_prefix(self):
-        """Returns the prefix for the node."""
+        """Return the prefix for the node."""
         return self.prefix


 def convert(gr, raw_node):
-    """Converts raw node information to a Node or Leaf instance.
+    """
+    Convert raw node information to a Node or Leaf instance.

-    This is passed to the parser driver which calls it whenever a
-    reduction of a grammar rule produces a new complete node, so that
-    the tree is build strictly bottom-up.
+    This is passed to the parser driver which calls it whenever a reduction of a
+    grammar rule produces a new complete node, so that the tree is build
+    strictly bottom-up.
     """
     type, value, context, children = raw_node
     if children or type in gr.number2symbol:
@@ -379,7 +409,8 @@ def convert(gr, raw_node):

 class BasePattern(object):

-    """A pattern is a tree matching pattern.
+    """
+    A pattern is a tree matching pattern.

     It looks for a specific node type (token or symbol), and
     optionally for a specific content.
@@ -409,14 +440,16 @@ class BasePattern(object):
         return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args)))

     def optimize(self):
-        """A subclass can define this as a hook for optimizations.
+        """
+        A subclass can define this as a hook for optimizations.

         Returns either self or another node with the same effect.
         """
         return self

     def match(self, node, results=None):
-        """Does this pattern exactly match a node?
+        """
+        Does this pattern exactly match a node?

         Returns True if it matches, False if not.

@@ -440,7 +473,8 @@ class BasePattern(object):
         return True

     def match_seq(self, nodes, results=None):
-        """Does this pattern exactly match a sequence of nodes?
+        """
+        Does this pattern exactly match a sequence of nodes?

         Default implementation for non-wildcard patterns.
         """
@@ -449,7 +483,8 @@ class BasePattern(object):
         return self.match(nodes[0], results)

     def generate_matches(self, nodes):
-        """Generator yielding all matches for this pattern.
+        """
+        Generator yielding all matches for this pattern.

         Default implementation for non-wildcard patterns.
         """
@@ -461,7 +496,8 @@ class BasePattern(object):
 class LeafPattern(BasePattern):

     def __init__(self, type=None, content=None, name=None):
-        """Initializer. Takes optional type, content, and name.
+        """
+        Initializer. Takes optional type, content, and name.

         The type, if given must be a token type (< 256). If not given,
         this matches any *leaf* node; the content may still be required.
@@ -486,7 +522,8 @@ class LeafPattern(BasePattern):
         return BasePattern.match(self, node, results)

     def _submatch(self, node, results=None):
-        """Match the pattern's content to the node's children.
+        """
+        Match the pattern's content to the node's children.

         This assumes the node type matches and self.content is not None.

@@ -505,7 +542,8 @@ class NodePattern(BasePattern):
     wildcards = False

     def __init__(self, type=None, content=None, name=None):
-        """Initializer. Takes optional type, content, and name.
+        """
+        Initializer. Takes optional type, content, and name.

         The type, if given, must be a symbol type (>= 256). If the
         type is None this matches *any* single node (leaf or not),
@@ -533,7 +571,8 @@ class NodePattern(BasePattern):
         self.name = name

     def _submatch(self, node, results=None):
-        """Match the pattern's content to the node's children.
+        """
+        Match the pattern's content to the node's children.

         This assumes the node type matches and self.content is not None.

@@ -561,7 +600,8 @@ class NodePattern(BasePattern):

 class WildcardPattern(BasePattern):

-    """A wildcard pattern can match zero or more nodes.
+    """
+    A wildcard pattern can match zero or more nodes.

     This has all the flexibility needed to implement patterns like:

@@ -573,7 +613,8 @@ class WildcardPattern(BasePattern):
     """

     def __init__(self, content=None, min=0, max=HUGE, name=None):
-        """Initializer.
+        """
+        Initializer.

         Args:
             content: optional sequence of subsequences of patterns;
@@ -641,7 +682,8 @@ class WildcardPattern(BasePattern):
         return False

     def generate_matches(self, nodes):
-        """Generator yielding matches for a sequence of nodes.
+        """
+        Generator yielding matches for a sequence of nodes.

         Args:
             nodes: sequence of nodes
@@ -744,7 +786,8 @@ class WildcardPattern(BasePattern):
 class NegatedPattern(BasePattern):

     def __init__(self, content=None):
-        """Initializer.
+        """
+        Initializer.

         The argument is either a pattern or None. If it is None, this
         only matches an empty sequence (effectively '$' in regex
@@ -776,7 +819,8 @@ class NegatedPattern(BasePattern):


 def generate_matches(patterns, nodes):
-    """Generator yielding matches for a sequence of patterns and nodes.
+    """
+    Generator yielding matches for a sequence of patterns and nodes.

     Args:
         patterns: a sequence of patterns
@@ -506,6 +506,63 @@ class RefactoringTool(object):
                 yield ""


+class MultiprocessingUnsupported(Exception):
+    pass
+
+
+class MultiprocessRefactoringTool(RefactoringTool):
+
+    def __init__(self, *args, **kwargs):
+        super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
+        self.queue = None
+
+    def refactor(self, items, write=False, doctests_only=False,
+                 num_processes=1):
+        if num_processes == 1:
+            return super(MultiprocessRefactoringTool, self).refactor(
+                items, write, doctests_only)
+        try:
+            import multiprocessing
+        except ImportError:
+            raise MultiprocessingUnsupported
+        if self.queue is not None:
+            raise RuntimeError("already doing multiple processes")
+        self.queue = multiprocessing.JoinableQueue()
+        processes = [multiprocessing.Process(target=self._child)
+                     for i in xrange(num_processes)]
+        try:
+            for p in processes:
+                p.start()
+            super(MultiprocessRefactoringTool, self).refactor(items, write,
+                                                              doctests_only)
+        finally:
+            self.queue.join()
+            for i in xrange(num_processes):
+                self.queue.put(None)
+            for p in processes:
+                if p.is_alive():
+                    p.join()
+            self.queue = None
+
+    def _child(self):
+        task = self.queue.get()
+        while task is not None:
+            args, kwargs = task
+            try:
+                super(MultiprocessRefactoringTool, self).refactor_file(
+                    *args, **kwargs)
+            finally:
+                self.queue.task_done()
+            task = self.queue.get()
+
+    def refactor_file(self, *args, **kwargs):
+        if self.queue is not None:
+            self.queue.put((args, kwargs))
+        else:
+            return super(MultiprocessRefactoringTool, self).refactor_file(
+                *args, **kwargs)
+
+
 def diff_texts(a, b, filename):
     """Return a unified diff of two strings."""
     a = a.splitlines()
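A rough sketch of driving the new class directly, mirroring what the main.py hunk above does; the single-fixer list and the target directory are made up for illustration, and a real run would pass the full fixer set:

    import sys
    from lib2to3 import refactor

    rt = refactor.MultiprocessRefactoringTool(["lib2to3.fixes.fix_imports"])
    try:
        # num_processes > 1 imports multiprocessing lazily; if the module is
        # missing, MultiprocessingUnsupported is raised instead.
        rt.refactor(["example_project/"], write=True, num_processes=4)
    except refactor.MultiprocessingUnsupported:
        print >> sys.stderr, "multiprocessing is unavailable on this platform"
    rt.summarize()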
@@ -3404,6 +3404,18 @@ class Test_itertools_imports(FixerTestCase):
         a = "from itertools import bar as bang"
         self.check(b, a)

+        b = "from itertools import izip as _zip, imap, bar"
+        a = "from itertools import bar"
+        self.check(b, a)
+
+        b = "from itertools import imap as _map"
+        a = ""
+        self.check(b, a)
+
+        b = "from itertools import imap as _map, izip as _zip"
+        a = ""
+        self.check(b, a)
+
         s = "from itertools import bar as bang"
         self.unchanged(s)

@@ -306,36 +306,36 @@ class TestNodes(support.TestCase):
         n2 = pytree.Node(1000, [])
         p1 = pytree.Node(1000, [n1, n2])

-        self.failUnless(n1.get_next_sibling() is n2)
-        self.assertEqual(n2.get_next_sibling(), None)
-        self.assertEqual(p1.get_next_sibling(), None)
+        self.failUnless(n1.next_sibling is n2)
+        self.assertEqual(n2.next_sibling, None)
+        self.assertEqual(p1.next_sibling, None)

     def testLeafNextSibling(self):
         l1 = pytree.Leaf(100, "a")
         l2 = pytree.Leaf(100, "b")
         p1 = pytree.Node(1000, [l1, l2])

-        self.failUnless(l1.get_next_sibling() is l2)
-        self.assertEqual(l2.get_next_sibling(), None)
-        self.assertEqual(p1.get_next_sibling(), None)
+        self.failUnless(l1.next_sibling is l2)
+        self.assertEqual(l2.next_sibling, None)
+        self.assertEqual(p1.next_sibling, None)

     def testNodePrevSibling(self):
         n1 = pytree.Node(1000, [])
         n2 = pytree.Node(1000, [])
         p1 = pytree.Node(1000, [n1, n2])

-        self.failUnless(n2.get_prev_sibling() is n1)
-        self.assertEqual(n1.get_prev_sibling(), None)
-        self.assertEqual(p1.get_prev_sibling(), None)
+        self.failUnless(n2.prev_sibling is n1)
+        self.assertEqual(n1.prev_sibling, None)
+        self.assertEqual(p1.prev_sibling, None)

     def testLeafPrevSibling(self):
         l1 = pytree.Leaf(100, "a")
         l2 = pytree.Leaf(100, "b")
         p1 = pytree.Node(1000, [l1, l2])

-        self.failUnless(l2.get_prev_sibling() is l1)
-        self.assertEqual(l1.get_prev_sibling(), None)
-        self.assertEqual(p1.get_prev_sibling(), None)
+        self.failUnless(l2.prev_sibling is l1)
+        self.assertEqual(l1.prev_sibling, None)
+        self.assertEqual(p1.prev_sibling, None)


 class TestPatterns(support.TestCase):