mirror of https://github.com/python/cpython
Merged revisions 67428 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/trunk

................
  r67428 | benjamin.peterson | 2008-11-28 16:12:14 -0600 (Fri, 28 Nov 2008) | 57 lines

  Merged revisions 67384,67386-67387,67389-67390,67392,67399-67400,67403-67405,67426 via svnmerge from
  svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

  ........
    r67384 | benjamin.peterson | 2008-11-25 16:13:31 -0600 (Tue, 25 Nov 2008) | 4 lines

    don't duplicate calls to start_tree()

    RefactoringTool.pre_order now holds a list of the fixers, while
    pre_order_mapping holds the dict.
  ........
    r67386 | benjamin.peterson | 2008-11-25 16:44:52 -0600 (Tue, 25 Nov 2008) | 1 line

    #4423: fix_imports was still replacing usage of a module if attributes were being used
  ........
    r67387 | benjamin.peterson | 2008-11-25 16:47:54 -0600 (Tue, 25 Nov 2008) | 1 line

    fix broken test
  ........
    r67389 | benjamin.peterson | 2008-11-25 17:13:17 -0600 (Tue, 25 Nov 2008) | 1 line

    remove compatibility code; we only cater to 2.5+
  ........
    r67390 | benjamin.peterson | 2008-11-25 22:03:36 -0600 (Tue, 25 Nov 2008) | 1 line

    fix #3994; the usage of changed imports is now fixed in nested cases
  ........
    r67392 | benjamin.peterson | 2008-11-26 11:11:40 -0600 (Wed, 26 Nov 2008) | 1 line

    simplify and comment fix_imports
  ........
    r67399 | benjamin.peterson | 2008-11-26 11:47:03 -0600 (Wed, 26 Nov 2008) | 1 line

    remove more compatibility code
  ........
    r67400 | benjamin.peterson | 2008-11-26 12:07:41 -0600 (Wed, 26 Nov 2008) | 1 line

    set svn:ignore
  ........
    r67403 | benjamin.peterson | 2008-11-26 13:11:11 -0600 (Wed, 26 Nov 2008) | 1 line

    wrap import
  ........
    r67404 | benjamin.peterson | 2008-11-26 13:29:49 -0600 (Wed, 26 Nov 2008) | 1 line

    build the fix_imports pattern in compile_pattern, so MAPPING can be changed and reflected in the pattern
  ........
    r67405 | benjamin.peterson | 2008-11-26 14:01:24 -0600 (Wed, 26 Nov 2008) | 1 line

    stop ugly messages about runtime errors from being printed
  ........
    r67426 | benjamin.peterson | 2008-11-28 16:01:40 -0600 (Fri, 28 Nov 2008) | 5 lines

    don't replace a module name if it is in the middle of an attribute lookup

    This fix also stops module names from being replaced if they are not in an
    attribute lookup.
  ........
................
parent 6e15860081
commit e5c3ae3053
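As an illustration of the behaviour these revisions settle, a sketch based on the test
cases added near the end of the diff (not code from the commit itself), assuming 2to3's
"imports" fixer with its standard StringIO -> io mapping:

    # A bare module import and attribute uses of that module are rewritten ...
    before = """
    import StringIO
    StringIO.StringIO()
    """
    after = """
    import io
    io.StringIO()
    """
    # ... but a name that is merely part of another attribute lookup ("x.StringIO")
    # is left alone, and after "from StringIO import x" a later bare "StringIO = 23"
    # is not touched (only the module name inside the import itself is renamed).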
@@ -7,12 +7,6 @@
 import logging
 import itertools
 
-# Get a usable 'set' constructor
-try:
-    set
-except NameError:
-    from sets import Set as set
-
 # Local imports
 from .patcomp import PatternCompiler
 from . import pygram
@@ -153,30 +153,6 @@ def is_list(node):
             and node.children[0].value == "["
             and node.children[-1].value == "]")
 
-###########################################################
-### Common portability code. This allows fixers to do, eg,
-###  "from .util import set" and forget about it.
-###########################################################
-
-try:
-    any = any
-except NameError:
-    def any(l):
-        for o in l:
-            if o:
-                return True
-        return False
-
-try:
-    set = set
-except NameError:
-    from sets import Set as set
-
-try:
-    reversed = reversed
-except NameError:
-    def reversed(l):
-        return l[::-1]
-
 ###########################################################
 ### Misc
@@ -28,7 +28,7 @@ from .. import pytree
 from .. import patcomp
 from ..pgen2 import token
 from .. import fixer_base
-from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot, set
+from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot
 from .. import fixer_util
 
 
@@ -25,7 +25,7 @@ The following cases will be converted:
 from .. import pytree
 from ..pgen2 import token
 from .. import fixer_base
-from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, reversed
+from ..fixer_util import Assign, Attr, Name, is_tuple, is_list
 
 def find_excepts(nodes):
     for i, n in enumerate(nodes):
@@ -1,9 +1,9 @@
 """Fix incompatible imports and module references."""
-# Author: Collin Winter
+# Authors: Collin Winter, Nick Edds
 
 # Local imports
 from .. import fixer_base
-from ..fixer_util import Name, attr_chain, any, set
+from ..fixer_util import Name, attr_chain
 
 MAPPING = {'StringIO': 'io',
            'cStringIO': 'io',
@@ -61,36 +61,49 @@ def alternates(members):
 
 
 def build_pattern(mapping=MAPPING):
-    mod_list = ' | '.join(["module='" + key + "'" for key in mapping.keys()])
-    mod_name_list = ' | '.join(["module_name='" + key + "'" for key in mapping.keys()])
-    yield """import_name< 'import' ((%s)
+    mod_list = ' | '.join(["module_name='%s'" % key for key in mapping])
+    bare_names = alternates(mapping.keys())
+
+    yield """name_import=import_name< 'import' ((%s)
                           | dotted_as_names< any* (%s) any* >) >
           """ % (mod_list, mod_list)
     yield """import_from< 'from' (%s) 'import' ['(']
               ( any | import_as_name< any 'as' any > |
                 import_as_names< any* >) [')'] >
-          """ % mod_name_list
+          """ % mod_list
     yield """import_name< 'import'
                           dotted_as_name< (%s) 'as' any > >
-          """ % mod_name_list
-    # Find usages of module members in code e.g. urllib.foo(bar)
-    yield """power< (%s)
-                    trailer<'.' any > any* >
-          """ % mod_name_list
-    yield """bare_name=%s""" % alternates(mapping.keys())
+          """ % mod_list
+    # Find usages of module members in code e.g. thread.foo(bar)
+    yield "power< bare_with_attr=(%s) trailer<'.' any > any* >" % bare_names
 
 class FixImports(fixer_base.BaseFix):
-    PATTERN = "|".join(build_pattern())
 
     order = "pre" # Pre-order tree traversal
 
+    # This is overridden in fix_imports2.
     mapping = MAPPING
 
-    # Don't match the node if it's within another match
+    def build_pattern(self):
+        return "|".join(build_pattern(self.mapping))
+
+    def compile_pattern(self):
+        # We override this, so MAPPING can be programmatically altered and the
+        # changes will be reflected in PATTERN.
+        self.PATTERN = self.build_pattern()
+        super(FixImports, self).compile_pattern()
+
+    # Don't match the node if it's within another match.
     def match(self, node):
         match = super(FixImports, self).match
         results = match(node)
         if results:
-            if any([match(obj) for obj in attr_chain(node, "parent")]):
+            # Module usage could be in the trailer of an attribute lookup, so
+            # we might have nested matches when "bare_with_attr" is present.
+            if "bare_with_attr" not in results and \
+               any([match(obj) for obj in attr_chain(node, "parent")]):
                 return False
             return results
         return False
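The point of moving pattern construction into compile_pattern() (r67404) is that PATTERN
is no longer frozen at class-definition time; a subclass only needs to supply a different
mapping and the pattern is regenerated when the fixer is compiled, which is what
fix_imports2 relies on. A minimal sketch, assuming lib2to3 is importable; the subclass
name and mapping below are hypothetical:

    from lib2to3.fixes import fix_imports

    class FixMyImports(fix_imports.FixImports):
        # hypothetical custom mapping; build_pattern()/compile_pattern()
        # rebuild PATTERN from it when this fixer is compiled
        mapping = {"StringIO": "io"}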
@@ -100,20 +113,17 @@ class FixImports(fixer_base.BaseFix):
         self.replace = {}
 
     def transform(self, node, results):
-        import_mod = results.get("module")
-        mod_name = results.get("module_name")
-        bare_name = results.get("bare_name")
-
-        if import_mod or mod_name:
-            new_name = self.mapping[(import_mod or mod_name).value]
-
+        import_mod = results.get("module_name")
         if import_mod:
-            self.replace[import_mod.value] = new_name
+            new_name = self.mapping[(import_mod or mod_name).value]
+            if "name_import" in results:
+                # If it's not a "from x import x, y" or "import x as y" import,
+                # mark its usage to be replaced.
+                self.replace[import_mod.value] = new_name
             import_mod.replace(Name(new_name, prefix=import_mod.get_prefix()))
-        elif mod_name:
-            mod_name.replace(Name(new_name, prefix=mod_name.get_prefix()))
-        elif bare_name:
-            bare_name = bare_name[0]
+        else:
+            # Replace usage of the module.
+            bare_name = results["bare_with_attr"][0]
             new_name = self.replace.get(bare_name.value)
             if new_name:
                 bare_name.replace(Name(new_name, prefix=bare_name.get_prefix()))
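A rough usage sketch of the rewritten transform, assuming lib2to3's RefactoringTool and
refactor_string behave as in current releases (the fixer records bare "import x"
statements in self.replace and only then rewrites bare usages of that name):

    from lib2to3 import refactor

    rt = refactor.RefactoringTool(["lib2to3.fixes.fix_imports"])
    tree = rt.refactor_string("import StringIO\nStringIO.StringIO()\n", "<example>")
    print(str(tree))  # expected: "import io" followed by "io.StringIO()"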
@@ -10,7 +10,6 @@ MAPPING = {
 
 
 class FixImports2(fix_imports.FixImports):
-    PATTERN = "|".join((fix_imports.build_pattern(MAPPING)))
 
     order = "post"
 
@@ -9,7 +9,7 @@
 from ..pgen2 import token
 from ..pygram import python_symbols as syms
 from .. import fixer_base
-from ..fixer_util import Name, Call, find_binding, any
+from ..fixer_util import Name, Call, find_binding
 
 bind_warning = "Calls to builtin next() possibly shadowed by global binding"
 
@@ -6,7 +6,7 @@
 # Local imports
 from ..pgen2 import token
 from .. import fixer_base
-from ..fixer_util import Number, set
+from ..fixer_util import Number
 
 
 class FixNumliterals(fixer_base.BaseFix):
@@ -8,7 +8,7 @@ Fixes:
 
 # Local imports
 from .. import fixer_base
-from ..fixer_util import Name, attr_chain, any, set
+from ..fixer_util import Name, attr_chain
 
 MAPPING = {"sys": {"maxint" : "maxsize"},
           }
@@ -7,7 +7,7 @@
 # Local imports
 from .fix_imports import alternates, FixImports
 from .. import fixer_base
-from ..fixer_util import Name, Comma, FromImport, Newline, attr_chain, any, set
+from ..fixer_util import Name, Comma, FromImport, Newline, attr_chain
 
 MAPPING = {'urllib': [
              ('urllib.request',
@@ -65,7 +65,9 @@ def build_pattern():
 
 
 class FixUrllib(FixImports):
-    PATTERN = "|".join(build_pattern())
+
+    def build_pattern(self):
+        return "|".join(build_pattern())
 
     def transform_import(self, node, results):
         """Transform for the basic import case. Replaces the old
@@ -10,12 +10,6 @@ how this parsing engine works.
 
 """
 
-# Get a usable 'set' constructor
-try:
-    set
-except NameError:
-    from sets import Set as set
-
 # Local imports
 from . import token
 
@@ -11,6 +11,9 @@ There's also a pattern matching implementation here.
 
 __author__ = "Guido van Rossum <guido@python.org>"
 
+import sys
+from StringIO import StringIO
+
 
 HUGE = 0x7FFFFFFF # maximum repeat count, default max
 
@@ -655,6 +658,11 @@ class WildcardPattern(BasePattern):
         elif self.name == "bare_name":
             yield self._bare_name_matches(nodes)
         else:
+            # The reason for this is that hitting the recursion limit usually
+            # results in some ugly messages about how RuntimeErrors are being
+            # ignored.
+            save_stderr = sys.stderr
+            sys.stderr = StringIO()
             try:
                 for count, r in self._recursive_matches(nodes, 0):
                     if self.name:
@@ -667,6 +675,8 @@ class WildcardPattern(BasePattern):
                     if self.name:
                         r[self.name] = nodes[:count]
                     yield count, r
+            finally:
+                sys.stderr = save_stderr
 
     def _iterative_matches(self, nodes):
         """Helper to iteratively yield the matches."""
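The two hunks above use a small, general trick: point sys.stderr at an in-memory buffer
while the recursive matcher runs, so the "RuntimeError ignored" noise produced when the
recursion limit is hit never reaches the terminal, then restore it in a finally block.
The same idea in isolation, as a sketch (the diff itself uses Python 2's StringIO module):

    import sys
    from io import StringIO

    def call_quietly(func, *args, **kwargs):
        # Swallow anything written to stderr while func runs.
        save_stderr = sys.stderr
        sys.stderr = StringIO()
        try:
            return func(*args, **kwargs)
        finally:
            sys.stderr = save_stderr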
@@ -123,8 +123,8 @@ class RefactoringTool(object):
                                     logger=self.logger)
         self.pre_order, self.post_order = self.get_fixers()
 
-        self.pre_order = get_headnode_dict(self.pre_order)
-        self.post_order = get_headnode_dict(self.post_order)
+        self.pre_order_mapping = get_headnode_dict(self.pre_order)
+        self.post_order_mapping = get_headnode_dict(self.post_order)
 
         self.files = [] # List of files that were or should be modified
 
@@ -290,13 +290,12 @@ class RefactoringTool(object):
         # Two calls to chain are required because pre_order.values()
         # will be a list of lists of fixers:
         # [[<fixer ...>, <fixer ...>], [<fixer ...>]]
-        all_fixers = chain(chain(*self.pre_order.values()),\
-                           chain(*self.post_order.values()))
+        all_fixers = chain(self.pre_order, self.post_order)
         for fixer in all_fixers:
             fixer.start_tree(tree, name)
 
-        self.traverse_by(self.pre_order, tree.pre_order())
-        self.traverse_by(self.post_order, tree.post_order())
+        self.traverse_by(self.pre_order_mapping, tree.pre_order())
+        self.traverse_by(self.post_order_mapping, tree.post_order())
 
         for fixer in all_fixers:
             fixer.finish_tree(tree, name)
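For orientation, a sketch of the renamed attributes on an already constructed
RefactoringTool instance rt, using only names introduced by this change: pre_order and
post_order are now flat lists of fixer instances, while pre_order_mapping and
post_order_mapping hold the head-node dictionaries built by get_headnode_dict(), so a
single chain() visits every fixer exactly once:

    from itertools import chain

    def iter_all_fixers(rt):
        # rt.pre_order / rt.post_order: lists of fixers (this change)
        # rt.pre_order_mapping / rt.post_order_mapping: dicts used by traverse_by()
        return chain(rt.pre_order, rt.post_order)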
@@ -15,10 +15,7 @@ from itertools import chain
 from operator import itemgetter
 
 # Local imports
-from .. import pygram
-from .. import pytree
-from .. import refactor
-from .. import fixer_util
+from lib2to3 import pygram, pytree, refactor, fixer_util
 
 
 class FixerTestCase(support.TestCase):
@@ -30,9 +27,8 @@ class FixerTestCase(support.TestCase):
         self.fixer_log = []
         self.filename = "<string>"
 
-        for order in (self.refactor.pre_order.values(),\
-                      self.refactor.post_order.values()):
-            for fixer in chain(*order):
+        for fixer in chain(self.refactor.pre_order,
+                           self.refactor.post_order):
             fixer.log = self.fixer_log
 
     def _check(self, before, after):
@@ -1488,6 +1484,44 @@ class Test_imports(FixerTestCase):
                 """ % (new, new)
             self.check(b, a)
 
+            b = """
+                from %s import x
+                %s = 23
+                """ % (old, old)
+            a = """
+                from %s import x
+                %s = 23
+                """ % (new, old)
+            self.check(b, a)
+
+            s = """
+                def f():
+                    %s.method()
+                """ % (old,)
+            self.unchanged(s)
+
+            # test nested usage
+            b = """
+                import %s
+                %s.bar(%s.foo)
+                """ % (old, old, old)
+            a = """
+                import %s
+                %s.bar(%s.foo)
+                """ % (new, new, new)
+            self.check(b, a)
+
+            b = """
+                import %s
+                x.%s
+                """ % (old, old)
+            a = """
+                import %s
+                x.%s
+                """ % (new, old)
+            self.check(b, a)
+
 
 class Test_imports2(Test_imports):
     fixer = "imports2"
@@ -161,7 +161,7 @@ class TestRefactoringTool(unittest.TestCase):
         self.assertEqual(len(rt.post_order), 0)
 
         rt = self.rt(explicit=["myfixes.fix_explicit"])
-        for fix in rt.post_order[None]:
+        for fix in rt.post_order:
             if isinstance(fix, FixExplicit):
                 break
         else: