Merged revisions 64863,64868,64870,64942,65001-65002,65017-65018 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

........
  r64863 | brett.cannon | 2008-07-10 19:42:32 -0500 (Thu, 10 Jul 2008) | 1 line

  Add urlparse -> urllib.parse to fix_imports.
........
  r64868 | brett.cannon | 2008-07-10 20:00:10 -0500 (Thu, 10 Jul 2008) | 1 line

  Add robotparser -> urllib.robotparser to fix_imports.
........
  r64870 | brett.cannon | 2008-07-11 00:56:27 -0500 (Fri, 11 Jul 2008) | 6 lines

  Fix the fixers for the new dbm package.

  Had to create a new fixer (fix_imports2) which did fixes in post-order. This
  because ``import anydbm`` was being translated into ``import dbm`` which was
  then subsequently changed into ``import dbm.ndbm``; one transform too many.
........
  r64942 | collin.winter | 2008-07-13 20:19:05 -0500 (Sun, 13 Jul 2008) | 1 line

  Add a comment explaining part of fix_imports.py
........
  r65001 | brett.cannon | 2008-07-16 00:11:12 -0500 (Wed, 16 Jul 2008) | 2 lines

  Remove some extraneous whitespace.
........
  r65002 | brett.cannon | 2008-07-16 00:12:04 -0500 (Wed, 16 Jul 2008) | 4 lines

  Implement a fixer for urllib(2).

  Thanks Nick Edds for the patch.
........
  r65017 | benjamin.peterson | 2008-07-16 11:04:19 -0500 (Wed, 16 Jul 2008) | 1 line

  fix 2to3 in Python 2.6
........
  r65018 | benjamin.peterson | 2008-07-16 11:55:21 -0500 (Wed, 16 Jul 2008) | 1 line

  normalize whitespace
........
This commit is contained in:
parent 13e9d582fd
commit 699b09010f
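The r64870 note above explains why the anydbm/whichdb renames were split into a separate post-order fixer. The snippet below is a toy model, not part of the commit: the dictionaries mirror the MAPPING entries from the diffs, and plain name lookups stand in for the parse-tree matching the real fixers do. It only illustrates why running both renames in one pass would apply one transform too many.

# Illustrative sketch only: why fix_imports2 runs in a later (post-order) pass.
FIX_IMPORTS = {"dbm": "dbm.ndbm", "dbhash": "dbm.bsd"}      # handled by fix_imports
FIX_IMPORTS2 = {"anydbm": "dbm", "whichdb": "dbm"}          # handled by fix_imports2

def rename(module, mapping):
    # Rename the module only if it appears in the mapping as a whole name.
    return mapping.get(module, module)

module = "anydbm"

# A single combined pass keeps rewriting its own output:
combined = dict(FIX_IMPORTS, **FIX_IMPORTS2)
out = rename(module, combined)     # anydbm -> dbm
out = rename(out, combined)        # dbm -> dbm.ndbm fires on the new name
print(out)                         # dbm.ndbm  -- one transform too many

# Two ordered passes, as in this commit: fix_imports never sees the
# intermediate "dbm" that fix_imports2 produces later.
out = rename(module, FIX_IMPORTS)      # anydbm is left alone
out = rename(out, FIX_IMPORTS2)        # anydbm -> dbm
print(out)                             # dbm  -- the desired result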
@@ -112,9 +112,9 @@ def FromImport(package_name, name_leafs):
     """ Return an import statement in the form:
        from package import name_leafs"""
     # XXX: May not handle dotted imports properly (eg, package_name='foo.bar')
-    assert package_name == '.' or '.' not in package.name, "FromImport has "\
-           "not been tested with dotted package names -- use at your own "\
-           "peril!"
+    #assert package_name == '.' or '.' not in package_name, "FromImport has "\
+    #       "not been tested with dotted package names -- use at your own "\
+    #       "peril!"

     for leaf in name_leafs:
         # Pull the leaves out of their old tree
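The hunk above disables the dotted-name assertion (whose old form also referenced the undefined name package.name) so helpers can build imports like "from urllib.parse import ...". The sketch below is illustrative only, not part of the diff: it assumes a Python that still ships lib2to3 (removed in 3.13), and the prefix handling shown is how current versions behave; it mirrors the way fix_urllib, further down, uses FromImport().

# Illustrative only: building a "from X import Y" node with FromImport().
from lib2to3.fixer_util import FromImport, Name

names = [Name("urlencode", prefix=" ")]    # the leading space renders as " urlencode"
node = FromImport("urllib.parse", names)   # a dotted package name, now allowed
print(node)                                # from urllib.parse import urlencode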
@@ -1,10 +1,4 @@
-"""Fix incompatible imports and module references.
-
-Fixes:
-  * StringIO -> io
-  * cStringIO -> io
-  * md5 -> hashlib
-"""
+"""Fix incompatible imports and module references."""
 # Author: Collin Winter

 # Local imports
@@ -12,7 +6,7 @@ from .. import fixer_base
 from ..fixer_util import Name, attr_chain, any, set
 import __builtin__
 builtin_names = [name for name in dir(__builtin__)
-                 if name not in ("__name__", "__doc__")]
+                 if name not in ("__name__", "__doc__", "exec", "print")]

 # XXX(alexandre): It would be possible to get the modules exports by fetching
 # XXX: their __all__ attribute. However, I fear that this would add an additional
@@ -155,8 +149,7 @@ MAPPING = {"StringIO": ("io", ["StringIO"]),
                       'error', 'exit', 'exit_thread', 'get_ident',
                       'interrupt_main', 'stack_size', 'start_new',
                       'start_new_thread']),
-           'whichdb': ('dbm', ['whichdb']),
-           'anydbm': ('dbm', ['error', 'open']),
+           # anydbm and whichdb are handled by fix_imports2.
            'dbhash': ('dbm.bsd', ['error', 'open']),
            'dumbdbm': ('dbm.dumb', ['error', 'open', '_Database']),
            'dbm': ('dbm.ndbm', ['error', 'open', 'library']),
@@ -253,25 +246,29 @@ MAPPING = {"StringIO": ("io", ["StringIO"]),
            'CGIHTTPServer': ('http.server',
                              ['CGIHTTPRequestHandler', 'executable',
                               'nobody_uid', 'nobody']),
-           'test.test_support': ('test.support',
-                   ["Error", "TestFailed", "TestSkipped", "ResourceDenied",
-                    "import_module", "verbose", "use_resources",
-                    "max_memuse", "record_original_stdout",
-                    "get_original_stdout", "unload", "unlink", "rmtree",
-                    "forget", "is_resource_enabled", "requires",
-                    "find_unused_port", "bind_port",
-                    "fcmp", "is_jython", "TESTFN", "HOST",
-                    "FUZZ", "findfile", "verify", "vereq", "sortdict",
-                    "check_syntax_error", "open_urlresource", "WarningMessage",
-                    "catch_warning", "CleanImport", "EnvironmentVarGuard",
-                    "TransientResource", "captured_output", "captured_stdout",
-                    "TransientResource", "transient_internet", "run_with_locale",
-                    "set_memlimit", "bigmemtest", "bigaddrspacetest",
-                    "BasicTestRunner", "run_unittest", "run_doctest",
-                    "threading_setup", "threading_cleanup", "reap_children"]),
+           # 'test.test_support': ('test.support',
+           #         ["Error", "TestFailed", "TestSkipped", "ResourceDenied",
+           #          "import_module", "verbose", "use_resources",
+           #          "max_memuse", "record_original_stdout",
+           #          "get_original_stdout", "unload", "unlink", "rmtree",
+           #          "forget", "is_resource_enabled", "requires",
+           #          "find_unused_port", "bind_port",
+           #          "fcmp", "is_jython", "TESTFN", "HOST",
+           #          "FUZZ", "findfile", "verify", "vereq", "sortdict",
+           #          "check_syntax_error", "open_urlresource", "WarningMessage",
+           #          "catch_warning", "CleanImport", "EnvironmentVarGuard",
+           #          "TransientResource", "captured_output", "captured_stdout",
+           #          "TransientResource", "transient_internet", "run_with_locale",
+           #          "set_memlimit", "bigmemtest", "bigaddrspacetest",
+           #          "BasicTestRunner", "run_unittest", "run_doctest",
+           #          "threading_setup", "threading_cleanup", "reap_children"]),
            'commands': ('subprocess', ['getstatusoutput', 'getoutput']),
            'UserString' : ('collections', ['UserString']),
            'UserList' : ('collections', ['UserList']),
+           'urlparse' : ('urllib.parse',
+                         ['urlparse', 'urlunparse', 'urlsplit',
+                          'urlunsplit', 'urljoin', 'urldefrag']),
+           'robotparser' : ('urllib.robotparser', ['RobotFileParser']),
            }


@@ -279,9 +276,9 @@ def alternates(members):
     return "(" + "|".join(map(repr, members)) + ")"


-def build_pattern():
+def build_pattern(mapping=MAPPING):
     bare = set()
-    for old_module, (new_module, members) in MAPPING.items():
+    for old_module, (new_module, members) in mapping.items():
         bare.add(old_module)
         bare.update(members)
         members = alternates(members)
@@ -297,6 +294,7 @@ def build_pattern():
         yield """import_name< 'import'
                   dotted_as_name< module_name=%r 'as' any > >
               """ % old_module
+        # Find usages of module members in code e.g. urllib.foo(bar)
         yield """power< module_name=%r trailer< '.' %s > any* >
               """ % (old_module, members)
     yield """bare_name=%s""" % alternates(bare)
@@ -307,6 +305,8 @@ class FixImports(fixer_base.BaseFix):

     order = "pre" # Pre-order tree traversal

+    mapping = MAPPING
+
     # Don't match the node if it's within another match
     def match(self, node):
         match = super(FixImports, self).match
@@ -328,7 +328,7 @@ class FixImports(fixer_base.BaseFix):
         star = results.get("star")

         if import_mod or mod_name:
-            new_name, members = MAPPING[(import_mod or mod_name).value]
+            new_name, members = self.mapping[(import_mod or mod_name).value]

         if import_mod:
             self.replace[import_mod.value] = new_name
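As a quick illustration of what the MAPPING additions above buy you (urlparse and robotparser came in via r64863 and r64868), here is a rough driver for the "imports" fixer. It is not part of the commit: it uses the RefactoringTool API of later CPython releases, which takes fixer module names rather than the fixer directory used in the test diff below, and lib2to3 itself is removed in Python 3.13+.

# Illustrative only: running the "imports" fixer over a two-line snippet.
from lib2to3 import refactor

rt = refactor.RefactoringTool(["lib2to3.fixes.fix_imports"])
src = "import urlparse\nx = urlparse.urljoin(a, b)\n"
print(rt.refactor_string(src, "<example>"))
# Expected output, per the urlparse MAPPING entry:
#   import urllib.parse
#   x = urllib.parse.urljoin(a, b)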
@@ -0,0 +1,17 @@
+"""Fix incompatible imports and module references that must be fixed after
+fix_imports."""
+from . import fix_imports
+
+
+MAPPING = {
+            'whichdb': ('dbm', ['whichdb']),
+            'anydbm': ('dbm', ['error', 'open']),
+          }
+
+
+class FixImports2(fix_imports.FixImports):
+    PATTERN = "|".join((fix_imports.build_pattern(MAPPING)))
+
+    order = "post"
+
+    mapping = MAPPING
@@ -0,0 +1,175 @@
+"""Fix changes imports of urllib which are now incompatible.
+   This is rather similar to fix_imports, but because of the more
+   complex nature of the fixing for urllib, it has its own fixer.
+"""
+# Author: Nick Edds
+
+# Local imports
+from .fix_imports import alternates, FixImports
+from .. import fixer_base
+from ..fixer_util import Name, Comma, FromImport, Newline, attr_chain, any, set
+
+MAPPING = {'urllib': [
+                ('urllib.request',
+                    ['URLopener', 'FancyURLopener', 'urlretrieve',
+                     '_urlopener', 'urlcleanup']),
+                ('urllib.parse',
+                    ['quote', 'quote_plus', 'unquote', 'unquote_plus',
+                     'urlencode', 'pathname2url', 'url2pathname']),
+                ('urllib.error',
+                    ['ContentTooShortError'])],
+           'urllib2' : [
+                ('urllib.request',
+                    ['urlopen', 'install_opener', 'build_opener',
+                     'Request', 'OpenerDirector', 'BaseHandler',
+                     'HTTPDefaultErrorHandler', 'HTTPRedirectHandler',
+                     'HTTPCookieProcessor', 'ProxyHandler',
+                     'HTTPPasswordMgr',
+                     'HTTPPasswordMgrWithDefaultRealm',
+                     'AbstractBasicAuthHandler',
+                     'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler',
+                     'AbstractDigestAuthHandler',
+                     'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler',
+                     'HTTPHandler', 'HTTPSHandler', 'FileHandler',
+                     'FTPHandler', 'CacheFTPHandler',
+                     'UnknownHandler']),
+                ('urllib.error',
+                    ['URLError', 'HTTPError'])],
+}
+
+
+# def alternates(members):
+#     return "(" + "|".join(map(repr, members)) + ")"
+
+
+def build_pattern():
+    bare = set()
+    for old_module, changes in MAPPING.items():
+        for change in changes:
+            new_module, members = change
+            members = alternates(members)
+            yield """import_name< 'import' (module=%r
+                          | dotted_as_names< any* module=%r any* >) >
+                  """ % (old_module, old_module)
+            yield """import_from< 'from' mod_member=%r 'import'
+                       ( member=%s | import_as_name< member=%s 'as' any > |
+                         import_as_names< members=any* >) >
+                  """ % (old_module, members, members)
+            yield """import_from< 'from' module_star=%r 'import' star='*' >
+                  """ % old_module
+            yield """import_name< 'import'
+                              dotted_as_name< module_as=%r 'as' any > >
+                  """ % old_module
+            yield """power< module_dot=%r trailer< '.' member=%s > any* >
+                  """ % (old_module, members)
+
+
+class FixUrllib(FixImports):
+    PATTERN = "|".join(build_pattern())
+
+    def transform_import(self, node, results):
+        """Transform for the basic import case. Replaces the old
+           import name with a comma separated list of its
+           replacements.
+        """
+        import_mod = results.get('module')
+        pref = import_mod.get_prefix()
+
+        names = []
+
+        # create a Node list of the replacement modules
+        for name in MAPPING[import_mod.value][:-1]:
+            names.extend([Name(name[0], prefix=pref), Comma()])
+        names.append(Name(MAPPING[import_mod.value][-1][0], prefix=pref))
+        import_mod.replace(names)
+
+    def transform_member(self, node, results):
+        """Transform for imports of specific module elements. Replaces
+           the module to be imported from with the appropriate new
+           module.
+        """
+        mod_member = results.get('mod_member')
+        pref = mod_member.get_prefix()
+        member = results.get('member')
+
+        # Simple case with only a single member being imported
+        if member:
+            # this may be a list of length one, or just a node
+            if isinstance(member, list):
+                member = member[0]
+            new_name = None
+            for change in MAPPING[mod_member.value]:
+                if member.value in change[1]:
+                    new_name = change[0]
+                    break
+            if new_name:
+                mod_member.replace(Name(new_name, prefix=pref))
+            else:
+                self.cannot_convert(node,
+                                    'This is an invalid module element')
+
+        # Multiple members being imported
+        else:
+            # a dictionary for replacements, order matters
+            modules = []
+            mod_dict = {}
+            members = results.get('members')
+            for member in members:
+                member = member.value
+                # we only care about the actual members
+                if member != ',':
+                    for change in MAPPING[mod_member.value]:
+                        if member in change[1]:
+                            if mod_dict.has_key(change[0]):
+                                mod_dict[change[0]].append(member)
+                            else:
+                                mod_dict[change[0]] = [member]
+                                modules.append(change[0])
+
+            new_nodes = []
+            for module in modules:
+                elts = mod_dict[module]
+                names = []
+                for elt in elts[:-1]:
+                    names.extend([Name(elt, prefix=pref), Comma()])
+                names.append(Name(elts[-1], prefix=pref))
+                new_nodes.append(FromImport(module, names))
+            if new_nodes:
+                nodes = []
+                for new_node in new_nodes[:-1]:
+                    nodes.extend([new_node, Newline()])
+                nodes.append(new_nodes[-1])
+                node.replace(nodes)
+            else:
+                self.cannot_convert(node, 'All module elements are invalid')
+
+    def transform_dot(self, node, results):
+        """Transform for calls to module members in code."""
+        module_dot = results.get('module_dot')
+        member = results.get('member')
+        # this may be a list of length one, or just a node
+        if isinstance(member, list):
+            member = member[0]
+        new_name = None
+        for change in MAPPING[module_dot.value]:
+            if member.value in change[1]:
+                new_name = change[0]
+                break
+        if new_name:
+            module_dot.replace(Name(new_name,
+                                    prefix=module_dot.get_prefix()))
+        else:
+            self.cannot_convert(node, 'This is an invalid module element')
+
+    def transform(self, node, results):
+        if results.get('module'):
+            self.transform_import(node, results)
+        elif results.get('mod_member'):
+            self.transform_member(node, results)
+        elif results.get('module_dot'):
+            self.transform_dot(node, results)
+        # Renaming and star imports are not supported for these modules.
+        elif results.get('module_star'):
+            self.cannot_convert(node, 'Cannot handle star imports.')
+        elif results.get('module_as'):
+            self.cannot_convert(node, 'This module is now multiple modules')
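To make the new fixer's behaviour concrete, the sketch below shows the two main transformations that the tests in the next hunk exercise: a bare import is replaced by a comma-separated list of the new modules, and a from-import is split so each member is imported from the module that now provides it. Same caveats as the earlier driver: this is illustrative only, uses the lib2to3 API of later CPython releases, and the expected outputs in the comments follow from the MAPPING shown above rather than from a guaranteed reproduction of this exact revision.

# Illustrative only: the "urllib" fixer splitting old urllib/urllib2 imports.
from lib2to3 import refactor

rt = refactor.RefactoringTool(["lib2to3.fixes.fix_urllib"])

print(rt.refactor_string("import urllib2\n", "<ex1>"))
# Expected:
#   import urllib.request, urllib.error

print(rt.refactor_string("from urllib import urlencode, urlretrieve\n", "<ex2>"))
# Expected (one line per replacement module, in order of first use):
#   from urllib.parse import urlencode
#   from urllib.request import urlretrieve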
@@ -11,6 +11,7 @@ except ImportError:
 # Python imports
 import unittest
 from itertools import chain
+from operator import itemgetter
 from os.path import dirname, pathsep

 # Local imports
@@ -28,8 +29,10 @@ class Options:
         self.verbose = False

 class FixerTestCase(support.TestCase):
-    def setUp(self):
-        options = Options(fix=[self.fixer], print_function=False)
+    def setUp(self, fix_list=None):
+        if not fix_list:
+            fix_list = [self.fixer]
+        options = Options(fix=fix_list, print_function=False)
         self.refactor = refactor.RefactoringTool("lib2to3/fixes", options)
         self.fixer_log = []
         self.filename = "<string>"
@@ -1494,6 +1497,95 @@ class Test_imports(FixerTestCase):
             self.check(b, a)


+class Test_imports2(Test_imports):
+    fixer = "imports2"
+    from ..fixes.fix_imports2 import MAPPING as modules
+
+
+class Test_imports_fixer_order(Test_imports):
+
+    fixer = None
+
+    def setUp(self):
+        Test_imports.setUp(self, ['imports', 'imports2'])
+        from ..fixes.fix_imports2 import MAPPING as mapping2
+        self.modules = mapping2.copy()
+        from ..fixes.fix_imports import MAPPING as mapping1
+        for key in ('dbhash', 'dumbdbm', 'dbm', 'gdbm'):
+            self.modules[key] = mapping1[key]
+
+
+class Test_urllib(FixerTestCase):
+    fixer = "urllib"
+    from ..fixes.fix_urllib import MAPPING as modules
+
+    def test_import_module(self):
+        for old, changes in self.modules.items():
+            b = "import %s" % old
+            a = "import %s" % ", ".join(map(itemgetter(0), changes))
+            self.check(b, a)
+
+    def test_import_from(self):
+        for old, changes in self.modules.items():
+            all_members = []
+            for new, members in changes:
+                for member in members:
+                    all_members.append(member)
+                    b = "from %s import %s" % (old, member)
+                    a = "from %s import %s" % (new, member)
+                    self.check(b, a)
+
+                    s = "from foo import %s" % member
+                    self.unchanged(s)
+
+                b = "from %s import %s" % (old, ", ".join(members))
+                a = "from %s import %s" % (new, ", ".join(members))
+                self.check(b, a)
+
+                s = "from foo import %s" % ", ".join(members)
+                self.unchanged(s)
+
+            # test the breaking of a module into multiple replacements
+            b = "from %s import %s" % (old, ", ".join(all_members))
+            a = "\n".join(["from %s import %s" % (new, ", ".join(members))
+                            for (new, members) in changes])
+            self.check(b, a)
+
+    def test_import_module_as(self):
+        for old in self.modules:
+            s = "import %s as foo" % old
+            self.warns_unchanged(s, "This module is now multiple modules")
+
+    def test_import_from_as(self):
+        for old, changes in self.modules.items():
+            for new, members in changes:
+                for member in members:
+                    b = "from %s import %s as foo_bar" % (old, member)
+                    a = "from %s import %s as foo_bar" % (new, member)
+                    self.check(b, a)
+
+    def test_star(self):
+        for old in self.modules:
+            s = "from %s import *" % old
+            self.warns_unchanged(s, "Cannot handle star imports")
+
+    def test_import_module_usage(self):
+        for old, changes in self.modules.items():
+            for new, members in changes:
+                for member in members:
+                    b = """
+                        import %s
+                        foo(%s.%s)
+                        """ % (old, old, member)
+                    a = """
+                        import %s
+                        foo(%s.%s)
+                        """ % (", ".join([n for (n, mems)
+                                          in self.modules[old]]),
+                               new, member)
+                    self.check(b, a)
+
+
 class Test_input(FixerTestCase):
     fixer = "input"
