Merged revisions 63661,63666,63695,63711,63729,63769,63790,63880,63886 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

........
r63661 | georg.brandl | 2008-05-26 05:26:20 -0500 (Mon, 26 May 2008) | 2 lines
Add import fixes for dbm package.
........
r63666 | georg.brandl | 2008-05-26 05:49:09 -0500 (Mon, 26 May 2008) | 2 lines
Add xmlrpc package fixes.
........
r63695 | georg.brandl | 2008-05-26 10:14:33 -0500 (Mon, 26 May 2008) | 2 lines
Add fixer entries for http package.
........
r63711 | benjamin.peterson | 2008-05-26 13:43:51 -0500 (Mon, 26 May 2008) | 2 lines
add import mapping for test.test_support -> test.support
........
r63729 | benjamin.peterson | 2008-05-26 16:31:03 -0500 (Mon, 26 May 2008) | 2 lines
mapping for commands module -> subprocess
........
r63769 | brett.cannon | 2008-05-29 00:13:13 -0500 (Thu, 29 May 2008) | 1 line
Fixer for UserString.UserString over to the collections module.
........
r63790 | brett.cannon | 2008-05-29 14:13:51 -0500 (Thu, 29 May 2008) | 4 lines
Add a fixer for UserList. Closes issue #2878.
Thanks to Quentin Gallet-Gilles for the patch.
........
r63880 | collin.winter | 2008-06-01 18:09:38 -0500 (Sun, 01 Jun 2008) | 6 lines
Move lib2to3/fixes/{basefix,util}.py down to lib2to3/.
This is step 1 of turning lib2to3/ into a general-purpose refactoring library,
reusable by other projects.
........
r63886 | collin.winter | 2008-06-01 22:15:01 -0500 (Sun, 01 Jun 2008) | 5 lines
Allow refactoring tools to specify a directory for fixer modules.
This is step 2 of turning lib2to3/ into a general-purpose refactoring library,
reusable by other projects. Step 1: r63880.
........
Parent: a0205d0a46
Commit: e607823af5
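Note: the sketch below is illustrative only and is not part of the diff. It shows roughly what a fixer module looks like after r63880, now that basefix and util live at the top of the package as fixer_base and fixer_util (fixers shipped inside lib2to3/fixes/ use the relative form `from .. import fixer_base`, as the hunks below show). The module name fix_spam, the FixSpam class, and the spam/eggs names are made up for the example, and the top-level package name lib2to3 is assumed to be importable.

    # Hypothetical fixer module (e.g. fixes/fix_spam.py) under the new layout.
    from lib2to3 import fixer_base
    from lib2to3.fixer_util import Name

    class FixSpam(fixer_base.BaseFix):
        # Match any occurrence of the bare name 'spam' in the parse tree.
        PATTERN = "'spam'"

        def transform(self, node, results):
            # Replace the matched leaf with 'eggs', keeping the original
            # node's prefix (leading whitespace/comments) intact.
            return Name("eggs", prefix=node.get_prefix())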
@@ -14,9 +14,9 @@ except NameError:
 from sets import Set as set

 # Local imports
-from ..patcomp import PatternCompiler
-from .. import pygram
-from .util import does_tree_import
+from .patcomp import PatternCompiler
+from . import pygram
+from .fixer_util import does_tree_import

 class BaseFix(object):

@@ -2,10 +2,10 @@
 # Author: Collin Winter

 # Local imports
-from ..pgen2 import token
-from ..pytree import Leaf, Node
-from ..pygram import python_symbols as syms
-from .. import patcomp
+from .pgen2 import token
+from .pytree import Leaf, Node
+from .pygram import python_symbols as syms
+from . import patcomp


 ###########################################################
@@ -8,10 +8,10 @@ This converts apply(func, v, k) into (func)(*v, **k)."""
 # Local imports
 from .. import pytree
 from ..pgen2 import token
-from . import basefix
-from .util import Call, Comma
+from .. import fixer_base
+from ..fixer_util import Call, Comma

-class FixApply(basefix.BaseFix):
+class FixApply(fixer_base.BaseFix):

 PATTERN = """
 power< 'apply'

@@ -2,10 +2,10 @@
 # Author: Christian Heimes

 # Local imports
-from . import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name

-class FixBasestring(basefix.BaseFix):
+class FixBasestring(fixer_base.BaseFix):

 PATTERN = "'basestring'"

@@ -4,11 +4,11 @@
 """Fixer that changes buffer(...) into memoryview(...)."""

 # Local imports
-from . import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name


-class FixBuffer(basefix.BaseFix):
+class FixBuffer(fixer_base.BaseFix):

 explicit = True # The user must ask for this fixer

@@ -7,10 +7,10 @@ This converts callable(obj) into hasattr(obj, '__call__')."""

 # Local imports
 from .. import pytree
-from . import basefix
-from .util import Call, Name, String
+from .. import fixer_base
+from ..fixer_util import Call, Name, String

-class FixCallable(basefix.BaseFix):
+class FixCallable(fixer_base.BaseFix):

 # Ignore callable(*args) or use of keywords.
 # Either could be a hint that the builtin callable() is not being used.
@@ -27,15 +27,15 @@ as an argument to a function that introspects the argument).
 from .. import pytree
 from .. import patcomp
 from ..pgen2 import token
-from . import basefix
-from .util import Name, Call, LParen, RParen, ArgList, Dot, set
-from . import util
+from .. import fixer_base
+from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot, set
+from .. import fixer_util


-iter_exempt = util.consuming_calls | set(["iter"])
+iter_exempt = fixer_util.consuming_calls | set(["iter"])


-class FixDict(basefix.BaseFix):
+class FixDict(fixer_base.BaseFix):
 PATTERN = """
 power< head=any+
 trailer< '.' method=('keys'|'items'|'values'|

@@ -92,7 +92,7 @@ class FixDict(basefix.BaseFix):
 return results["func"].value in iter_exempt
 else:
 # list(d.keys()) -> list(d.keys()), etc.
-return results["func"].value in util.consuming_calls
+return results["func"].value in fixer_util.consuming_calls
 if not isiter:
 return False
 # for ... in d.iterkeys() -> for ... in d.keys(), etc.

@@ -24,8 +24,8 @@ The following cases will be converted:
 # Local imports
 from .. import pytree
 from ..pgen2 import token
-from . import basefix
-from .util import Assign, Attr, Name, is_tuple, is_list, reversed
+from .. import fixer_base
+from ..fixer_util import Assign, Attr, Name, is_tuple, is_list, reversed

 def find_excepts(nodes):
 for i, n in enumerate(nodes):

@@ -33,7 +33,7 @@ def find_excepts(nodes):
 if n.children[0].value == 'except':
 yield (n, nodes[i+2])

-class FixExcept(basefix.BaseFix):
+class FixExcept(fixer_base.BaseFix):

 PATTERN = """
 try_stmt< 'try' ':' suite
@@ -11,11 +11,11 @@ exec code in ns1, ns2 -> exec(code, ns1, ns2)

 # Local imports
 from .. import pytree
-from . import basefix
-from .util import Comma, Name, Call
+from .. import fixer_base
+from ..fixer_util import Comma, Name, Call


-class FixExec(basefix.BaseFix):
+class FixExec(fixer_base.BaseFix):

 PATTERN = """
 exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >

@@ -8,11 +8,11 @@ exec() function.
 """

 from .. import pytree
-from . import basefix
-from .util import Comma, Name, Call, LParen, RParen, Dot
+from .. import fixer_base
+from ..fixer_util import Comma, Name, Call, LParen, RParen, Dot


-class FixExecfile(basefix.BaseFix):
+class FixExecfile(fixer_base.BaseFix):

 PATTERN = """
 power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >

@@ -15,10 +15,10 @@ Python 2.6 figure it out.

 # Local imports
 from ..pgen2 import token
-from . import basefix
-from .util import Name, Call, ListComp, in_special_context
+from .. import fixer_base
+from ..fixer_util import Name, Call, ListComp, in_special_context

-class FixFilter(basefix.ConditionalFix):
+class FixFilter(fixer_base.ConditionalFix):

 PATTERN = """
 filter_lambda=power<

@@ -2,11 +2,11 @@
 # Author: Collin Winter

 # Local imports
-from . import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name


-class FixFuncattrs(basefix.BaseFix):
+class FixFuncattrs(fixer_base.BaseFix):
 PATTERN = """
 power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
 | 'func_name' | 'func_defaults' | 'func_code'
@@ -5,10 +5,10 @@ from __future__ import foo is replaced with an empty line.
 # Author: Christian Heimes

 # Local imports
-from . import basefix
-from .util import BlankLine
+from .. import fixer_base
+from ..fixer_util import BlankLine

-class FixFuture(basefix.BaseFix):
+class FixFuture(fixer_base.BaseFix):
 PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""

 # This should be run last -- some things check for the import

@@ -32,11 +32,11 @@ CAVEATS:
 # Local imports
 from .. import pytree
 from ..pgen2 import token
-from . import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name


-class FixHasKey(basefix.BaseFix):
+class FixHasKey(fixer_base.BaseFix):

 PATTERN = """
 anchor=power<

@@ -28,13 +28,13 @@ into
 # Author: Jacques Frechet, Collin Winter

 # Local imports
-from . import basefix
-from .util import Call, Comma, Name, Node, syms
+from .. import fixer_base
+from ..fixer_util import Call, Comma, Name, Node, syms

 CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
 TYPE = "power< 'type' trailer< '(' x=any ')' > >"

-class FixIdioms(basefix.BaseFix):
+class FixIdioms(fixer_base.BaseFix):

 explicit = True # The user must ask for this fixer

@@ -11,11 +11,11 @@ Becomes:
 """

 # Local imports
-from . import basefix
+from .. import fixer_base
 from os.path import dirname, join, exists, pathsep
-from .util import FromImport
+from ..fixer_util import FromImport

-class FixImport(basefix.BaseFix):
+class FixImport(fixer_base.BaseFix):

 PATTERN = """
 import_from< type='from' imp=any 'import' any >
@@ -8,8 +8,8 @@ Fixes:
 # Author: Collin Winter

 # Local imports
-from . import basefix
-from .util import Name, attr_chain, any, set
+from .. import fixer_base
+from ..fixer_util import Name, attr_chain, any, set
 import __builtin__
 builtin_names = [name for name in dir(__builtin__)
 if name not in ("__name__", "__doc__")]
@@ -150,6 +150,123 @@ MAPPING = {"StringIO": ("io", ["StringIO"]),
 'error', 'exit', 'exit_thread', 'get_ident',
 'interrupt_main', 'stack_size', 'start_new',
 'start_new_thread']),
+'whichdb': ('dbm', ['whichdb']),
+'anydbm': ('dbm', ['error', 'open']),
+'dbhash': ('dbm.bsd', ['error', 'open']),
+'dumbdbm': ('dbm.dumb', ['error', 'open', '_Database']),
+'dbm': ('dbm.ndbm', ['error', 'open', 'library']),
+'gdbm': ('dbm.gnu', ['error', 'open', 'open_flags']),
+'xmlrpclib': ('xmlrpc.client',
+['Error', 'ProtocolError', 'ResponseError', 'Fault',
+'ServerProxy', 'Boolean', 'DateTime', 'Binary',
+'ExpatParser', 'FastMarshaller', 'FastParser',
+'FastUnmarshaller', 'MultiCall', 'MultiCallIterator',
+'SlowParser', 'Marshaller', 'Unmarshaller', 'Server',
+'Transport', 'SafeTransport', 'SgmlopParser',
+'boolean', 'getparser', 'dumps', 'loads', 'escape',
+'PARSE_ERROR', 'SERVER_ERROR', 'WRAPPERS',
+'APPLICATION_ERROR', 'SYSTEM_ERROR',
+'TRANSPORT_ERROR', 'NOT_WELLFORMED_ERROR',
+'UNSUPPORTED_ENCODING', 'INVALID_ENCODING_CHAR',
+'INVALID_XMLRPC', 'METHOD_NOT_FOUND',
+'INVALID_METHOD_PARAMS', 'INTERNAL_ERROR',
+'MININT', 'MAXINT']),
+'DocXMLRPCServer': ('xmlrpc.server',
+['CGIXMLRPCRequestHandler',
+'DocCGIXMLRPCRequestHandler',
+'DocXMLRPCRequestHandler', 'DocXMLRPCServer',
+'ServerHTMLDoc', 'SimpleXMLRPCRequestHandler',
+'SimpleXMLRPCServer', 'XMLRPCDocGenerator',
+'resolve_dotted_attribute']),
+'SimpleXMLRPCServer': ('xmlrpc.server',
+['CGIXMLRPCRequestHandler',
+'Fault', 'SimpleXMLRPCDispatcher',
+'SimpleXMLRPCRequestHandler',
+'SimpleXMLRPCServer', 'SocketServer',
+'list_public_methods',
+'remove_duplicates',
+'resolve_dotted_attribute']),
+'httplib': ('http.client',
+['ACCEPTED', 'BAD_GATEWAY', 'BAD_REQUEST',
+'BadStatusLine', 'CONFLICT', 'CONTINUE', 'CREATED',
+'CannotSendHeader', 'CannotSendRequest',
+'EXPECTATION_FAILED', 'FAILED_DEPENDENCY', 'FORBIDDEN',
+'FOUND', 'FakeSocket', 'GATEWAY_TIMEOUT', 'GONE',
+'HTTP', 'HTTPConnection', 'HTTPException',
+'HTTPMessage', 'HTTPResponse', 'HTTPS',
+'HTTPSConnection', 'HTTPS_PORT', 'HTTP_PORT',
+'HTTP_VERSION_NOT_SUPPORTED', 'IM_USED',
+'INSUFFICIENT_STORAGE', 'INTERNAL_SERVER_ERROR',
+'ImproperConnectionState', 'IncompleteRead',
+'InvalidURL', 'LENGTH_REQUIRED', 'LOCKED',
+'LineAndFileWrapper', 'MAXAMOUNT', 'METHOD_NOT_ALLOWED',
+'MOVED_PERMANENTLY', 'MULTIPLE_CHOICES', 'MULTI_STATUS',
+'NON_AUTHORITATIVE_INFORMATION', 'NOT_ACCEPTABLE',
+'NOT_EXTENDED', 'NOT_FOUND', 'NOT_IMPLEMENTED',
+'NOT_MODIFIED', 'NO_CONTENT', 'NotConnected', 'OK',
+'PARTIAL_CONTENT', 'PAYMENT_REQUIRED',
+'PRECONDITION_FAILED', 'PROCESSING',
+'PROXY_AUTHENTICATION_REQUIRED',
+'REQUESTED_RANGE_NOT_SATISFIABLE',
+'REQUEST_ENTITY_TOO_LARGE', 'REQUEST_TIMEOUT',
+'REQUEST_URI_TOO_LONG', 'RESET_CONTENT',
+'ResponseNotReady', 'SEE_OTHER', 'SERVICE_UNAVAILABLE',
+'SSLFile', 'SWITCHING_PROTOCOLS', 'SharedSocket',
+'SharedSocketClient', 'StringIO', 'TEMPORARY_REDIRECT',
+'UNAUTHORIZED', 'UNPROCESSABLE_ENTITY',
+'UNSUPPORTED_MEDIA_TYPE', 'UPGRADE_REQUIRED',
+'USE_PROXY', 'UnimplementedFileMode', 'UnknownProtocol',
+'UnknownTransferEncoding', 'error', 'responses']),
+'Cookie': ('http.cookies',
+['BaseCookie', 'Cookie', 'CookieError', 'Morsel',
+'SerialCookie', 'SimpleCookie', 'SmartCookie']),
+'cookielib': ('http.cookiejar',
+['Absent', 'Cookie', 'CookieJar', 'CookiePolicy',
+'DAYS', 'DEFAULT_HTTP_PORT', 'DefaultCookiePolicy',
+'EPOCH_YEAR', 'ESCAPED_CHAR_RE', 'FileCookieJar',
+'HEADER_ESCAPE_RE', 'HEADER_JOIN_ESCAPE_RE',
+'HEADER_QUOTED_VALUE_RE', 'HEADER_TOKEN_RE',
+'HEADER_VALUE_RE', 'HTTP_PATH_SAFE', 'IPV4_RE',
+'ISO_DATE_RE', 'LOOSE_HTTP_DATE_RE', 'LWPCookieJar',
+'LoadError', 'MISSING_FILENAME_TEXT', 'MONTHS',
+'MONTHS_LOWER', 'MozillaCookieJar', 'STRICT_DATE_RE',
+'TIMEZONE_RE', 'UTC_ZONES', 'WEEKDAY_RE',
+'cut_port_re', 'deepvalues', 'domain_match',
+'eff_request_host', 'escape_path', 'http2time',
+'is_HDN', 'is_third_party', 'iso2time',
+'join_header_words', 'liberal_is_HDN', 'logger',
+'lwp_cookie_str', 'month', 'offset_from_tz_string',
+'parse_ns_headers', 'reach', 'request_host',
+'request_path', 'request_port', 'split_header_words',
+'time', 'time2isoz', 'time2netscape', 'unmatched',
+'uppercase_escaped_char', 'urllib',
+'user_domain_match', 'vals_sorted_by_key']),
+'BaseHTTPServer': ('http.server',
+['BaseHTTPRequestHandler',
+'DEFAULT_ERROR_MESSAGE', 'HTTPServer']),
+'SimpleHTTPServer': ('http.server', ['SimpleHTTPRequestHandler']),
+'CGIHTTPServer': ('http.server',
+['CGIHTTPRequestHandler', 'executable',
+'nobody_uid', 'nobody']),
+'test.test_support': ('test.support',
+["Error", "TestFailed", "TestSkipped", "ResourceDenied",
+"import_module", "verbose", "use_resources",
+"max_memuse", "record_original_stdout",
+"get_original_stdout", "unload", "unlink", "rmtree",
+"forget", "is_resource_enabled", "requires",
+"find_unused_port", "bind_port",
+"fcmp", "is_jython", "TESTFN", "HOST",
+"FUZZ", "findfile", "verify", "vereq", "sortdict",
+"check_syntax_error", "open_urlresource", "WarningMessage",
+"catch_warning", "CleanImport", "EnvironmentVarGuard",
+"TransientResource", "captured_output", "captured_stdout",
+"TransientResource", "transient_internet", "run_with_locale",
+"set_memlimit", "bigmemtest", "bigaddrspacetest",
+"BasicTestRunner", "run_unittest", "run_doctest",
+"threading_setup", "threading_cleanup", "reap_children"]),
+'commands': ('subprocess', ['getstatusoutput', 'getoutput']),
+'UserString' : ('collections', ['UserString']),
+'UserList' : ('collections', ['UserList']),
 }

@@ -180,7 +297,7 @@ def build_pattern():
 yield """bare_name=%s""" % alternates(bare)


-class FixImports(basefix.BaseFix):
+class FixImports(fixer_base.BaseFix):
 PATTERN = "|".join(build_pattern())

 order = "pre" # Pre-order tree traversal
@@ -2,15 +2,15 @@
 # Author: Andre Roberge

 # Local imports
-from . import basefix
-from .util import Call, Name
+from .. import fixer_base
+from ..fixer_util import Call, Name
 from .. import patcomp


 context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >")


-class FixInput(basefix.BaseFix):
+class FixInput(fixer_base.BaseFix):

 PATTERN = """
 power< 'input' args=trailer< '(' [any] ')' > >

@@ -7,11 +7,11 @@ intern(s) -> sys.intern(s)"""

 # Local imports
 from .. import pytree
-from . import basefix
-from .util import Name, Attr
+from .. import fixer_base
+from ..fixer_util import Name, Attr


-class FixIntern(basefix.BaseFix):
+class FixIntern(fixer_base.BaseFix):

 PATTERN = """
 power< 'intern'

@@ -8,10 +8,10 @@
 """

 # Local imports
-from . import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name

-class FixItertools(basefix.BaseFix):
+class FixItertools(fixer_base.BaseFix):
 it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')"
 PATTERN = """
 power< it='itertools'

@@ -1,10 +1,10 @@
 """ Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """

 # Local imports
-from . import basefix
-from .util import BlankLine
+from .. import fixer_base
+from ..fixer_util import BlankLine

-class FixItertoolsImports(basefix.BaseFix):
+class FixItertoolsImports(fixer_base.BaseFix):
 PATTERN = """
 import_from< 'from' 'itertools' 'import' imports=any >
 """ %(locals())
@@ -8,11 +8,11 @@ This also strips the trailing 'L' or 'l' from long loterals.

 # Local imports
 from .. import pytree
-from . import basefix
-from .util import Name, Number
+from .. import fixer_base
+from ..fixer_util import Name, Number


-class FixLong(basefix.BaseFix):
+class FixLong(fixer_base.BaseFix):

 PATTERN = """
 (long_type = 'long' | number = NUMBER)

@@ -21,11 +21,11 @@ soon as the shortest argument is exhausted.

 # Local imports
 from ..pgen2 import token
-from . import basefix
-from .util import Name, Call, ListComp, in_special_context
+from .. import fixer_base
+from ..fixer_util import Name, Call, ListComp, in_special_context
 from ..pygram import python_symbols as syms

-class FixMap(basefix.ConditionalFix):
+class FixMap(fixer_base.ConditionalFix):

 PATTERN = """
 map_none=power<

@@ -3,8 +3,8 @@
 # Author: Christian Heimes

 # Local imports
-from . import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name

 MAP = {
 "im_func" : "__func__",

@@ -12,7 +12,7 @@ MAP = {
 "im_class" : "__self__.__class__"
 }

-class FixMethodattrs(basefix.BaseFix):
+class FixMethodattrs(fixer_base.BaseFix):
 PATTERN = """
 power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
 """
@@ -6,10 +6,10 @@
 # Local imports
 from .. import pytree
 from ..pgen2 import token
-from . import basefix
+from .. import fixer_base


-class FixNe(basefix.BaseFix):
+class FixNe(fixer_base.BaseFix):
 # This is so simple that we don't need the pattern compiler.

 def match(self, node):

@@ -8,13 +8,13 @@
 # Local imports
 from ..pgen2 import token
 from ..pygram import python_symbols as syms
-from . import basefix
-from .util import Name, Call, find_binding, any
+from .. import fixer_base
+from ..fixer_util import Name, Call, find_binding, any

 bind_warning = "Calls to builtin next() possibly shadowed by global binding"


-class FixNext(basefix.BaseFix):
+class FixNext(fixer_base.BaseFix):
 PATTERN = """
 power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >

@@ -2,10 +2,10 @@
 # Author: Collin Winter

 # Local imports
-from .import basefix
-from .util import Name, syms
+from .. import fixer_base
+from ..fixer_util import Name, syms

-class FixNonzero(basefix.BaseFix):
+class FixNonzero(fixer_base.BaseFix):
 PATTERN = """
 classdef< 'class' any+ ':'
 suite< any*

@@ -5,11 +5,11 @@

 # Local imports
 from ..pgen2 import token
-from .import basefix
-from .util import Number, set
+from .. import fixer_base
+from ..fixer_util import Number, set


-class FixNumliterals(basefix.BaseFix):
+class FixNumliterals(fixer_base.BaseFix):
 # This is so simple that we don't need the pattern compiler.

 def match(self, node):
@@ -17,8 +17,8 @@ No changes are applied if print_function is imported from __future__
 from .. import patcomp
 from .. import pytree
 from ..pgen2 import token
-from .import basefix
-from .util import Name, Call, Comma, String, is_tuple
+from .. import fixer_base
+from ..fixer_util import Name, Call, Comma, String, is_tuple


 parend_expr = patcomp.compile_pattern(

@@ -26,7 +26,7 @@ parend_expr = patcomp.compile_pattern(
 )


-class FixPrint(basefix.ConditionalFix):
+class FixPrint(fixer_base.ConditionalFix):

 PATTERN = """
 simple_stmt< bare='print' any > | print_stmt

@@ -24,10 +24,10 @@ CAVEATS:
 # Local imports
 from .. import pytree
 from ..pgen2 import token
-from .import basefix
-from .util import Name, Call, Attr, ArgList, is_tuple
+from .. import fixer_base
+from ..fixer_util import Name, Call, Attr, ArgList, is_tuple

-class FixRaise(basefix.BaseFix):
+class FixRaise(fixer_base.BaseFix):

 PATTERN = """
 raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >

@@ -2,10 +2,10 @@
 # Author: Andre Roberge

 # Local imports
-from .import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name

-class FixRawInput(basefix.BaseFix):
+class FixRawInput(fixer_base.BaseFix):

 PATTERN = """
 power< name='raw_input' trailer< '(' [any] ')' > >

@@ -7,8 +7,8 @@ Fixes:
 # based on Collin Winter's fix_import

 # Local imports
-from .import basefix
-from .util import Name, attr_chain, any, set
+from .. import fixer_base
+from ..fixer_util import Name, attr_chain, any, set

 MAPPING = {"sys": {"maxint" : "maxsize"},
 }
@@ -39,7 +39,7 @@ def build_pattern():
 #yield """bare_name=%s""" % alternates(bare)


-class FixRenames(basefix.BaseFix):
+class FixRenames(fixer_base.BaseFix):
 PATTERN = "|".join(build_pattern())

 order = "pre" # Pre-order tree traversal

@@ -4,11 +4,11 @@
 """Fixer that transforms `xyzzy` into repr(xyzzy)."""

 # Local imports
-from .import basefix
-from .util import Call, Name
+from .. import fixer_base
+from ..fixer_util import Call, Name


-class FixRepr(basefix.BaseFix):
+class FixRepr(fixer_base.BaseFix):

 PATTERN = """
 atom < '`' expr=any '`' >

@@ -4,11 +4,11 @@
 """Fixer for StandardError -> Exception."""

 # Local imports
-from .import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name


-class FixStandarderror(basefix.BaseFix):
+class FixStandarderror(fixer_base.BaseFix):

 PATTERN = """
 'StandardError'

@@ -10,10 +10,10 @@ g.throw("foo"[, V[, T]]) will warn about string exceptions."""
 # Local imports
 from .. import pytree
 from ..pgen2 import token
-from .import basefix
-from .util import Name, Call, ArgList, Attr, is_tuple
+from .. import fixer_base
+from ..fixer_util import Name, Call, ArgList, Attr, is_tuple

-class FixThrow(basefix.BaseFix):
+class FixThrow(fixer_base.BaseFix):

 PATTERN = """
 power< any trailer< '.' 'throw' >
@@ -21,14 +21,14 @@ It will also support lambdas:
 # Local imports
 from .. import pytree
 from ..pgen2 import token
-from .import basefix
-from .util import Assign, Name, Newline, Number, Subscript, syms
+from .. import fixer_base
+from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms

 def is_docstring(stmt):
 return isinstance(stmt, pytree.Node) and \
 stmt.children[0].type == token.STRING

-class FixTupleParams(basefix.BaseFix):
+class FixTupleParams(fixer_base.BaseFix):
 PATTERN = """
 funcdef< 'def' any parameters< '(' args=any ')' >
 ['->' any] ':' suite=any+ >

@@ -21,8 +21,8 @@ There should be another fixer that handles at least the following constants:

 # Local imports
 from ..pgen2 import token
-from .import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name

 _TYPE_MAPPING = {
 'BooleanType' : 'bool',

@@ -51,7 +51,7 @@ _TYPE_MAPPING = {

 _pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]

-class FixTypes(basefix.BaseFix):
+class FixTypes(fixer_base.BaseFix):

 PATTERN = '|'.join(_pats)

@@ -4,9 +4,9 @@

 import re
 from ..pgen2 import token
-from .import basefix
+from .. import fixer_base

-class FixUnicode(basefix.BaseFix):
+class FixUnicode(fixer_base.BaseFix):

 PATTERN = "STRING | NAME<'unicode' | 'unichr'>"

@@ -7,9 +7,9 @@ uses of colons. It does not touch other uses of whitespace.

 from .. import pytree
 from ..pgen2 import token
-from .import basefix
+from .. import fixer_base

-class FixWsComma(basefix.BaseFix):
+class FixWsComma(fixer_base.BaseFix):

 explicit = True # The user must ask for this fixers

@@ -4,12 +4,12 @@
 """Fixer that changes xrange(...) into range(...)."""

 # Local imports
-from .import basefix
-from .util import Name, Call, consuming_calls
+from .. import fixer_base
+from ..fixer_util import Name, Call, consuming_calls
 from .. import patcomp


-class FixXrange(basefix.BaseFix):
+class FixXrange(fixer_base.BaseFix):

 PATTERN = """
 power< (name='range'|name='xrange') trailer< '(' [any] ')' > any* >

@@ -4,11 +4,11 @@ This fixer will also convert g(f.xreadlines) into g(f.__iter__)."""
 # Author: Collin Winter

 # Local imports
-from .import basefix
-from .util import Name
+from .. import fixer_base
+from ..fixer_util import Name


-class FixXreadlines(basefix.BaseFix):
+class FixXreadlines(fixer_base.BaseFix):
 PATTERN = """
 power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >

@@ -8,10 +8,10 @@ iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
 """

 # Local imports
-from . import basefix
-from .util import Name, Call, in_special_context
+from .. import fixer_base
+from ..fixer_util import Name, Call, in_special_context

-class FixZip(basefix.ConditionalFix):
+class FixZip(fixer_base.ConditionalFix):

 PATTERN = """
 power< 'zip' args=trailer< '(' [any] ')' >
@@ -30,11 +30,13 @@ from . import patcomp
 from . import fixes
 from . import pygram

-def main(args=None):
+def main(fixer_dir, args=None):
 """Main program.

-Call without arguments to use sys.argv[1:] as the arguments; or
-call with a list of arguments (excluding sys.argv[0]).
+Args:
+fixer_dir: directory where fixer modules are located.
+args: optional; a list of command line arguments. If omitted,
+sys.argv[1:] is used.

 Returns a suggested exit status (0, 1, 2).
 """

@@ -57,7 +59,7 @@ def main(args=None):
 options, args = parser.parse_args(args)
 if options.list_fixes:
 print "Available transformations for the -f/--fix option:"
-for fixname in get_all_fix_names():
+for fixname in get_all_fix_names(fixer_dir):
 print fixname
 if not args:
 return 0

@@ -76,7 +78,7 @@ def main(args=None):
 logging.basicConfig(format='%(name)s: %(message)s', level=logging.INFO)

 # Initialize the refactoring tool
-rt = RefactoringTool(options)
+rt = RefactoringTool(fixer_dir, options)

 # Refactor all files and directories passed as arguments
 if not rt.errors:

@@ -87,10 +89,10 @@ def main(args=None):
 return int(bool(rt.errors))


-def get_all_fix_names():
+def get_all_fix_names(fixer_dir):
 """Return a sorted list of all available fix names."""
 fix_names = []
-names = os.listdir(os.path.dirname(fixes.__file__))
+names = os.listdir(fixer_dir)
 names.sort()
 for name in names:
 if name.startswith("fix_") and name.endswith(".py"):
@@ -138,11 +140,14 @@ def get_headnode_dict(fixer_list):

 class RefactoringTool(object):

-def __init__(self, options):
+def __init__(self, fixer_dir, options):
 """Initializer.

-The argument is an optparse.Values instance.
+Args:
+fixer_dir: directory in which to find fixer modules.
+options: an optparse.Values instance.
 """
+self.fixer_dir = fixer_dir
 self.options = options
 self.errors = []
 self.logger = logging.getLogger("RefactoringTool")

@@ -167,14 +172,15 @@ class RefactoringTool(object):
 want a pre-order AST traversal, and post_order is the list that want
 post-order traversal.
 """
+fixer_pkg = ".".join(self.fixer_dir.split(os.path.sep))
 pre_order_fixers = []
 post_order_fixers = []
 fix_names = self.options.fix
 if not fix_names or "all" in fix_names:
-fix_names = get_all_fix_names()
+fix_names = get_all_fix_names(self.fixer_dir)
 for fix_name in fix_names:
 try:
-mod = __import__("lib2to3.fixes.fix_" + fix_name, {}, {}, ["*"])
+mod = __import__(fixer_pkg + ".fix_" + fix_name, {}, {}, ["*"])
 except ImportError:
 self.log_error("Can't find transformation %s", fix_name)
 continue
@@ -29,7 +29,7 @@ class Test_all(support.TestCase):
 def setUp(self):
 options = Options(fix=["all", "idioms", "ws_comma", "buffer"],
 print_function=False)
-self.refactor = refactor.RefactoringTool(options)
+self.refactor = refactor.RefactoringTool("lib2to3/fixes", options)

 def test_all_project_files(self):
 for filepath in support.all_project_files():

@@ -10,13 +10,14 @@ except ImportError:

 # Python imports
 import unittest
+from itertools import chain
 from os.path import dirname, pathsep

 # Local imports
 from .. import pygram
 from .. import pytree
 from .. import refactor
-from ..fixes import util
+from .. import fixer_util


 class Options:

@@ -29,11 +30,10 @@ class Options:
 class FixerTestCase(support.TestCase):
 def setUp(self):
 options = Options(fix=[self.fixer], print_function=False)
-self.refactor = refactor.RefactoringTool(options)
+self.refactor = refactor.RefactoringTool("lib2to3/fixes", options)
 self.fixer_log = []
 self.filename = "<string>"

-from itertools import chain
 for order in (self.refactor.pre_order.values(),\
 self.refactor.post_order.values()):
 for fixer in chain(*order):
@@ -70,7 +70,7 @@ class FixerTestCase(support.TestCase):
 fix = [self.fixer]
 fix.extend(names)
 options = Options(fix=fix, print_function=False)
-r = refactor.RefactoringTool(options)
+r = refactor.RefactoringTool("lib2to3/fixes", options)
 (pre, post) = r.get_fixers()
 n = "fix_" + self.fixer
 if post and post[-1].__class__.__module__.endswith(n):

@@ -1109,7 +1109,7 @@ class Test_dict(FixerTestCase):
 self.check(b, a)

 def test_unchanged(self):
-for wrapper in util.consuming_calls:
+for wrapper in fixer_util.consuming_calls:
 s = "s = %s(d.keys())" % wrapper
 self.unchanged(s)

@@ -1302,7 +1302,7 @@ class Test_xrange(FixerTestCase):
 self.unchanged("x in range(10, 3, 9)")

 def test_in_consuming_context(self):
-for call in util.consuming_calls:
+for call in fixer_util.consuming_calls:
 self.unchanged("a = %s(range(10))" % call)

 class Test_raw_input(FixerTestCase):
@@ -10,7 +10,8 @@ import os.path

 # Local imports
 from .. import pytree
-from ..fixes import util
+from .. import fixer_util
+from ..fixer_util import Attr, Name


 def parse(code, strip_levels=0):

@@ -25,13 +26,13 @@ def parse(code, strip_levels=0):
 class MacroTestCase(support.TestCase):
 def assertStr(self, node, string):
 if isinstance(node, (tuple, list)):
-node = pytree.Node(util.syms.simple_stmt, node)
+node = pytree.Node(fixer_util.syms.simple_stmt, node)
 self.assertEqual(str(node), string)


 class Test_is_tuple(support.TestCase):
 def is_tuple(self, string):
-return util.is_tuple(parse(string, strip_levels=2))
+return fixer_util.is_tuple(parse(string, strip_levels=2))

 def test_valid(self):
 self.failUnless(self.is_tuple("(a, b)"))

@@ -47,7 +48,7 @@ class Test_is_tuple(support.TestCase):

 class Test_is_list(support.TestCase):
 def is_list(self, string):
-return util.is_list(parse(string, strip_levels=2))
+return fixer_util.is_list(parse(string, strip_levels=2))

 def test_valid(self):
 self.failUnless(self.is_list("[]"))

@@ -62,23 +63,18 @@ class Test_is_list(support.TestCase):

 class Test_Attr(MacroTestCase):
 def test(self):
-from ..fixes.util import Attr, Name
 call = parse("foo()", strip_levels=2)

 self.assertStr(Attr(Name("a"), Name("b")), "a.b")
 self.assertStr(Attr(call, Name("b")), "foo().b")

 def test_returns(self):
-from ..fixes.util import Attr, Name

 attr = Attr(Name("a"), Name("b"))
 self.assertEqual(type(attr), list)


 class Test_Name(MacroTestCase):
 def test(self):
-from ..fixes.util import Name

 self.assertStr(Name("a"), "a")
 self.assertStr(Name("foo.foo().bar"), "foo.foo().bar")
 self.assertStr(Name("a", prefix="b"), "ba")
@@ -88,7 +84,7 @@ class Test_does_tree_import(support.TestCase):
 def _find_bind_rec(self, name, node):
 # Search a tree for a binding -- used to find the starting
 # point for these tests.
-c = util.find_binding(name, node)
+c = fixer_util.find_binding(name, node)
 if c: return c
 for child in node.children:
 c = self._find_bind_rec(name, child)

@@ -98,7 +94,7 @@ class Test_does_tree_import(support.TestCase):
 node = parse(string)
 # Find the binding of start -- that's what we'll go from
 node = self._find_bind_rec('start', node)
-return util.does_tree_import(package, name, node)
+return fixer_util.does_tree_import(package, name, node)

 def try_with(self, string):
 failing_tests = (("a", "a", "from a import b"),

@@ -130,7 +126,7 @@ class Test_does_tree_import(support.TestCase):

 class Test_find_binding(support.TestCase):
 def find_binding(self, name, string, package=None):
-return util.find_binding(name, parse(string), package)
+return fixer_util.find_binding(name, parse(string), package)

 def test_simple_assignment(self):
 self.failUnless(self.find_binding("a", "a = b"))
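Note: the sketch below is not part of the diff. It is a rough illustration of how a tool reusing lib2to3 might drive the engine after r63886, now that RefactoringTool (and refactor.main) take the fixer directory explicitly instead of assuming lib2to3/fixes. The Options stand-in mirrors the one used by the test suite touched above; any other attribute requirements of a given revision are not covered.

    # Hypothetical driver script reusing lib2to3 as a refactoring library.
    from lib2to3 import refactor

    class Options:
        # Minimal stand-in for the optparse.Values object the tool expects,
        # the same trick the tests in this commit use.
        def __init__(self, **kwargs):
            self.__dict__.update(kwargs)
            self.verbose = False

    # Point the tool at a directory of fix_*.py modules; "lib2to3/fixes"
    # gives the stock 2to3 fixers, a project could pass its own directory.
    options = Options(fix=["all"], print_function=False)
    rt = refactor.RefactoringTool("lib2to3/fixes", options)
    pre, post = rt.get_fixers()
    print "loaded %d pre-order and %d post-order fixers" % (len(pre), len(post))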