Merged revisions 61602-61723 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

........
r61626 | david.wolever | 2008-03-19 17:19:16 +0100 (Wed, 19 Mar 2008) | 1 line

Added fixer for implicit local imports. See #2414.
........
r61628 | david.wolever | 2008-03-19 17:57:43 +0100 (Wed, 19 Mar 2008) | 1 line

Added a class for tests which should not run if a particular import is found.
........
r61629 | collin.winter | 2008-03-19 17:58:19 +0100 (Wed, 19 Mar 2008) | 1 line

Two more relative import fixes in pgen2.
........
r61635 | david.wolever | 2008-03-19 20:16:03 +0100 (Wed, 19 Mar 2008) | 1 line

Fixed print fixer so it will do the Right Thing when it encounters __future__.print_function. 2to3 gets upset, though, so the tests have been commented out.
........
r61637 | david.wolever | 2008-03-19 21:37:17 +0100 (Wed, 19 Mar 2008) | 3 lines

Added a fixer for itertools imports
(from itertools import imap, ifilterfalse --> from itertools import filterfalse)
........
r61645 | david.wolever | 2008-03-19 23:22:35 +0100 (Wed, 19 Mar 2008) | 1 line

SVN is happier when you add the files you create... -_-'
........
r61654 | david.wolever | 2008-03-20 01:09:56 +0100 (Thu, 20 Mar 2008) | 1 line

Added an explicit sort order to fixers -- fixes problems like #2427
........
r61664 | david.wolever | 2008-03-20 04:32:40 +0100 (Thu, 20 Mar 2008) | 3 lines

Fixes #2428 -- comments are no longer eaten by the __future__ fixer.
........
r61673 | david.wolever | 2008-03-20 17:22:40 +0100 (Thu, 20 Mar 2008) | 1 line

Added 2to3 node pretty-printer
........
r61679 | david.wolever | 2008-03-20 20:50:42 +0100 (Thu, 20 Mar 2008) | 1 line

Made node printing a little bit prettier
........
r61723 | martin.v.loewis | 2008-03-22 00:59:27 +0100 (Sat, 22 Mar 2008) | 2 lines

Fix whitespace.
........
This commit is contained in:
parent 0e9ab5f2f0
commit baf267ceae

fixes/basefix.py

@@ -16,6 +16,7 @@ except NameError:
 # Local imports
 from ..patcomp import PatternCompiler
 from .. import pygram
+from .util import does_tree_import
 
 class BaseFix(object):
 
@@ -36,6 +37,8 @@ class BaseFix(object):
     used_names = set()  # A set of all used NAMEs
     order = "post"      # Does the fixer prefer pre- or post-order traversal
     explicit = False    # Is this ignored by refactor.py -f all?
+    run_order = 5       # Fixers will be sorted by run order before execution
+                        # Lower numbers will be run first.
 
     # Shortcut for access to Python grammar symbols
     syms = pygram.python_symbols
 
@@ -163,3 +166,23 @@ class BaseFix(object):
         filename - the name of the file the tree came from.
         """
         pass
+
+
+class ConditionalFix(BaseFix):
+    """ Base class for fixers which not execute if an import is found. """
+
+    # This is the name of the import which, if found, will cause the test to be skipped
+    skip_on = None
+
+    def start_tree(self, *args):
+        super(ConditionalFix, self).start_tree(*args)
+        self._should_skip = None
+
+    def should_skip(self, node):
+        if self._should_skip is not None:
+            return self._should_skip
+        pkg = self.skip_on.split(".")
+        name = pkg[-1]
+        pkg = ".".join(pkg[:-1])
+        self._should_skip = does_tree_import(pkg, name, node)
+        return self._should_skip
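
The new ConditionalFix base class is what lets individual fixers opt out when a relevant __future__ or future_builtins import is already present (see the fix_filter, fix_map, fix_zip and fix_print changes below). A minimal sketch of how a subclass is meant to use it, with a hypothetical fixer name and skip_on target:

# Hypothetical fixer module (sketch only), as it would live under fixes/:
# declare skip_on, then bail out of transform() when that import is found.
from . import basefix

class FixFrobnicate(basefix.ConditionalFix):    # made-up example fixer

    PATTERN = "power< 'frobnicate' trailer< '(' [any] ')' > >"

    # skip files that already do "from future_builtins import frobnicate"
    skip_on = "future_builtins.frobnicate"

    def transform(self, node, results):
        if self.should_skip(node):
            return
        # ... otherwise rewrite the call here ...
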

fixes/fix_filter.py

@@ -16,9 +16,9 @@ Python 2.6 figure it out.
 # Local imports
 from ..pgen2 import token
 from . import basefix
-from .util import Name, Call, ListComp, does_tree_import, in_special_context
+from .util import Name, Call, ListComp, in_special_context
 
-class FixFilter(basefix.BaseFix):
+class FixFilter(basefix.ConditionalFix):
 
     PATTERN = """
         filter_lambda=power<
 
@@ -47,20 +47,10 @@ class FixFilter(basefix.BaseFix):
         >
     """
 
-    def start_tree(self, *args):
-        super(FixFilter, self).start_tree(*args)
-        self._new_filter = None
-
-    def has_new_filter(self, node):
-        if self._new_filter is not None:
-            return self._new_filter
-        self._new_filter = does_tree_import('future_builtins', 'filter', node)
-        return self._new_filter
+    skip_on = "future_builtins.filter"
 
     def transform(self, node, results):
-        if self.has_new_filter(node):
-            # If filter is imported from future_builtins, we don't want to
-            # do anything here.
+        if self.should_skip(node):
            return
 
         if "filter_lambda" in results:

fixes/fix_future.py

@@ -11,5 +11,10 @@ from .util import BlankLine
 class FixFuture(basefix.BaseFix):
     PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
 
+    # This should be run last -- some things check for the import
+    run_order = 10
+
     def transform(self, node, results):
-        return BlankLine()
+        new = BlankLine()
+        new.prefix = node.get_prefix()
+        return new

fixes/fix_import.py (new file)

@@ -0,0 +1,55 @@
+"""Fixer for import statements.
+If spam is being imported from the local directory, this import:
+    from spam import eggs
+Becomes:
+    from .spam import eggs
+
+And this import:
+    import spam
+Becomes:
+    import .spam
+"""
+
+# Local imports
+from . import basefix
+from os.path import dirname, join, exists, pathsep
+
+class FixImport(basefix.BaseFix):
+
+    PATTERN = """
+    import_from< 'from' imp=any 'import' any >
+    |
+    import_name< 'import' imp=any >
+    """
+
+    def transform(self, node, results):
+        imp = results['imp']
+
+        if unicode(imp).startswith('.'):
+            # Already a new-style import
+            return
+
+        if not probably_a_local_import(unicode(imp), self.filename):
+            # I guess this is a global import -- skip it!
+            return
+
+        # Some imps are top-level (eg: 'import ham')
+        # some are first level (eg: 'import ham.eggs')
+        # some are third level (eg: 'import ham.eggs as spam')
+        # Hence, the loop
+        while not hasattr(imp, 'value'):
+            imp = imp.children[0]
+
+        imp.value = "." + imp.value
+        node.changed()
+        return node
+
+def probably_a_local_import(imp_name, file_path):
+    # Must be stripped because the right space is included by the parser
+    imp_name = imp_name.split('.', 1)[0].strip()
+    base_path = dirname(file_path)
+    base_path = join(base_path, imp_name)
+    for ext in ['.py', pathsep, '.pyc', '.so', '.sl', '.pyd']:
+        if exists(base_path + ext):
+            return True
+    return False
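
The heart of the new fixer is probably_a_local_import(): only the first dotted component of the import is checked against the directory of the file being converted. A standalone sketch of that check (the file and module names here are made up for illustration):

from os.path import dirname, exists, join, pathsep

def looks_local(imp_name, file_path):
    # Mirrors probably_a_local_import() above: 'import ham.eggs' is treated
    # as local when ham.py (or a matching extension) sits next to file_path.
    head = imp_name.split('.', 1)[0].strip()
    base = join(dirname(file_path), head)
    return any(exists(base + ext)
               for ext in ('.py', pathsep, '.pyc', '.so', '.sl', '.pyd'))

# True only if mypkg/helpers.py (or a matching extension) exists next to
# mypkg/cli.py, in which case 'import helpers.text' becomes 'import .helpers.text'.
print(looks_local("helpers.text", "mypkg/cli.py"))
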

fixes/fix_itertools.py

@@ -1,6 +1,8 @@
 """ Fixer for itertools.(imap|ifilter|izip) --> (map|filter|zip) and
     itertools.ifilterfalse --> itertools.filterfalse (bugs 2360-2363)
 
+    imports from itertools are fixed in fix_itertools_import.py
+
     If itertools is imported as something else (ie: import itertools as it;
     it.izip(spam, eggs)) method calls will not get fixed.
     """
 
@@ -19,6 +21,9 @@ class FixItertools(basefix.BaseFix):
              power< func=%(it_funcs)s trailer< '(' [any] ')' > >
              """ %(locals())
 
+    # Needs to be run after fix_(map|zip|filter)
+    run_order = 6
+
     def transform(self, node, results):
         prefix = None
         func = results['func'][0]

fixes/fix_itertools_imports.py (new file)

@@ -0,0 +1,43 @@
+""" Fixer for imports of itertools.(imap|ifilter|izip|ifilterfalse) """
+
+# Local imports
+from . import basefix
+from .util import BlankLine
+
+class FixItertoolsImports(basefix.BaseFix):
+    PATTERN = """
+              import_from< 'from' 'itertools' 'import' imports=any >
+              """ %(locals())
+
+    def transform(self, node, results):
+        imports = results['imports']
+        children = imports.children[:] or [imports]
+        for child in children:
+            if not hasattr(child, 'value'):
+                # Handle 'import ... as ...'
+                continue
+            if child.value in ('imap', 'izip', 'ifilter'):
+                child.remove()
+            elif child.value == 'ifilterfalse':
+                node.changed()
+                child.value = 'filterfalse'
+
+        # Make sure the import statement is still sane
+        children = imports.children[:] or [imports]
+        remove_comma = True
+        for child in children:
+            if remove_comma and getattr(child, 'value', None) == ',':
+                child.remove()
+            else:
+                remove_comma ^= True
+
+        if unicode(children[-1]) == ',':
+            children[-1].remove()
+
+        # If there is nothing left, return a blank line
+        if not (imports.children or getattr(imports, 'value', None)):
+            new = BlankLine()
+            new.prefix = node.get_prefix()
+        else:
+            new = node
+        return new
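
The second loop in transform() is easy to misread: remove_comma starts out True so a comma left at the front (or directly after a removed name) is deleted, and it flips on every child that is kept. A standalone simulation of that clean-up pass, with plain strings standing in for parse-tree leaves:

def tidy(children):
    # Same toggle logic as FixItertoolsImports.transform() above, applied
    # after the unwanted names have already been removed.
    kept = []
    remove_comma = True
    for child in children:
        if remove_comma and child == ',':
            continue                  # comma orphaned by a removed name
        kept.append(child)
        remove_comma = not remove_comma
    if kept and kept[-1] == ',':      # dangling trailing comma
        kept.pop()
    return kept

# 'from itertools import bar, imap, izip, foo' after imap/izip are removed:
print(tidy(['bar', ',', ',', ',', 'foo']))   # ['bar', ',', 'foo']
print(tidy([',', ',', 'foo']))               # ['foo']
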

fixes/fix_map.py

@@ -22,10 +22,10 @@ soon as the shortest argument is exhausted.
 # Local imports
 from ..pgen2 import token
 from . import basefix
-from .util import Name, Call, ListComp, does_tree_import, in_special_context
+from .util import Name, Call, ListComp, in_special_context
 from ..pygram import python_symbols as syms
 
-class FixMap(basefix.BaseFix):
+class FixMap(basefix.ConditionalFix):
 
     PATTERN = """
     map_none=power<
 
@@ -54,20 +54,10 @@ class FixMap(basefix.BaseFix):
         >
     """
 
-    def start_tree(self, *args):
-        super(FixMap, self).start_tree(*args)
-        self._future_map_found = None
-
-    def has_future_map(self, node):
-        if self._future_map_found is not None:
-            return self._future_map_found
-        self._future_map_found = does_tree_import('future_builtins', 'map', node)
-        return self._future_map_found
+    skip_on = 'future_builtins.map'
 
     def transform(self, node, results):
-        if self.has_future_map(node):
-            # If a future map has been imported for this file, we won't
-            # be making any modifications
+        if self.should_skip(node):
            return
 
         if node.parent.type == syms.simple_stmt:

fixes/fix_print.py

@@ -8,6 +8,9 @@ Change:
     'print ...' into 'print(...)'
     'print ... ,' into 'print(..., end=" ")'
     'print >>x, ...' into 'print(..., file=x)'
+
+No changes are applied if print_function is imported from __future__
+
 """
 
 # Local imports
 
@@ -23,14 +26,20 @@ parend_expr = patcomp.compile_pattern(
               )
 
 
-class FixPrint(basefix.BaseFix):
+class FixPrint(basefix.ConditionalFix):
 
     PATTERN = """
               simple_stmt< bare='print' any > | print_stmt
               """
 
+    skip_on = '__future__.print_function'
+
     def transform(self, node, results):
         assert results
 
+        if self.should_skip(node):
+            return
+
         bare_print = results.get("bare")
 
         if bare_print:

fixes/fix_zip.py

@@ -9,33 +9,24 @@ iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
 
 # Local imports
 from . import basefix
-from .util import Name, Call, does_tree_import, in_special_context
+from .util import Name, Call, in_special_context
 
-class FixZip(basefix.BaseFix):
+class FixZip(basefix.ConditionalFix):
 
     PATTERN = """
     power< 'zip' args=trailer< '(' [any] ')' >
     >
     """
 
-    def start_tree(self, *args):
-        super(FixZip, self).start_tree(*args)
-        self._future_zip_found = None
-
-    def has_future_zip(self, node):
-        if self._future_zip_found is not None:
-            return self._future_zip_found
-        self._future_zip_found = does_tree_import('future_builtins', 'zip', node)
-        return self._future_zip_found
+    skip_on = "future_builtins.zip"
 
     def transform(self, node, results):
-        if self.has_future_zip(node):
-            # If a future zip has been imported for this file, we won't
-            # be making any modifications
+        if self.should_skip(node):
            return
 
         if in_special_context(node):
             return None
 
         new = node.clone()
         new.set_prefix("")
         new = Call(Name("list"), [new])

pgen2/driver.py

@@ -21,7 +21,7 @@ import logging
 import sys
 
 # Pgen imports
-from . import grammar, parse, token, tokenize
+from . import grammar, parse, token, tokenize, pgen
 
 
 class Driver(object):
 
@@ -123,7 +123,6 @@ def load_grammar(gt="Grammar.txt", gp=None,
         gp = head + tail + ".".join(map(str, sys.version_info)) + ".pickle"
     if force or not _newer(gp, gt):
         logger.info("Generating grammar tables from %s", gt)
-        from pgen2 import pgen
         g = pgen.generate_grammar(gt)
         if save:
             logger.info("Writing grammar tables to %s", gp)
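
For orientation, the driver is the usual entry point for turning source text into the pytree nodes the fixers operate on; load_grammar() now relies on the module-level pgen import instead of importing it lazily. A rough usage sketch (assuming lib2to3 is importable under its CPython package name):

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
tree = d.parse_string("print 'hi'\n")
# With the pytree change below, repr(tree) shows symbol names
# (e.g. simple_stmt) instead of bare grammar numbers.
print(repr(tree))
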

pgen2/pgen.py

@@ -2,7 +2,7 @@
 # Licensed to PSF under a Contributor Agreement.
 
 # Pgen imports
-from pgen2 import grammar, token, tokenize
+from . import grammar, token, tokenize
 
 class PgenGrammar(grammar.Grammar):
     pass

pytree.py

@@ -14,6 +14,17 @@ __author__ = "Guido van Rossum <guido@python.org>"
 
 HUGE = 0x7FFFFFFF  # maximum repeat count, default max
 
+_type_reprs = {}
+def type_repr(type_num):
+    global _type_reprs
+    if not _type_reprs:
+        from .pygram import python_symbols
+        # printing tokens is possible but not as useful
+        # from .pgen2 import token // token.__dict__.items():
+        for name, val in python_symbols.__dict__.items():
+            if type(val) == int: _type_reprs[val] = name
+    return _type_reprs.setdefault(type_num, type_num)
+
 
 class Base(object):
 
@@ -195,8 +206,8 @@ class Node(Base):
 
     def __repr__(self):
         """Returns a canonical string representation."""
-        return "%s(%r, %r)" % (self.__class__.__name__,
-                               self.type,
+        return "%s(%s, %r)" % (self.__class__.__name__,
+                               type_repr(self.type),
                                self.children)
 
     def __str__(self):
 
@@ -372,7 +383,7 @@ class BasePattern(object):
         return object.__new__(cls)
 
     def __repr__(self):
-        args = [self.type, self.content, self.name]
+        args = [type_repr(self.type), self.content, self.name]
        while args and args[-1] is None:
            del args[-1]
        return "%s(%s)" % (self.__class__.__name__, ", ".join(map(repr, args)))
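
type_repr() builds its number-to-name table lazily, on first use, by inverting the python_symbols namespace, and falls back to the raw number for anything it does not recognise. The same idea in a self-contained sketch (DummySymbols and its values are made up for illustration):

class DummySymbols(object):
    # stand-in for pygram.python_symbols; the numbers are arbitrary here
    funcdef = 262
    arith_expr = 273

_reprs = {}
def name_of(type_num, symbols=DummySymbols):
    if not _reprs:
        for name, val in symbols.__dict__.items():
            if type(val) == int:
                _reprs[val] = name
    return _reprs.setdefault(type_num, type_num)

print(name_of(273))   # 'arith_expr' -> reprs now read Node(arith_expr, ...)
print(name_of(999))   # 999: unknown types keep printing as plain numbers
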

refactor.py

@@ -161,6 +161,9 @@ class RefactoringTool(object):
                 post_order_fixers.append(fixer)
             else:
                 raise ValueError("Illegal fixer order: %r" % fixer.order)
+
+        pre_order_fixers.sort(key=lambda x: x.run_order)
+        post_order_fixers.sort(key=lambda x: x.run_order)
         return (pre_order_fixers, post_order_fixers)
 
     def log_error(self, msg, *args, **kwds):
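
The ordering fix for #2427 amounts to a stable sort on the new run_order attribute, applied to both fixer lists before they are returned. A small sketch with stand-in classes (the run_order values match the defaults set elsewhere in this commit):

class FixPrintDemo(object):      run_order = 5   # BaseFix default
class FixItertoolsDemo(object):  run_order = 6   # after fix_(map|zip|filter)
class FixFutureDemo(object):     run_order = 10  # must run last

post_order_fixers = [FixFutureDemo(), FixItertoolsDemo(), FixPrintDemo()]
post_order_fixers.sort(key=lambda x: x.run_order)
print([f.__class__.__name__ for f in post_order_fixers])
# ['FixPrintDemo', 'FixItertoolsDemo', 'FixFutureDemo']
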

tests/test_fixers.py

@@ -10,6 +10,7 @@ except ImportError:
 
 # Python imports
 import unittest
+from os.path import dirname, pathsep
 
 # Local imports
 from .. import pygram
 
@@ -28,6 +29,7 @@ class FixerTestCase(support.TestCase):
         options = Options(fix=[self.fixer], print_function=False)
         self.refactor = refactor.RefactoringTool(options)
         self.fixer_log = []
+        self.filename = "<string>"
 
         for order in (self.refactor.pre_order, self.refactor.post_order):
             for fixer in order:
 
@@ -36,7 +38,7 @@ class FixerTestCase(support.TestCase):
     def _check(self, before, after):
         before = support.reformat(before)
         after = support.reformat(after)
-        tree = self.refactor.refactor_string(before, "<string>")
+        tree = self.refactor.refactor_string(before, self.filename)
         self.failUnlessEqual(after, str(tree))
         return tree
 
@@ -60,6 +62,21 @@ class FixerTestCase(support.TestCase):
         if not ignore_warnings:
             self.failUnlessEqual(self.fixer_log, [])
 
+    def assert_runs_after(self, *names):
+        fix = [self.fixer]
+        fix.extend(names)
+        options = Options(fix=fix, print_function=False)
+        r = refactor.RefactoringTool(options)
+        (pre, post) = r.get_fixers()
+        n = "fix_" + self.fixer
+        if post and post[-1].__class__.__module__.endswith(n):
+            # We're the last fixer to run
+            return
+        if pre and pre[-1].__class__.__module__.endswith(n) and not post:
+            # We're the last in pre and post is empty
+            return
+        self.fail("Fixer run order (%s) is incorrect; %s should be last."\
+                  %(", ".join([x.__class__.__module__ for x in (pre+post)]), n))
+
 class Test_ne(FixerTestCase):
     fixer = "ne"
 
@@ -412,6 +429,29 @@ class Test_print(FixerTestCase):
         a = """print(file=sys.stderr)"""
         self.check(b, a)
 
+    # With from __future__ import print_function
+    def test_with_future_print_function(self):
+        # XXX: These tests won't actually do anything until the parser
+        #      is fixed so it won't crash when it sees print(x=y).
+        #      When #2412 is fixed, the try/except block can be taken
+        #      out and the tests can be run like normal.
+        try:
+            s = "from __future__ import print_function\n"\
+                "print('Hai!', end=' ')"
+            self.unchanged(s)
+
+            b = "print 'Hello, world!'"
+            a = "print('Hello, world!')"
+            self.check(b, a)
+
+            s = "from __future__ import *\n"\
+                "print('Hai!', end=' ')"
+            self.unchanged(s)
+        except:
+            return
+        else:
+            self.assertFalse(True, "#2421 has been fixed -- printing tests "\
+                                   "need to be updated!")
+
 class Test_exec(FixerTestCase):
     fixer = "exec"
 
@@ -464,7 +504,6 @@ class Test_exec(FixerTestCase):
         s = """exec(code, ns1, ns2)"""
         self.unchanged(s)
 
-
 class Test_repr(FixerTestCase):
     fixer = "repr"
 
@@ -666,7 +705,6 @@ class Test_except(FixerTestCase):
                pass"""
         self.unchanged(s)
 
-
 class Test_raise(FixerTestCase):
     fixer = "raise"
 
@@ -789,7 +827,6 @@ class Test_raise(FixerTestCase):
            b = 6"""
         self.check(b, a)
 
-
 class Test_throw(FixerTestCase):
     fixer = "throw"
 
@@ -915,7 +952,6 @@ class Test_throw(FixerTestCase):
            b = 6"""
         self.check(b, a)
 
-
 class Test_long(FixerTestCase):
     fixer = "long"
 
@@ -961,7 +997,6 @@ class Test_long(FixerTestCase):
         a = """x = int( x )"""
         self.check(b, a)
 
-
 class Test_dict(FixerTestCase):
     fixer = "dict"
 
@@ -1171,7 +1206,6 @@ class Test_xrange(FixerTestCase):
         a = """for i in range(10):\n    j=i"""
         self.check(b, a)
 
-
 class Test_raw_input(FixerTestCase):
     fixer = "raw_input"
 
@@ -1204,7 +1238,6 @@ class Test_raw_input(FixerTestCase):
         a = """x = input(foo(a) + 6)"""
         self.check(b, a)
 
-
 class Test_funcattrs(FixerTestCase):
     fixer = "funcattrs"
 
@@ -1231,7 +1264,6 @@ class Test_funcattrs(FixerTestCase):
            s = "f(foo.__%s__.foo)" % attr
            self.unchanged(s)
 
-
 class Test_xreadlines(FixerTestCase):
     fixer = "xreadlines"
 
@@ -1274,7 +1306,6 @@ class Test_xreadlines(FixerTestCase):
         s = "foo(xreadlines)"
         self.unchanged(s)
 
-
 class Test_imports(FixerTestCase):
     fixer = "imports"
 
@@ -1352,7 +1383,6 @@ class Test_imports(FixerTestCase):
                """ % (new, member, member, member)
            self.check(b, a)
 
-
 class Test_input(FixerTestCase):
     fixer = "input"
 
@@ -1400,7 +1430,6 @@ class Test_input(FixerTestCase):
         a = """x = eval(input(foo(5) + 9))"""
         self.check(b, a)
 
-
 class Test_tuple_params(FixerTestCase):
     fixer = "tuple_params"
 
@@ -1620,7 +1649,6 @@ class Test_methodattrs(FixerTestCase):
            s = "f(foo.__%s__.foo)" % attr
            self.unchanged(s)
 
-
 class Test_next(FixerTestCase):
     fixer = "next"
 
@@ -2250,7 +2278,6 @@ class Test_renames(FixerTestCase):
                """ % (mod, new, mod, new)
            self.check(b, a)
 
-
 class Test_unicode(FixerTestCase):
     fixer = "unicode"
 
@@ -2904,7 +2931,6 @@ class Test_idioms(FixerTestCase):
             """
         self.unchanged(s)
 
-
 class Test_basestring(FixerTestCase):
     fixer = "basestring"
 
@@ -2913,7 +2939,6 @@ class Test_basestring(FixerTestCase):
         a = """isinstance(x, str)"""
         self.check(b, a)
 
-
 class Test_buffer(FixerTestCase):
     fixer = "buffer"
 
@@ -2930,6 +2955,17 @@ class Test_future(FixerTestCase):
         a = """"""
         self.check(b, a)
 
+        b = """# comment\nfrom __future__ import braces"""
+        a = """# comment\n"""
+        self.check(b, a)
+
+        b = """from __future__ import braces\n# comment"""
+        a = """\n# comment"""
+        self.check(b, a)
+
+    def test_run_order(self):
+        self.assert_runs_after('print')
+
 class Test_itertools(FixerTestCase):
     fixer = "itertools"
 
@@ -2975,6 +3011,129 @@ class Test_itertools(FixerTestCase):
         a = """ itertools.filterfalse(a, b)"""
         self.check(b, a)
 
+    def test_run_order(self):
+        self.assert_runs_after('map', 'zip', 'filter')
+
+class Test_itertools_imports(FixerTestCase):
+    fixer = 'itertools_imports'
+
+    def test_reduced(self):
+        b = "from itertools import imap, izip, foo"
+        a = "from itertools import foo"
+        self.check(b, a)
+
+        b = "from itertools import bar, imap, izip, foo"
+        a = "from itertools import bar, foo"
+        self.check(b, a)
+
+    def test_comments(self):
+        b = "#foo\nfrom itertools import imap, izip"
+        a = "#foo\n"
+        self.check(b, a)
+
+    def test_none(self):
+        b = "from itertools import imap, izip"
+        a = ""
+        self.check(b, a)
+
+    def test_import_as(self):
+        b = "from itertools import izip, bar as bang, imap"
+        a = "from itertools import bar as bang"
+        self.check(b, a)
+
+        s = "from itertools import bar as bang"
+        self.unchanged(s)
+
+    def test_ifilter(self):
+        b = "from itertools import ifilterfalse"
+        a = "from itertools import filterfalse"
+        self.check(b, a)
+
+        b = "from itertools import imap, ifilterfalse, foo"
+        a = "from itertools import filterfalse, foo"
+        self.check(b, a)
+
+        b = "from itertools import bar, ifilterfalse, foo"
+        a = "from itertools import bar, filterfalse, foo"
+        self.check(b, a)
+
+
+    def test_unchanged(self):
+        s = "from itertools import foo"
+        self.unchanged(s)
+
+class Test_import(FixerTestCase):
+    fixer = "import"
+
+    def setUp(self):
+        FixerTestCase.setUp(self)
+        # Need to replace fix_import's isfile and isdir method
+        # so we can check that it's doing the right thing
+        self.files_checked = []
+        self.always_exists = True
+        def fake_exists(name):
+            self.files_checked.append(name)
+            return self.always_exists
+
+        from ..fixes import fix_import
+        fix_import.exists = fake_exists
+
+    def check_both(self, b, a):
+        self.always_exists = True
+        FixerTestCase.check(self, b, a)
+        self.always_exists = False
+        FixerTestCase.unchanged(self, b)
+
+    def test_files_checked(self):
+        def p(path):
+            # Takes a unix path and returns a path with correct separators
+            return pathsep.join(path.split("/"))
+
+        self.always_exists = False
+        expected_extensions = ('.py', pathsep, '.pyc', '.so', '.sl', '.pyd')
+        names_to_test = (p("/spam/eggs.py"), "ni.py", p("../../shrubbery.py"))
+
+        for name in names_to_test:
+            self.files_checked = []
+            self.filename = name
+            self.unchanged("import jam")
+
+            if dirname(name): name = dirname(name) + '/jam'
+            else: name = 'jam'
+            expected_checks = set(name + ext for ext in expected_extensions)
+
+            self.failUnlessEqual(set(self.files_checked), expected_checks)
+
+    def test_from(self):
+        b = "from foo import bar"
+        a = "from .foo import bar"
+        self.check_both(b, a)
+
+    def test_dotted_from(self):
+        b = "from green.eggs import ham"
+        a = "from .green.eggs import ham"
+        self.check_both(b, a)
+
+    def test_from_as(self):
+        b = "from green.eggs import ham as spam"
+        a = "from .green.eggs import ham as spam"
+        self.check_both(b, a)
+
+    def test_import(self):
+        b = "import foo"
+        a = "import .foo"
+        self.check_both(b, a)
+
+    def test_dotted_import(self):
+        b = "import foo.bar"
+        a = "import .foo.bar"
+        self.check_both(b, a)
+
+    def test_dotted_import_as(self):
+        b = "import foo.bar as bang"
+        a = "import .foo.bar as bang"
+        self.check_both(b, a)
+
 
 if __name__ == "__main__":
     import __main__