From 8059e1e2140d08683429a6731ecf4b1d2385cce3 Mon Sep 17 00:00:00 2001
From: Benjamin Peterson
Date: Sat, 26 Feb 2011 22:11:02 +0000
Subject: [PATCH] Merged revisions 88535,88661 via svnmerge from
 svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

........
  r88535 | brett.cannon | 2011-02-23 13:46:46 -0600 (Wed, 23 Feb 2011) | 1 line

  Add lib2to3.__main__ for easy testing from the console.
........
  r88661 | benjamin.peterson | 2011-02-26 16:06:24 -0600 (Sat, 26 Feb 2011) | 6 lines

  fix refactoring on formfeed characters #11250

  This is because text.splitlines() is not the same as
  list(StringIO.StringIO(text)).
........
---
 Lib/lib2to3/__main__.py          |  4 ++++
 Lib/lib2to3/patcomp.py           |  3 ++-
 Lib/lib2to3/pgen2/driver.py      | 11 ++---------
 Lib/lib2to3/tests/test_parser.py | 10 ++++++++++
 4 files changed, 18 insertions(+), 10 deletions(-)
 create mode 100644 Lib/lib2to3/__main__.py

diff --git a/Lib/lib2to3/__main__.py b/Lib/lib2to3/__main__.py
new file mode 100644
index 00000000000..80688baf27a
--- /dev/null
+++ b/Lib/lib2to3/__main__.py
@@ -0,0 +1,4 @@
+import sys
+from .main import main
+
+sys.exit(main("lib2to3.fixes"))
diff --git a/Lib/lib2to3/patcomp.py b/Lib/lib2to3/patcomp.py
index 84fee5b2410..093e5f9f8df 100644
--- a/Lib/lib2to3/patcomp.py
+++ b/Lib/lib2to3/patcomp.py
@@ -12,6 +12,7 @@ __author__ = "Guido van Rossum "
 
 # Python imports
 import os
+import StringIO
 
 # Fairly local imports
 from .pgen2 import driver, literals, token, tokenize, parse, grammar
@@ -32,7 +33,7 @@ class PatternSyntaxError(Exception):
 def tokenize_wrapper(input):
     """Tokenizes a string suppressing significant whitespace."""
     skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
-    tokens = tokenize.generate_tokens(driver.generate_lines(input).next)
+    tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
     for quintuple in tokens:
         type, value, start, end, line_text = quintuple
         if type not in skip:
diff --git a/Lib/lib2to3/pgen2/driver.py b/Lib/lib2to3/pgen2/driver.py
index 6b3825e0556..16adec057fe 100644
--- a/Lib/lib2to3/pgen2/driver.py
+++ b/Lib/lib2to3/pgen2/driver.py
@@ -19,6 +19,7 @@ __all__ = ["Driver", "load_grammar"]
 import codecs
 import os
 import logging
+import StringIO
 import sys
 
 # Pgen imports
@@ -101,18 +102,10 @@ class Driver(object):
 
     def parse_string(self, text, debug=False):
         """Parse a string and return the syntax tree."""
-        tokens = tokenize.generate_tokens(generate_lines(text).next)
+        tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
         return self.parse_tokens(tokens, debug)
 
 
-def generate_lines(text):
-    """Generator that behaves like readline without using StringIO."""
-    for line in text.splitlines(True):
-        yield line
-    while True:
-        yield ""
-
-
 def load_grammar(gt="Grammar.txt", gp=None,
                  save=True, force=False, logger=None):
     """Load the grammar (maybe from a pickle)."""
diff --git a/Lib/lib2to3/tests/test_parser.py b/Lib/lib2to3/tests/test_parser.py
index 703d879af0c..26023813516 100644
--- a/Lib/lib2to3/tests/test_parser.py
+++ b/Lib/lib2to3/tests/test_parser.py
@@ -19,6 +19,16 @@ import sys
 # Local imports
 from lib2to3.pgen2 import tokenize
 from ..pgen2.parse import ParseError
+from lib2to3.pygram import python_symbols as syms
+
+
+class TestDriver(support.TestCase):
+
+    def test_formfeed(self):
+        s = """print 1\n\x0Cprint 2\n"""
+        t = driver.parse_string(s)
+        self.assertEqual(t.children[0].children[0].type, syms.print_stmt)
+        self.assertEqual(t.children[1].children[0].type, syms.print_stmt)
 
 
 class GrammarTest(support.TestCase):
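
Note on the r88661 change (not part of the patch itself): on a unicode string, splitlines(True) treats the form feed character (U+000C) as a line break, while readline() on a StringIO object only breaks on "\n". The old generate_lines() helper could therefore hand the tokenizer an extra, bogus "line" whenever the source contained a form feed. A minimal sketch of the difference, written in Python 2 to match the patch; the sample string is illustrative and not taken from the patch:

    import StringIO

    text = u"print 1\n\x0cprint 2\n"

    # unicode.splitlines() also breaks on U+000C (form feed), which is
    # how the old generate_lines() helper produced three "lines" here.
    print text.splitlines(True)
    # [u'print 1\n', u'\x0c', u'print 2\n']

    # readline() on a file-like object only breaks on "\n", so the
    # form feed stays attached to the second line.
    print list(StringIO.StringIO(text))
    # [u'print 1\n', u'\x0cprint 2\n']

Replacing generate_lines() with StringIO makes parse_string() split lines the same way reading from a real file would, which is what the new TestDriver.test_formfeed test exercises.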