mirror of https://github.com/python/cpython
Merged revisions 88535,88661 via svnmerge from
svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

........
  r88535 | brett.cannon | 2011-02-23 13:46:46 -0600 (Wed, 23 Feb 2011) | 1 line

  Add lib2to3.__main__ for easy testing from the console.
........
  r88661 | benjamin.peterson | 2011-02-26 16:06:24 -0600 (Sat, 26 Feb 2011) | 6 lines

  fix refactoring on formfeed characters #11250

  This is because text.splitlines() is not the same as
  list(StringIO.StringIO(text)).
........
This commit is contained in:
parent aeb187a22b
commit 8059e1e214
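The key fact behind r88661: str.splitlines() treats the form feed character
(\x0c) as a line boundary, while reading the same text back through StringIO
splits only on "\n", which is what the tokenizer expects. A minimal
demonstration (written against Python 3's io.StringIO; the patched code targets
Python 2, where unicode.splitlines() and the StringIO module behave the same
way here):

    import io

    text = "print 1\n\x0cprint 2\n"

    # splitlines() also breaks on form feed, vertical tab, and other
    # Unicode line boundaries, so \x0c becomes a bogus one-character "line":
    print(text.splitlines(True))    # ['print 1\n', '\x0c', 'print 2\n']

    # readline()/iteration on StringIO breaks only on "\n":
    print(list(io.StringIO(text)))  # ['print 1\n', '\x0cprint 2\n']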
Lib/lib2to3/__main__.py
@@ -0,0 +1,4 @@
+import sys
+from .main import main
+
+sys.exit(main("lib2to3.fixes"))
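With this module in place, "python -m lib2to3 <options> <files>" runs the same
main() entry point as the 2to3 console script, so a checkout can be exercised
without installing the script first.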
Lib/lib2to3/patcomp.py
@@ -12,6 +12,7 @@ __author__ = "Guido van Rossum <guido@python.org>"
 
 # Python imports
 import os
+import StringIO
 
 # Fairly local imports
 from .pgen2 import driver, literals, token, tokenize, parse, grammar
@@ -32,7 +33,7 @@ class PatternSyntaxError(Exception):
 def tokenize_wrapper(input):
     """Tokenizes a string suppressing significant whitespace."""
     skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
-    tokens = tokenize.generate_tokens(driver.generate_lines(input).next)
+    tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
     for quintuple in tokens:
         type, value, start, end, line_text = quintuple
         if type not in skip:
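Both the old generate_lines(input).next and the new StringIO(input).readline
satisfy the only contract generate_tokens() needs: a callable that returns the
next line on each call and "" at EOF. A small sketch of that protocol using the
stdlib tokenizer (lib2to3.pgen2.tokenize consumes the same kind of callable):

    import io
    import tokenize

    src = "x = 1\n"
    # io.StringIO(src).readline returns "x = 1\n", then "" forever --
    # exactly the readline protocol generate_tokens() relies on.
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        print(tok)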
Lib/lib2to3/pgen2/driver.py
@@ -19,6 +19,7 @@ __all__ = ["Driver", "load_grammar"]
 import codecs
 import os
 import logging
+import StringIO
 import sys
 
 # Pgen imports
@@ -101,18 +102,10 @@ class Driver(object):
 
     def parse_string(self, text, debug=False):
         """Parse a string and return the syntax tree."""
-        tokens = tokenize.generate_tokens(generate_lines(text).next)
+        tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
         return self.parse_tokens(tokens, debug)
 
 
-def generate_lines(text):
-    """Generator that behaves like readline without using StringIO."""
-    for line in text.splitlines(True):
-        yield line
-    while True:
-        yield ""
-
-
 def load_grammar(gt="Grammar.txt", gp=None,
                  save=True, force=False, logger=None):
     """Load the grammar (maybe from a pickle)."""
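A quick way to see the driver fix in action is to hand parse_string() source
containing a form feed, as the new test below does. An illustrative snippet
(Driver, pygram, and pytree are real lib2to3 APIs; the snippet itself is not
part of the commit):

    from lib2to3 import pygram, pytree
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    # Before this commit the \x0c produced a spurious extra "line" that
    # confused tokenization (issue #11250); now both statements parse, and
    # the form feed is preserved in the second statement's prefix.
    tree = d.parse_string("print 1\n\x0cprint 2\n")
    print(tree)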
Lib/lib2to3/tests/test_parser.py
@@ -19,6 +19,16 @@ import sys
 # Local imports
 from lib2to3.pgen2 import tokenize
 from ..pgen2.parse import ParseError
+from lib2to3.pygram import python_symbols as syms
 
 
+class TestDriver(support.TestCase):
+
+    def test_formfeed(self):
+        s = """print 1\n\x0Cprint 2\n"""
+        t = driver.parse_string(s)
+        self.assertEqual(t.children[0].children[0].type, syms.print_stmt)
+        self.assertEqual(t.children[1].children[0].type, syms.print_stmt)
+
+
 class GrammarTest(support.TestCase):
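A note on the assertions: with pytree.convert, single-child nodes collapse into
their child, so the root's children are two simple_stmt nodes (plus trailing
tokens), and each simple_stmt's first child is the print_stmt being checked.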