Fixes #10639: reindent.py should not convert newlines

Backport of changeset 070dc6e359fb: reindent.py will now use the newline convention detected in the original file, and will report an error if mixed newlines are encountered.
This commit is contained in:
Jason R. Coombs 2011-07-27 14:05:37 -04:00
parent 0cf7e25c28
commit fee7745ebb
2 changed files with 20 additions and 3 deletions

View File

@@ -143,6 +143,12 @@ Build
functions (*BSD and OS X). Also add new stat file flags for OS X
(UF_HIDDEN and UF_COMPRESSED).
Tools/Demos
-----------
- Issue #10639: reindent.py no longer converts newlines and will raise
an error if attempting to convert a file with mixed newlines.
Tests
-----

View File

@@ -35,7 +35,7 @@ tabnanny.py, reindent should do a good job.
The backup file is a copy of the one that is being reindented. The ".bak"
file is generated with shutil.copy(), but some corner cases regarding
user/group and permissions could leave the backup file more readable that
user/group and permissions could leave the backup file more readable than
you'd prefer. You can always use the --nobackup option to prevent this.
"""
@@ -44,6 +44,7 @@ __version__ = "1"
import tokenize
import os, shutil
import sys
import io
verbose = 0
recurse = 0
@@ -108,13 +109,19 @@ def check(file):
if verbose:
print "checking", file, "...",
try:
f = open(file)
f = io.open(file)
except IOError, msg:
errprint("%s: I/O Error: %s" % (file, str(msg)))
return
r = Reindenter(f)
f.close()
newline = r.newlines
if isinstance(newline, tuple):
errprint("%s: mixed newlines detected; cannot process file" % file)
return
if r.run():
if verbose:
print "changed."
@@ -126,7 +133,7 @@ def check(file):
shutil.copyfile(file, bak)
if verbose:
print "backed up", file, "to", bak
f = open(file, "w")
f = io.open(file, "w", newline=newline)
r.write(f)
f.close()
if verbose:
@@ -173,6 +180,10 @@ class Reindenter:
# indeed, they're our headache!
self.stats = []
# Save the newlines found in the file so they can be used to
# create output without mutating the newlines.
self.newlines = f.newlines
def run(self):
tokenize.tokenize(self.getline, self.tokeneater)
# Remove trailing empty lines.