Merge with 3.2 Issue #10639: reindent.py should not convert newlines.

reindent.py will now use the newline detected in the original file and will report an error if mixed newlines are encountered.
This commit is contained in:
Jason R. Coombs 2011-07-26 11:57:08 -04:00
commit 1322bd4971
2 changed files with 16 additions and 4 deletions

View File

@ -1073,6 +1073,9 @@ Tools/Demos
- Issue #11179: Make ccbench work under Python 3.1 and 2.7 again.
- Issue #10639: reindent.py no longer converts newlines and will raise
an error if attempting to convert a file with mixed newlines.
Extension Modules
-----------------
@ -6661,4 +6664,4 @@ Docs
----
**(For information about older versions, consult the HISTORY file.)**
**(For information about older versions, consult the HISTORY file.)**

View File

@ -35,7 +35,7 @@ tabnanny.py, reindent should do a good job.
The backup file is a copy of the one that is being reindented. The ".bak"
file is generated with shutil.copy(), but some corner cases regarding
user/group and permissions could leave the backup file more readable that
user/group and permissions could leave the backup file more readable than
you'd prefer. You can always use the --nobackup option to prevent this.
"""
@ -109,7 +109,7 @@ def check(file):
if verbose:
print("checking", file, "...", end=' ')
with open(file, 'rb') as f:
with open(file, 'rb') as f:
encoding, _ = tokenize.detect_encoding(f.readline)
try:
with open(file, encoding=encoding) as f:
@ -118,6 +118,11 @@ def check(file):
errprint("%s: I/O Error: %s" % (file, str(msg)))
return
newline = r.newlines
if isinstance(newline, tuple):
errprint("%s: mixed newlines detected; cannot process file" % file)
return
if r.run():
if verbose:
print("changed.")
@ -129,7 +134,7 @@ def check(file):
shutil.copyfile(file, bak)
if verbose:
print("backed up", file, "to", bak)
with open(file, "w", encoding=encoding) as f:
with open(file, "w", encoding=encoding, newline=newline) as f:
r.write(f)
if verbose:
print("wrote new", file)
@ -177,6 +182,10 @@ class Reindenter:
# indeed, they're our headache!
self.stats = []
# Save the newlines found in the file so they can be used to
# create output without mutating the newlines.
self.newlines = f.newlines
def run(self):
tokens = tokenize.generate_tokens(self.getline)
for _token in tokens: