gh-58032: Do not use argparse.FileType in module CLIs and scripts (GH-113649)

Open and close files manually. This prevents leaking open files,
premature creation of output files, and accidental closing of stdin
and stdout.
Serhiy Storchaka 2024-01-10 15:07:19 +02:00 committed by GitHub
parent a8629816c6
commit b3d2427f22
7 changed files with 56 additions and 41 deletions
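
The pattern these changes converge on, shown below as a minimal standalone sketch (a hypothetical example, not code taken verbatim from any of the changed files): accept a plain path argument with '-' meaning standard input, and open the file only at the point of use, so it is closed promptly and sys.stdin is never closed by the script.

# Minimal sketch of the pattern adopted by this commit (hypothetical,
# not copied from the patch): treat '-' as stdin, open real files only
# when they are read, and never close sys.stdin ourselves.
import argparse
import sys

parser = argparse.ArgumentParser()
parser.add_argument('infile', nargs='?', default='-')
args = parser.parse_args()

if args.infile == '-':
    data = sys.stdin.buffer.read()        # stdin stays usable afterwards
else:
    with open(args.infile, 'rb') as f:    # closed promptly, even on error
        data = f.read()
print(len(data), 'bytes read')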


@@ -1812,8 +1812,7 @@ def main():
     import argparse

     parser = argparse.ArgumentParser(prog='python -m ast')
-    parser.add_argument('infile', type=argparse.FileType(mode='rb'), nargs='?',
-                        default='-',
+    parser.add_argument('infile', nargs='?', default='-',
                         help='the file to parse; defaults to stdin')
     parser.add_argument('-m', '--mode', default='exec',
                         choices=('exec', 'single', 'eval', 'func_type'),
@@ -1827,9 +1826,14 @@ def main():
                         help='indentation of nodes (number of spaces)')
     args = parser.parse_args()

-    with args.infile as infile:
-        source = infile.read()
-    tree = parse(source, args.infile.name, args.mode, type_comments=args.no_type_comments)
+    if args.infile == '-':
+        name = '<stdin>'
+        source = sys.stdin.buffer.read()
+    else:
+        name = args.infile
+        with open(args.infile, 'rb') as infile:
+            source = infile.read()
+    tree = parse(source, name, args.mode, type_comments=args.no_type_comments)
     print(dump(tree, include_attributes=args.include_attributes, indent=args.indent))

 if __name__ == '__main__':
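
For context, here is a small hypothetical demonstration (not part of the patch) of the stdin problem the old ast CLI had: argparse applies the type converter to a string default, so with FileType and default='-' the parsed value is sys.stdin itself, and the old `with args.infile` block closed the interpreter's standard input. (The ast CLI used FileType(mode='rb'); plain FileType() is used here only to keep the illustration short.)

# Hypothetical illustration, not from the patch: FileType plus default='-'
# hands back sys.stdin, so a "with" block on the parsed value closes stdin
# for the rest of the process.
import argparse
import sys

parser = argparse.ArgumentParser()
parser.add_argument('infile', nargs='?', type=argparse.FileType(), default='-')
args = parser.parse_args([])    # no argument given -> default '-' -> sys.stdin

with args.infile:
    pass

print(sys.stdin.closed)         # True; any later read from stdin would fail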


@@ -1032,11 +1032,16 @@ def main():
                         help='show inline caches')
     parser.add_argument('-O', '--show-offsets', action='store_true',
                         help='show instruction offsets')
-    parser.add_argument('infile', type=argparse.FileType('rb'), nargs='?', default='-')
+    parser.add_argument('infile', nargs='?', default='-')
     args = parser.parse_args()
-    with args.infile as infile:
-        source = infile.read()
-    code = compile(source, args.infile.name, "exec")
+    if args.infile == '-':
+        name = '<stdin>'
+        source = sys.stdin.buffer.read()
+    else:
+        name = args.infile
+        with open(args.infile, 'rb') as infile:
+            source = infile.read()
+    code = compile(source, name, "exec")
     dis(code, show_caches=args.show_caches, show_offsets=args.show_offsets)

 if __name__ == "__main__":


@@ -13,7 +13,6 @@ Usage::
 import argparse
 import json
 import sys
-from pathlib import Path


 def main():
@@ -22,11 +21,9 @@ def main():
                   'to validate and pretty-print JSON objects.')
     parser = argparse.ArgumentParser(prog=prog, description=description)
     parser.add_argument('infile', nargs='?',
-                        type=argparse.FileType(encoding="utf-8"),
                         help='a JSON file to be validated or pretty-printed',
-                        default=sys.stdin)
+                        default='-')
     parser.add_argument('outfile', nargs='?',
-                        type=Path,
                         help='write the output of infile to outfile',
                         default=None)
     parser.add_argument('--sort-keys', action='store_true', default=False,
@@ -59,23 +56,30 @@ def main():
         dump_args['indent'] = None
         dump_args['separators'] = ',', ':'

-    with options.infile as infile:
+    try:
+        if options.infile == '-':
+            infile = sys.stdin
+        else:
+            infile = open(options.infile, encoding='utf-8')
         try:
             if options.json_lines:
                 objs = (json.loads(line) for line in infile)
             else:
                 objs = (json.load(infile),)
+        finally:
+            if infile is not sys.stdin:
+                infile.close()

-            if options.outfile is None:
-                out = sys.stdout
-            else:
-                out = options.outfile.open('w', encoding='utf-8')
-            with out as outfile:
-                for obj in objs:
-                    json.dump(obj, outfile, **dump_args)
-                    outfile.write('\n')
-        except ValueError as e:
-            raise SystemExit(e)
+        if options.outfile is None:
+            outfile = sys.stdout
+        else:
+            outfile = open(options.outfile, 'w', encoding='utf-8')
+        with outfile:
+            for obj in objs:
+                json.dump(obj, outfile, **dump_args)
+                outfile.write('\n')
+    except ValueError as e:
+        raise SystemExit(e)


 if __name__ == '__main__':
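
One effect of dropping FileType for json.tool's output argument, illustrated with a hypothetical snippet (the file name result.json is made up): FileType('w') opens, and therefore creates or truncates, the output file during parse_args(), before any input has been read or validated. The rewritten code opens the output file only after the input JSON has parsed successfully.

# Hypothetical illustration of premature output-file creation with
# argparse.FileType('w'): the file is created (or truncated) while the
# arguments are parsed, even if the program later fails before writing.
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument('outfile', type=argparse.FileType('w'))
args = parser.parse_args(['result.json'])   # result.json is created right here

print(os.path.exists('result.json'))        # True, although nothing was written
args.outfile.close()                        # the handle also has to be closed by hand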


@@ -165,8 +165,8 @@ decimal_using_bytecode = _using_bytecode(decimal)

 def main(import_, options):
     if options.source_file:
-        with options.source_file:
-            prev_results = json.load(options.source_file)
+        with open(options.source_file, 'r', encoding='utf-8') as source_file:
+            prev_results = json.load(source_file)
     else:
         prev_results = {}
     __builtins__.__import__ = import_
@@ -218,8 +218,8 @@ def main(import_, options):
                                                  new_result/old_result)
         print(benchmark_name, ':', result)
     if options.dest_file:
-        with options.dest_file:
-            json.dump(new_results, options.dest_file, indent=2)
+        with open(options.dest_file, 'w', encoding='utf-8') as dest_file:
+            json.dump(new_results, dest_file, indent=2)


 if __name__ == '__main__':
@@ -229,11 +229,9 @@ if __name__ == '__main__':
     parser.add_argument('-b', '--builtin', dest='builtin', action='store_true',
                         default=False, help="use the built-in __import__")
     parser.add_argument('-r', '--read', dest='source_file',
-                        type=argparse.FileType('r'),
                         help='file to read benchmark data from to compare '
                              'against')
     parser.add_argument('-w', '--write', dest='dest_file',
-                        type=argparse.FileType('w'),
                         help='file to write benchmark data to')
     parser.add_argument('--benchmark', dest='benchmark',
                         help='specific benchmark to run')
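
The importbench change also shows the leak angle, sketched below with a hypothetical snippet (not from the patch): FileType('r') has already opened the file by the time parse_args() returns, so every code path, including early errors, has to remember to close it; passing a plain path and opening it in a `with` block avoids that.

# Hypothetical illustration of the leak risk with argparse.FileType('r'):
# the file object is created during argument parsing, long before the code
# that is supposed to use and close it runs.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-r', '--read', dest='source_file', type=argparse.FileType('r'))
args = parser.parse_args(['--read', __file__])   # any existing file works here

print(args.source_file.closed)   # False: already open before the main logic runs
args.source_file.close()         # easy to miss on error paths -> leaked descriptor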


@@ -41,25 +41,24 @@ def main() -> None:
         description="Generate the Lib/keywords.py file from the grammar."
     )
     parser.add_argument(
-        "grammar", type=str, help="The file with the grammar definition in PEG format"
+        "grammar", help="The file with the grammar definition in PEG format"
     )
     parser.add_argument(
-        "tokens_file", type=argparse.FileType("r"), help="The file with the token definitions"
+        "tokens_file", help="The file with the token definitions"
     )
     parser.add_argument(
         "keyword_file",
-        type=argparse.FileType("w"),
         help="The path to write the keyword definitions",
     )
     args = parser.parse_args()

     grammar, _, _ = build_parser(args.grammar)
-    with args.tokens_file as tok_file:
+    with open(args.tokens_file) as tok_file:
         all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
     gen = CParserGenerator(grammar, all_tokens, exact_tok, non_exact_tok, file=None)
     gen.collect_rules()

-    with args.keyword_file as thefile:
+    with open(args.keyword_file, 'w') as thefile:
         all_keywords = sorted(list(gen.keywords.keys()))
         all_soft_keywords = sorted(gen.soft_keywords)


@@ -1154,12 +1154,13 @@ def output_markdown(
     print("Stats gathered on:", date.today(), file=out)


-def output_stats(inputs: list[Path], json_output=TextIO | None):
+def output_stats(inputs: list[Path], json_output=str | None):
     match len(inputs):
         case 1:
             data = load_raw_data(Path(inputs[0]))
             if json_output is not None:
-                save_raw_data(data, json_output)  # type: ignore
+                with open(json_output, 'w', encoding='utf-8') as f:
+                    save_raw_data(data, f)  # type: ignore
             stats = Stats(data)
             output_markdown(sys.stdout, LAYOUT, stats)
         case 2:
@@ -1195,7 +1196,6 @@ def main():
     parser.add_argument(
         "--json-output",
         nargs="?",
-        type=argparse.FileType("w"),
         help="Output complete raw results to the given JSON file.",
     )


@@ -23,7 +23,7 @@ parser = argparse.ArgumentParser(
 )
 parser.add_argument("srcdir", help="OpenSSL source directory")
 parser.add_argument(
-    "output", nargs="?", type=argparse.FileType("w"), default=sys.stdout
+    "output", nargs="?", default=None
 )
@@ -126,8 +126,13 @@ def main():
     lines.append("")
     lines.extend(gen_error_codes(args))

-    for line in lines:
-        args.output.write(line + "\n")
+    if args.output is None:
+        for line in lines:
+            print(line)
+    else:
+        with open(args.output, 'w') as output:
+            for line in lines:
+                print(line, file=output)


 if __name__ == "__main__":