Mirror of https://github.com/python/cpython
gh-58032: Do not use argparse.FileType in module CLIs and scripts (GH-113649)
Open and close files manually. This prevents leaking open files, premature creation of output files, and accidental closing of stdin and stdout.
This commit is contained in:
parent
a8629816c6
commit
b3d2427f22
14
Lib/ast.py
14
Lib/ast.py
|
@@ -1812,8 +1812,7 @@ def main():
|
||||||
import argparse
|
import argparse
|
||||||
|
|
||||||
parser = argparse.ArgumentParser(prog='python -m ast')
|
parser = argparse.ArgumentParser(prog='python -m ast')
|
||||||
parser.add_argument('infile', type=argparse.FileType(mode='rb'), nargs='?',
|
parser.add_argument('infile', nargs='?', default='-',
|
||||||
default='-',
|
|
||||||
help='the file to parse; defaults to stdin')
|
help='the file to parse; defaults to stdin')
|
||||||
parser.add_argument('-m', '--mode', default='exec',
|
parser.add_argument('-m', '--mode', default='exec',
|
||||||
choices=('exec', 'single', 'eval', 'func_type'),
|
choices=('exec', 'single', 'eval', 'func_type'),
|
||||||
|
@@ -1827,9 +1826,14 @@ def main():
|
||||||
help='indentation of nodes (number of spaces)')
|
help='indentation of nodes (number of spaces)')
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
with args.infile as infile:
|
if args.infile == '-':
|
||||||
source = infile.read()
|
name = '<stdin>'
|
||||||
tree = parse(source, args.infile.name, args.mode, type_comments=args.no_type_comments)
|
source = sys.stdin.buffer.read()
|
||||||
|
else:
|
||||||
|
name = args.infile
|
||||||
|
with open(args.infile, 'rb') as infile:
|
||||||
|
source = infile.read()
|
||||||
|
tree = parse(source, name, args.mode, type_comments=args.no_type_comments)
|
||||||
print(dump(tree, include_attributes=args.include_attributes, indent=args.indent))
|
print(dump(tree, include_attributes=args.include_attributes, indent=args.indent))
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|
13
Lib/dis.py
13
Lib/dis.py
|
@@ -1032,11 +1032,16 @@ def main():
|
||||||
help='show inline caches')
|
help='show inline caches')
|
||||||
parser.add_argument('-O', '--show-offsets', action='store_true',
|
parser.add_argument('-O', '--show-offsets', action='store_true',
|
||||||
help='show instruction offsets')
|
help='show instruction offsets')
|
||||||
parser.add_argument('infile', type=argparse.FileType('rb'), nargs='?', default='-')
|
parser.add_argument('infile', nargs='?', default='-')
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
with args.infile as infile:
|
if args.infile == '-':
|
||||||
source = infile.read()
|
name = '<stdin>'
|
||||||
code = compile(source, args.infile.name, "exec")
|
source = sys.stdin.buffer.read()
|
||||||
|
else:
|
||||||
|
name = args.infile
|
||||||
|
with open(args.infile, 'rb') as infile:
|
||||||
|
source = infile.read()
|
||||||
|
code = compile(source, name, "exec")
|
||||||
dis(code, show_caches=args.show_caches, show_offsets=args.show_offsets)
|
dis(code, show_caches=args.show_caches, show_offsets=args.show_offsets)
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|
|
@@ -13,7 +13,6 @@ Usage::
|
||||||
import argparse
|
import argparse
|
||||||
import json
|
import json
|
||||||
import sys
|
import sys
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
@@ -22,11 +21,9 @@ def main():
|
||||||
'to validate and pretty-print JSON objects.')
|
'to validate and pretty-print JSON objects.')
|
||||||
parser = argparse.ArgumentParser(prog=prog, description=description)
|
parser = argparse.ArgumentParser(prog=prog, description=description)
|
||||||
parser.add_argument('infile', nargs='?',
|
parser.add_argument('infile', nargs='?',
|
||||||
type=argparse.FileType(encoding="utf-8"),
|
|
||||||
help='a JSON file to be validated or pretty-printed',
|
help='a JSON file to be validated or pretty-printed',
|
||||||
default=sys.stdin)
|
default='-')
|
||||||
parser.add_argument('outfile', nargs='?',
|
parser.add_argument('outfile', nargs='?',
|
||||||
type=Path,
|
|
||||||
help='write the output of infile to outfile',
|
help='write the output of infile to outfile',
|
||||||
default=None)
|
default=None)
|
||||||
parser.add_argument('--sort-keys', action='store_true', default=False,
|
parser.add_argument('--sort-keys', action='store_true', default=False,
|
||||||
|
@@ -59,23 +56,30 @@ def main():
|
||||||
dump_args['indent'] = None
|
dump_args['indent'] = None
|
||||||
dump_args['separators'] = ',', ':'
|
dump_args['separators'] = ',', ':'
|
||||||
|
|
||||||
with options.infile as infile:
|
try:
|
||||||
|
if options.infile == '-':
|
||||||
|
infile = sys.stdin
|
||||||
|
else:
|
||||||
|
infile = open(options.infile, encoding='utf-8')
|
||||||
try:
|
try:
|
||||||
if options.json_lines:
|
if options.json_lines:
|
||||||
objs = (json.loads(line) for line in infile)
|
objs = (json.loads(line) for line in infile)
|
||||||
else:
|
else:
|
||||||
objs = (json.load(infile),)
|
objs = (json.load(infile),)
|
||||||
|
finally:
|
||||||
|
if infile is not sys.stdin:
|
||||||
|
infile.close()
|
||||||
|
|
||||||
if options.outfile is None:
|
if options.outfile is None:
|
||||||
out = sys.stdout
|
outfile = sys.stdout
|
||||||
else:
|
else:
|
||||||
out = options.outfile.open('w', encoding='utf-8')
|
outfile = open(options.outfile, 'w', encoding='utf-8')
|
||||||
with out as outfile:
|
with outfile:
|
||||||
for obj in objs:
|
for obj in objs:
|
||||||
json.dump(obj, outfile, **dump_args)
|
json.dump(obj, outfile, **dump_args)
|
||||||
outfile.write('\n')
|
outfile.write('\n')
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
raise SystemExit(e)
|
raise SystemExit(e)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
|
|
@@ -165,8 +165,8 @@ decimal_using_bytecode = _using_bytecode(decimal)
|
||||||
|
|
||||||
def main(import_, options):
|
def main(import_, options):
|
||||||
if options.source_file:
|
if options.source_file:
|
||||||
with options.source_file:
|
with open(options.source_file, 'r', encoding='utf-8') as source_file:
|
||||||
prev_results = json.load(options.source_file)
|
prev_results = json.load(source_file)
|
||||||
else:
|
else:
|
||||||
prev_results = {}
|
prev_results = {}
|
||||||
__builtins__.__import__ = import_
|
__builtins__.__import__ = import_
|
||||||
|
@@ -218,8 +218,8 @@ def main(import_, options):
|
||||||
new_result/old_result)
|
new_result/old_result)
|
||||||
print(benchmark_name, ':', result)
|
print(benchmark_name, ':', result)
|
||||||
if options.dest_file:
|
if options.dest_file:
|
||||||
with options.dest_file:
|
with open(options.dest_file, 'w', encoding='utf-8') as dest_file:
|
||||||
json.dump(new_results, options.dest_file, indent=2)
|
json.dump(new_results, dest_file, indent=2)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
@@ -229,11 +229,9 @@ if __name__ == '__main__':
|
||||||
parser.add_argument('-b', '--builtin', dest='builtin', action='store_true',
|
parser.add_argument('-b', '--builtin', dest='builtin', action='store_true',
|
||||||
default=False, help="use the built-in __import__")
|
default=False, help="use the built-in __import__")
|
||||||
parser.add_argument('-r', '--read', dest='source_file',
|
parser.add_argument('-r', '--read', dest='source_file',
|
||||||
type=argparse.FileType('r'),
|
|
||||||
help='file to read benchmark data from to compare '
|
help='file to read benchmark data from to compare '
|
||||||
'against')
|
'against')
|
||||||
parser.add_argument('-w', '--write', dest='dest_file',
|
parser.add_argument('-w', '--write', dest='dest_file',
|
||||||
type=argparse.FileType('w'),
|
|
||||||
help='file to write benchmark data to')
|
help='file to write benchmark data to')
|
||||||
parser.add_argument('--benchmark', dest='benchmark',
|
parser.add_argument('--benchmark', dest='benchmark',
|
||||||
help='specific benchmark to run')
|
help='specific benchmark to run')
|
||||||
|
|
|
@@ -41,25 +41,24 @@ def main() -> None:
|
||||||
description="Generate the Lib/keywords.py file from the grammar."
|
description="Generate the Lib/keywords.py file from the grammar."
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"grammar", type=str, help="The file with the grammar definition in PEG format"
|
"grammar", help="The file with the grammar definition in PEG format"
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"tokens_file", type=argparse.FileType("r"), help="The file with the token definitions"
|
"tokens_file", help="The file with the token definitions"
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"keyword_file",
|
"keyword_file",
|
||||||
type=argparse.FileType("w"),
|
|
||||||
help="The path to write the keyword definitions",
|
help="The path to write the keyword definitions",
|
||||||
)
|
)
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
grammar, _, _ = build_parser(args.grammar)
|
grammar, _, _ = build_parser(args.grammar)
|
||||||
with args.tokens_file as tok_file:
|
with open(args.tokens_file) as tok_file:
|
||||||
all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
|
all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
|
||||||
gen = CParserGenerator(grammar, all_tokens, exact_tok, non_exact_tok, file=None)
|
gen = CParserGenerator(grammar, all_tokens, exact_tok, non_exact_tok, file=None)
|
||||||
gen.collect_rules()
|
gen.collect_rules()
|
||||||
|
|
||||||
with args.keyword_file as thefile:
|
with open(args.keyword_file, 'w') as thefile:
|
||||||
all_keywords = sorted(list(gen.keywords.keys()))
|
all_keywords = sorted(list(gen.keywords.keys()))
|
||||||
all_soft_keywords = sorted(gen.soft_keywords)
|
all_soft_keywords = sorted(gen.soft_keywords)
|
||||||
|
|
||||||
|
|
|
@@ -1154,12 +1154,13 @@ def output_markdown(
|
||||||
print("Stats gathered on:", date.today(), file=out)
|
print("Stats gathered on:", date.today(), file=out)
|
||||||
|
|
||||||
|
|
||||||
def output_stats(inputs: list[Path], json_output=TextIO | None):
|
def output_stats(inputs: list[Path], json_output=str | None):
|
||||||
match len(inputs):
|
match len(inputs):
|
||||||
case 1:
|
case 1:
|
||||||
data = load_raw_data(Path(inputs[0]))
|
data = load_raw_data(Path(inputs[0]))
|
||||||
if json_output is not None:
|
if json_output is not None:
|
||||||
save_raw_data(data, json_output) # type: ignore
|
with open(json_output, 'w', encoding='utf-8') as f:
|
||||||
|
save_raw_data(data, f) # type: ignore
|
||||||
stats = Stats(data)
|
stats = Stats(data)
|
||||||
output_markdown(sys.stdout, LAYOUT, stats)
|
output_markdown(sys.stdout, LAYOUT, stats)
|
||||||
case 2:
|
case 2:
|
||||||
|
@@ -1195,7 +1196,6 @@ def main():
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"--json-output",
|
"--json-output",
|
||||||
nargs="?",
|
nargs="?",
|
||||||
type=argparse.FileType("w"),
|
|
||||||
help="Output complete raw results to the given JSON file.",
|
help="Output complete raw results to the given JSON file.",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@@ -23,7 +23,7 @@ parser = argparse.ArgumentParser(
|
||||||
)
|
)
|
||||||
parser.add_argument("srcdir", help="OpenSSL source directory")
|
parser.add_argument("srcdir", help="OpenSSL source directory")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
"output", nargs="?", type=argparse.FileType("w"), default=sys.stdout
|
"output", nargs="?", default=None
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@@ -126,8 +126,13 @@ def main():
|
||||||
lines.append("")
|
lines.append("")
|
||||||
lines.extend(gen_error_codes(args))
|
lines.extend(gen_error_codes(args))
|
||||||
|
|
||||||
for line in lines:
|
if args.output is None:
|
||||||
args.output.write(line + "\n")
|
for line in lines:
|
||||||
|
print(line)
|
||||||
|
else:
|
||||||
|
with open(args.output, 'w') as output:
|
||||||
|
for line in lines:
|
||||||
|
print(line, file=output)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|
Loading…
Reference in New Issue