|
|
|
"""Generate Lib/keyword.py from the Grammar and Tokens files using pgen"""
|
|
|
|
|
|
|
|
import argparse
|
|
|
|
|
|
|
|
from .build import build_parser, generate_token_definitions
|
|
|
|
from .c_generator import CParserGenerator
|
|
|
|
|
|
|
|
# Template for the generated Lib/keyword.py. The two placeholders are filled
# in by main(); everything else is written out verbatim, so this literal must
# contain nothing but the final file's text (a raw string so the backslash
# line-continuations in the usage example survive intact).
TEMPLATE = r'''
"""Keywords (from "Grammar/python.gram")

This file is automatically generated; please don't muck it up!

To update the symbols in this file, 'cd' to the top directory of
the python source tree and run:

    PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \
        Grammar/Grammar \
        Grammar/Tokens \
        Lib/keyword.py

Alternatively, you can run 'make regen-keyword'.
"""

__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"]

kwlist = [
{keywords}
]

softkwlist = [
{soft_keywords}
]

iskeyword = frozenset(kwlist).__contains__
issoftkeyword = frozenset(softkwlist).__contains__
'''.lstrip()
|
|
|
|
|
|
|
|
EXTRA_KEYWORDS = ["async", "await"]
|
|
|
|
|
|
|
|
|
|
|
|
def main() -> None:
    """Regenerate a keyword file from the PEG grammar and token definitions.

    Command-line arguments (see the argparse definitions below):
    grammar, tokens_file, keyword_file.  Writes the rendered TEMPLATE to
    *keyword_file* and closes both file arguments.
    """
    parser = argparse.ArgumentParser(
        # Fixed: the tool writes Lib/keyword.py, not "keywords.py".
        description="Generate the Lib/keyword.py file from the grammar."
    )
    parser.add_argument(
        "grammar", type=str, help="The file with the grammar definition in PEG format"
    )
    parser.add_argument(
        "tokens_file", type=argparse.FileType("r"), help="The file with the token definitions"
    )
    parser.add_argument(
        "keyword_file",
        type=argparse.FileType("w"),
        help="The path to write the keyword definitions",
    )
    args = parser.parse_args()

    grammar, _, _ = build_parser(args.grammar)
    with args.tokens_file as tok_file:
        all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
    # Walking the grammar populates the visitor's keyword caches as a side
    # effect; no C code is emitted (file=None).  The annotation previously
    # named ParserGenerator, which is never imported here.
    gen: CParserGenerator = CParserGenerator(
        grammar, all_tokens, exact_tok, non_exact_tok, file=None
    )
    gen.collect_todo()

    with args.keyword_file as thefile:
        all_keywords = sorted(list(gen.callmakervisitor.keyword_cache.keys()) + EXTRA_KEYWORDS)
        all_soft_keywords = sorted(gen.callmakervisitor.soft_keywords)

        # The template puts {keywords}/{soft_keywords} at column 0, so the
        # 4-space indentation of each list entry must come from these strings.
        keywords = "    " + ",\n    ".join(map(repr, all_keywords))
        soft_keywords = "    " + ",\n    ".join(map(repr, all_soft_keywords))
        thefile.write(TEMPLATE.format(keywords=keywords, soft_keywords=soft_keywords))
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
|
|
|
|
main()
|