"""Generate the main interpreter switch.

Reads the instruction definitions from bytecodes.c.

Writes the cases to generated_cases.c.h, which is #included in ceval.c.
"""

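# Typical invocation, shown as a sketch: the script path within the CPython
# tree and the file names are assumptions that mirror DEFAULT_INPUT and
# DEFAULT_OUTPUT below.
#
#     python Tools/cases_generator/tier1_generator.py \
#         -o Python/generated_cases.c.h Python/bytecodes.c
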
import argparse

from analyzer import (
    Analysis,
    Instruction,
    Uop,
    Part,
    analyze_files,
    Skip,
    Flush,
    analysis_error,
    StackItem,
)
from generators_common import (
    DEFAULT_INPUT,
    ROOT,
    write_header,
    type_and_null,
    Emitter,
    TokenIterator,
)
from cwriter import CWriter
from typing import TextIO
from stack import Local, Stack, StackError, get_stack_effect, Storage


DEFAULT_OUTPUT = ROOT / "Python/generated_cases.c.h"

FOOTER = "#undef TIER_ONE\n"


def declare_variable(var: StackItem, out: CWriter) -> None:
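    """Declare a single stack item as a C variable.

    Items guarded by a condition are initialized to their null value;
    unconditional items are declared without an initializer.
    """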
    type, null = type_and_null(var)
    space = " " if type[-1].isalnum() else ""
    if var.condition:
        out.emit(f"{type}{space}{var.name} = {null};\n")
    else:
        out.emit(f"{type}{space}{var.name};\n")


def declare_variables(inst: Instruction, out: CWriter) -> None:
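    """Declare each stack variable used by `inst`, at most once.

    Names marked "unused" are skipped, and a name is only declared the
    first time it appears among the uops' inputs and outputs.
    """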
    try:
        stack = get_stack_effect(inst)
    except StackError as ex:
        raise analysis_error(ex.args[0], inst.where) from None
    required = set(stack.defined)
    required.discard("unused")
    for part in inst.parts:
        if not isinstance(part, Uop):
            continue
        for var in part.stack.inputs:
            if var.name in required:
                required.remove(var.name)
                declare_variable(var, out)
        for var in part.stack.outputs:
            if var.name in required:
                required.remove(var.name)
                declare_variable(var, out)


def write_uop(
    uop: Part,
    emitter: Emitter,
    offset: int,
    stack: Stack,
    inst: Instruction,
    braces: bool,
) -> tuple[int, Stack]:
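    """Emit the code for one part of an instruction.

    Skip parts only advance the cache offset and Flush parts only flush
    the stack; for a real uop, named cache entries are read into locals
    before the uop's body is emitted. Returns the updated offset and stack.
    """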
    # out.emit(stack.as_comment() + "\n")
    if isinstance(uop, Skip):
        entries = "entries" if uop.size > 1 else "entry"
        emitter.emit(f"/* Skip {uop.size} cache {entries} */\n")
        return (offset + uop.size), stack
    if isinstance(uop, Flush):
        emitter.emit(f"// flush\n")
        stack.flush(emitter.out)
        return offset, stack
    try:
        locals: dict[str, Local] = {}
        emitter.out.start_line()
        if braces:
            emitter.out.emit(f"// {uop.name}\n")
            emitter.emit("{\n")
        code_list, storage = Storage.for_uop(stack, uop)
        emitter._print_storage(storage)
        for code in code_list:
            emitter.emit(code)

        for cache in uop.caches:
            if cache.name != "unused":
                if cache.size == 4:
                    type = "PyObject *"
                    reader = "read_obj"
                else:
                    type = f"uint{cache.size*16}_t "
                    reader = f"read_u{cache.size*16}"
                emitter.emit(
                    f"{type}{cache.name} = {reader}(&this_instr[{offset}].cache);\n"
                )
                if inst.family is None:
                    emitter.emit(f"(void){cache.name};\n")
            offset += cache.size

        storage = emitter.emit_tokens(uop, storage, inst)
        if braces:
            emitter.out.start_line()
            emitter.emit("}\n")
        # emitter.emit(stack.as_comment() + "\n")
        return offset, storage.stack
    except StackError as ex:
        raise analysis_error(ex.args[0], uop.body[0])


def uses_this(inst: Instruction) -> bool:
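    """Return True if the instruction needs the `this_instr` pointer.

    That is the case when the analyzer flags `needs_this` or when any
    uop reads a named (non-"unused") cache entry.
    """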
    if inst.properties.needs_this:
        return True
    for uop in inst.parts:
        if not isinstance(uop, Uop):
            continue
        for cache in uop.caches:
            if cache.name != "unused":
                return True
    return False


def generate_tier1(
    filenames: list[str], analysis: Analysis, outfile: TextIO, lines: bool
) -> None:
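    """Write the Tier 1 interpreter cases to `outfile`.

    Emits the file header, one TARGET(...) block per instruction (cache
    reads, per-uop code, stack flush, and a DISPATCH() when the instruction
    does not always exit), then the FOOTER.
    """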
    write_header(__file__, filenames, outfile)
    outfile.write(
        """
#ifdef TIER_TWO
    #error "This file is for Tier 1 only"
#endif
#define TIER_ONE 1
"""
    )
    out = CWriter(outfile, 2, lines)
    emitter = Emitter(out)
    out.emit("\n")
    for name, inst in sorted(analysis.instructions.items()):
        needs_this = uses_this(inst)
        out.emit("\n")
        out.emit(f"TARGET({name}) {{\n")
        unused_guard = "(void)this_instr;\n" if inst.family is None else ""
        if inst.properties.needs_prev:
            out.emit(f"_Py_CODEUNIT* const prev_instr = frame->instr_ptr;\n")
        if needs_this and not inst.is_target:
            out.emit(f"_Py_CODEUNIT* const this_instr = frame->instr_ptr = next_instr;\n")
            out.emit(unused_guard)
        else:
            out.emit(f"frame->instr_ptr = next_instr;\n")
        out.emit(f"next_instr += {inst.size};\n")
        out.emit(f"INSTRUCTION_STATS({name});\n")
        if inst.is_target:
            out.emit(f"PREDICTED({name});\n")
            if needs_this:
                out.emit(f"_Py_CODEUNIT* const this_instr = next_instr - {inst.size};\n")
                out.emit(unused_guard)
        if inst.family is not None:
            out.emit(
                f"static_assert({inst.family.size} == {inst.size-1}"
                ', "incorrect cache size");\n'
            )
        declare_variables(inst, out)
        offset = 1  # The instruction itself
        stack = Stack()
        for part in inst.parts:
            # Only emit braces if more than one uop
            insert_braces = len([p for p in inst.parts if isinstance(p, Uop)]) > 1
            offset, stack = write_uop(part, emitter, offset, stack, inst, insert_braces)
        out.start_line()
        stack.flush(out)
        if not inst.parts[-1].properties.always_exits:
            out.emit("DISPATCH();\n")
        out.start_line()
        out.emit("}")
        out.emit("\n")
    outfile.write(FOOTER)


arg_parser = argparse.ArgumentParser(
    description="Generate the code for the interpreter switch.",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)

arg_parser.add_argument(
    "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT
)

arg_parser.add_argument(
    "-l", "--emit-line-directives", help="Emit #line directives", action="store_true"
)

arg_parser.add_argument(
    "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)"
)


def generate_tier1_from_files(
    filenames: list[str], outfilename: str, lines: bool
) -> None:
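    """Analyze `filenames` and write the generated cases to `outfilename`."""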
    data = analyze_files(filenames)
    with open(outfilename, "w") as outfile:
        generate_tier1(filenames, data, outfile, lines)


if __name__ == "__main__":
    args = arg_parser.parse_args()
    if len(args.input) == 0:
        args.input.append(DEFAULT_INPUT)
    data = analyze_files(args.input)
    with open(args.output, "w") as outfile:
        generate_tier1(args.input, data, outfile, args.emit_line_directives)