"""Generate the main interpreter switch.
|
2022-11-03 01:31:26 -03:00
|
|
|
|
2022-11-17 21:06:07 -04:00
|
|
|
Reads the instruction definitions from bytecodes.c.
|
|
|
|
Writes the cases to generated_cases.c.h, which is #included in ceval.c.
|
|
|
|
"""
|
2022-11-03 01:31:26 -03:00
|
|
|
|
|
|
|
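
# Illustrative invocation (a sketch only; the real options and defaults are
# defined by arg_parser below):
#
#   python3 <this script> Python/bytecodes.c -o Python/generated_cases.c.h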

import argparse
import contextlib
import dataclasses
import os
import posixpath
import re
import sys
import typing

import lexer as lx
import parser
from parser import StackEffect

HERE = os.path.dirname(__file__)
ROOT = os.path.join(HERE, "../..")
THIS = os.path.relpath(__file__, ROOT).replace(os.path.sep, posixpath.sep)

DEFAULT_INPUT = os.path.relpath(os.path.join(ROOT, "Python/bytecodes.c"))
DEFAULT_OUTPUT = os.path.relpath(os.path.join(ROOT, "Python/generated_cases.c.h"))
DEFAULT_METADATA_OUTPUT = os.path.relpath(
    os.path.join(ROOT, "Include/internal/pycore_opcode_metadata.h")
)
DEFAULT_PYMETADATA_OUTPUT = os.path.relpath(
    os.path.join(ROOT, "Lib/_opcode_metadata.py")
)
DEFAULT_EXECUTOR_OUTPUT = os.path.relpath(
    os.path.join(ROOT, "Python/executor_cases.c.h")
)
BEGIN_MARKER = "// BEGIN BYTECODES //"
END_MARKER = "// END BYTECODES //"
RE_PREDICTED = (
    r"^\s*(?:GO_TO_INSTRUCTION\(|DEOPT_IF\(.*?,\s*)(\w+)\);\s*(?://.*)?$"
)
UNUSED = "unused"
BITS_PER_CODE_UNIT = 16

# Constants used instead of size for macro expansions.
# Note: 1, 2, 4 must match actual cache entry sizes.
OPARG_SIZES = {
    "OPARG_FULL": 0,
    "OPARG_CACHE_1": 1,
    "OPARG_CACHE_2": 2,
    "OPARG_CACHE_4": 4,
    "OPARG_TOP": 5,
    "OPARG_BOTTOM": 6,
}

RESERVED_WORDS = {
    "co_consts": "Use FRAME_CO_CONSTS.",
    "co_names": "Use FRAME_CO_NAMES.",
}

arg_parser = argparse.ArgumentParser(
    description="Generate the code for the interpreter switch.",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
arg_parser.add_argument(
    "-o", "--output", type=str, help="Generated code", default=DEFAULT_OUTPUT
)
arg_parser.add_argument(
    "-m", "--metadata", type=str, help="Generated C metadata", default=DEFAULT_METADATA_OUTPUT
)
arg_parser.add_argument(
    "-p", "--pymetadata", type=str, help="Generated Python metadata", default=DEFAULT_PYMETADATA_OUTPUT
)
arg_parser.add_argument(
    "-l", "--emit-line-directives", help="Emit #line directives", action="store_true"
)
arg_parser.add_argument(
    "input", nargs=argparse.REMAINDER, help="Instruction definition file(s)"
)
arg_parser.add_argument(
    "-e",
    "--executor-cases",
    type=str,
    help="Write executor cases to this file",
    default=DEFAULT_EXECUTOR_OUTPUT,
)


def effect_size(effect: StackEffect) -> tuple[int, str]:
    """Return the 'size' impact of a stack effect.

    Returns a tuple (numeric, symbolic) where:

    - numeric is an int giving the statically analyzable size of the effect
    - symbolic is a string representing a variable effect (e.g. 'oparg*2')

    At most one of these will be non-zero / non-empty.
    """
    if effect.size:
        assert not effect.cond, "Array effects cannot have a condition"
        return 0, effect.size
    elif effect.cond:
        if effect.cond in ("0", "1"):
            return 0, effect.cond
        return 0, f"{maybe_parenthesize(effect.cond)} ? 1 : 0"
    else:
        return 1, ""


def maybe_parenthesize(sym: str) -> str:
    """Add parentheses around a string if it contains an operator.

    An exception is made for '*' which is common and harmless
    in the context where the symbolic size is used.
    """
    if re.match(r"^[\s\w*]+$", sym):
        return sym
    else:
        return f"({sym})"


def list_effect_size(effects: list[StackEffect]) -> tuple[int, str]:
    numeric = 0
    symbolic: list[str] = []
    for effect in effects:
        diff, sym = effect_size(effect)
        numeric += diff
        if sym:
            symbolic.append(maybe_parenthesize(sym))
    return numeric, " + ".join(symbolic)


def string_effect_size(arg: tuple[int, str]) -> str:
    numeric, symbolic = arg
    if numeric and symbolic:
        return f"{numeric} + {symbolic}"
    elif symbolic:
        return symbolic
    else:
        return str(numeric)
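
# Taken together: list_effect_size() sums the fixed sizes and joins the
# symbolic ones, and string_effect_size() renders the pair, so a list with
# two fixed-size effects plus one sized "oparg" renders as "2 + oparg".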


class Formatter:
    """Wraps an output stream with the ability to indent etc."""

    stream: typing.TextIO
    prefix: str
    emit_line_directives: bool = False
    lineno: int  # Next line number, 1-based
    filename: str  # Slightly improved stream.filename
    nominal_lineno: int
    nominal_filename: str

    def __init__(
        self, stream: typing.TextIO, indent: int,
        emit_line_directives: bool = False, comment: str = "//",
    ) -> None:
        self.stream = stream
        self.prefix = " " * indent
        self.emit_line_directives = emit_line_directives
        self.comment = comment
        self.lineno = 1
        self.filename = prettify_filename(self.stream.name)
        self.nominal_lineno = 1
        self.nominal_filename = self.filename

    def write_raw(self, s: str) -> None:
        self.stream.write(s)
        newlines = s.count("\n")
        self.lineno += newlines
        self.nominal_lineno += newlines

    def emit(self, arg: str) -> None:
        if arg:
            self.write_raw(f"{self.prefix}{arg}\n")
        else:
            self.write_raw("\n")

    def set_lineno(self, lineno: int, filename: str) -> None:
        if self.emit_line_directives:
            if lineno != self.nominal_lineno or filename != self.nominal_filename:
                self.emit(f'#line {lineno} "{filename}"')
                self.nominal_lineno = lineno
                self.nominal_filename = filename

    def reset_lineno(self) -> None:
        if self.lineno != self.nominal_lineno or self.filename != self.nominal_filename:
            self.set_lineno(self.lineno + 1, self.filename)

    @contextlib.contextmanager
    def indent(self):
        self.prefix += "    "
        yield
        self.prefix = self.prefix[:-4]

    @contextlib.contextmanager
    def block(self, head: str, tail: str = ""):
        if head:
            self.emit(head + " {")
        else:
            self.emit("{")
        with self.indent():
            yield
        self.emit("}" + tail)

    def stack_adjust(
        self,
        input_effects: list[StackEffect],
        output_effects: list[StackEffect],
    ):
        shrink, isym = list_effect_size(input_effects)
        grow, osym = list_effect_size(output_effects)
        diff = grow - shrink
        if isym and isym != osym:
            self.emit(f"STACK_SHRINK({isym});")
        if diff < 0:
            self.emit(f"STACK_SHRINK({-diff});")
        if diff > 0:
            self.emit(f"STACK_GROW({diff});")
        if osym and osym != isym:
            self.emit(f"STACK_GROW({osym});")

    def declare(self, dst: StackEffect, src: StackEffect | None):
        if dst.name == UNUSED or dst.cond == "0":
            return
        typ = f"{dst.type}" if dst.type else "PyObject *"
        if src:
            cast = self.cast(dst, src)
            init = f" = {cast}{src.name}"
        elif dst.cond:
            init = " = NULL"
        else:
            init = ""
        sepa = "" if typ.endswith("*") else " "
        self.emit(f"{typ}{sepa}{dst.name}{init};")

    def assign(self, dst: StackEffect, src: StackEffect):
        if src.name == UNUSED:
            return
        if src.size:
            # Don't write sized arrays -- it's up to the user code.
            return
        cast = self.cast(dst, src)
        if re.match(r"^REG\(oparg(\d+)\)$", dst.name):
            self.emit(f"Py_XSETREF({dst.name}, {cast}{src.name});")
        else:
            stmt = f"{dst.name} = {cast}{src.name};"
            if src.cond and src.cond != "1":
                if src.cond == "0":
                    # It will not be executed
                    return
                stmt = f"if ({src.cond}) {{ {stmt} }}"
            self.emit(stmt)

    def cast(self, dst: StackEffect, src: StackEffect) -> str:
        return f"({dst.type or 'PyObject *'})" if src.type != dst.type else ""


@dataclasses.dataclass
class InstructionFlags:
    """Construct and manipulate instruction flags"""

    HAS_ARG_FLAG: bool
    HAS_CONST_FLAG: bool
    HAS_NAME_FLAG: bool
    HAS_JUMP_FLAG: bool

    def __post_init__(self):
        self.bitmask = {
            name: (1 << i) for i, name in enumerate(self.names())
        }

    @staticmethod
    def fromInstruction(instr: "AnyInstruction"):
        return InstructionFlags(
            HAS_ARG_FLAG=variable_used(instr, "oparg"),
            HAS_CONST_FLAG=variable_used(instr, "FRAME_CO_CONSTS"),
            HAS_NAME_FLAG=variable_used(instr, "FRAME_CO_NAMES"),
            HAS_JUMP_FLAG=variable_used(instr, "JUMPBY"),
        )

    @staticmethod
    def newEmpty():
        return InstructionFlags(False, False, False, False)

    def add(self, other: "InstructionFlags") -> None:
        for name, value in dataclasses.asdict(other).items():
            if value:
                setattr(self, name, value)

    def names(self, value=None):
        if value is None:
            return dataclasses.asdict(self).keys()
        return [n for n, v in dataclasses.asdict(self).items() if v == value]

    def bitmap(self) -> int:
        flags = 0
        for name in self.names():
            if getattr(self, name):
                flags |= self.bitmask[name]
        return flags

    @classmethod
    def emit_macros(cls, out: Formatter):
        flags = cls.newEmpty()
        for name, value in flags.bitmask.items():
            out.emit(f"#define {name} ({value})")

        for name, value in flags.bitmask.items():
            out.emit(
                f"#define OPCODE_{name[:-len('_FLAG')]}(OP) "
                f"(_PyOpcode_opcode_metadata[OP].flags & ({name}))")


@dataclasses.dataclass
class ActiveCacheEffect:
    """Wraps a CacheEffect that is actually used, in context."""
    effect: parser.CacheEffect
    offset: int


FORBIDDEN_NAMES_IN_UOPS = (
    "resume_with_error",
    "kwnames",
    "next_instr",
    "oparg1",  # Proxy for super-instructions like LOAD_FAST_LOAD_FAST
    "JUMPBY",
    "DISPATCH",
    "INSTRUMENTED_JUMP",
    "throwflag",
    "exception_unwind",
    "import_from",
    "import_name",
    "_PyObject_CallNoArgs",  # Proxy for BEFORE_WITH
)


# Interpreter tiers
TIER_ONE = 1  # Specializing adaptive interpreter (PEP 659)
TIER_TWO = 2  # Experimental tracing interpreter
Tiers: typing.TypeAlias = typing.Literal[1, 2]


@dataclasses.dataclass
class Instruction:
    """An instruction with additional data and code."""

    # Parts of the underlying instruction definition
    inst: parser.InstDef
    kind: typing.Literal["inst", "op"]
    name: str
    block: parser.Block
    block_text: list[str]  # Block.text, less curlies, less PREDICT() calls
    block_line: int  # First line of block in original code

    # Computed by constructor
    always_exits: bool
    cache_offset: int
    cache_effects: list[parser.CacheEffect]
    input_effects: list[StackEffect]
    output_effects: list[StackEffect]
    unmoved_names: frozenset[str]
    instr_fmt: str
    instr_flags: InstructionFlags
    active_caches: list[ActiveCacheEffect]

    # Set later
    family: parser.Family | None = None
    predicted: bool = False

    def __init__(self, inst: parser.InstDef):
        self.inst = inst
        self.kind = inst.kind
        self.name = inst.name
        self.block = inst.block
        self.block_text, self.check_eval_breaker, self.block_line = \
            extract_block_text(self.block)
        self.always_exits = always_exits(self.block_text)
        self.cache_effects = [
            effect for effect in inst.inputs if isinstance(effect, parser.CacheEffect)
        ]
        self.cache_offset = sum(c.size for c in self.cache_effects)
        self.input_effects = [
            effect for effect in inst.inputs if isinstance(effect, StackEffect)
        ]
        self.output_effects = inst.outputs  # For consistency/completeness
        unmoved_names: set[str] = set()
        for ieffect, oeffect in zip(self.input_effects, self.output_effects):
            if ieffect.name == oeffect.name:
                unmoved_names.add(ieffect.name)
            else:
                break
        self.unmoved_names = frozenset(unmoved_names)

        self.instr_flags = InstructionFlags.fromInstruction(inst)

        self.active_caches = []
        offset = 0
        for effect in self.cache_effects:
            if effect.name != UNUSED:
                self.active_caches.append(ActiveCacheEffect(effect, offset))
            offset += effect.size

        if self.instr_flags.HAS_ARG_FLAG:
            fmt = "IB"
        else:
            fmt = "IX"
        if offset:
            fmt += "C" + "0"*(offset-1)
        self.instr_fmt = fmt
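        # Example (derived from the rules above): an instruction that uses
        # oparg and declares cache effects totalling 3 code units gets
        # instr_fmt "IBC00" -- "IB" for the oparg, then "C" plus one "0"
        # per additional cache unit.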

    def is_viable_uop(self) -> bool:
        """Whether this instruction is viable as a uop."""
        dprint: typing.Callable[..., None] = lambda *args, **kwargs: None
        # if self.name.startswith("CALL"):
        #     dprint = print

        if self.name == "EXIT_TRACE":
            return True  # This has 'return frame' but it's okay
        if self.always_exits:
            dprint(f"Skipping {self.name} because it always exits")
            return False
        if len(self.active_caches) > 1:
            # print(f"Skipping {self.name} because it has >1 cache entries")
            return False
        res = True
        for forbidden in FORBIDDEN_NAMES_IN_UOPS:
            # NOTE: To disallow unspecialized uops, use
            # if variable_used(self.inst, forbidden):
            if variable_used_unspecialized(self.inst, forbidden):
                dprint(f"Skipping {self.name} because it uses {forbidden}")
                res = False
        return res

    def write(self, out: Formatter, tier: Tiers = TIER_ONE) -> None:
        """Write one instruction, sans prologue and epilogue."""
        # Write a static assertion that a family's cache size is correct
        if family := self.family:
            if self.name == family.name:
                if cache_size := family.size:
                    out.emit(
                        f"static_assert({cache_size} == "
                        f'{self.cache_offset}, "incorrect cache size");'
                    )

        # Write input stack effect variable declarations and initializations
        ieffects = list(reversed(self.input_effects))
        for i, ieffect in enumerate(ieffects):
            isize = string_effect_size(
                list_effect_size([ieff for ieff in ieffects[: i + 1]])
            )
            if ieffect.size:
                src = StackEffect(f"(stack_pointer - {maybe_parenthesize(isize)})", "PyObject **")
            elif ieffect.cond:
                src = StackEffect(f"({ieffect.cond}) ? stack_pointer[-{maybe_parenthesize(isize)}] : NULL", "")
            else:
                src = StackEffect(f"stack_pointer[-{maybe_parenthesize(isize)}]", "")
            out.declare(ieffect, src)

        # Write output stack effect variable declarations
        isize = string_effect_size(list_effect_size(self.input_effects))
        input_names = {ieffect.name for ieffect in self.input_effects}
        for i, oeffect in enumerate(self.output_effects):
            if oeffect.name not in input_names:
                if oeffect.size:
                    osize = string_effect_size(
                        list_effect_size([oeff for oeff in self.output_effects[:i]])
                    )
                    offset = "stack_pointer"
                    if isize != osize:
                        if isize != "0":
                            offset += f" - ({isize})"
                        if osize != "0":
                            offset += f" + {osize}"
                    src = StackEffect(offset, "PyObject **")
                    out.declare(oeffect, src)
                else:
                    out.declare(oeffect, None)

        # out.emit(f"next_instr += OPSIZE({self.inst.name}) - 1;")

        self.write_body(out, 0, self.active_caches, tier=tier)

        # Skip the rest if the block always exits
        if self.always_exits:
            return

        # Write net stack growth/shrinkage
        out.stack_adjust(
            [ieff for ieff in self.input_effects],
            [oeff for oeff in self.output_effects],
        )

        # Write output stack effect assignments
        oeffects = list(reversed(self.output_effects))
        for i, oeffect in enumerate(oeffects):
            if oeffect.name in self.unmoved_names:
                continue
            osize = string_effect_size(
                list_effect_size([oeff for oeff in oeffects[: i + 1]])
            )
            if oeffect.size:
                dst = StackEffect(f"stack_pointer - {maybe_parenthesize(osize)}", "PyObject **")
            else:
                dst = StackEffect(f"stack_pointer[-{maybe_parenthesize(osize)}]", "")
            out.assign(dst, oeffect)

        # Write cache effect
        if tier == TIER_ONE and self.cache_offset:
            out.emit(f"next_instr += {self.cache_offset};")

    def write_body(
        self,
        out: Formatter,
        dedent: int,
        active_caches: list[ActiveCacheEffect],
        tier: Tiers = TIER_ONE,
    ) -> None:
        """Write the instruction body."""
        # Write cache effect variable declarations and initializations
        for active in active_caches:
            ceffect = active.effect
            bits = ceffect.size * BITS_PER_CODE_UNIT
            if bits == 64:
                # NOTE: We assume that 64-bit data in the cache
                # is always an object pointer.
                # If this becomes false, we need a way to specify
                # syntactically what type the cache data is.
                typ = "PyObject *"
                func = "read_obj"
            else:
                typ = f"uint{bits}_t "
                func = f"read_u{bits}"
            if tier == TIER_ONE:
                out.emit(
                    f"{typ}{ceffect.name} = {func}(&next_instr[{active.offset}].cache);"
                )
            else:
                out.emit(f"{typ}{ceffect.name} = ({typ.strip()})operand;")

        # Write the body, substituting a goto for ERROR_IF() and other stuff
        assert dedent <= 0
        extra = " " * -dedent
        names_to_skip = self.unmoved_names | frozenset({UNUSED, "null"})
        offset = 0
        context = self.block.context
        assert context is not None and context.owner is not None
        filename = context.owner.filename
        for line in self.block_text:
            out.set_lineno(self.block_line + offset, filename)
            offset += 1
            if m := re.match(r"(\s*)ERROR_IF\((.+), (\w+)\);\s*(?://.*)?$", line):
                space, cond, label = m.groups()
                space = extra + space
                # ERROR_IF() must pop the inputs from the stack.
                # The code block is responsible for DECREF()ing them.
                # NOTE: If the label doesn't exist, just add it to ceval.c.

                # Don't pop common input/output effects at the bottom!
                # These aren't DECREF'ed so they can stay.
                ieffs = list(self.input_effects)
                oeffs = list(self.output_effects)
                while ieffs and oeffs and ieffs[0] == oeffs[0]:
                    ieffs.pop(0)
                    oeffs.pop(0)
                ninputs, symbolic = list_effect_size(ieffs)
                if ninputs:
                    label = f"pop_{ninputs}_{label}"
                if symbolic:
                    out.write_raw(
                        f"{space}if ({cond}) {{ STACK_SHRINK({symbolic}); goto {label}; }}\n"
                    )
                else:
                    out.write_raw(f"{space}if ({cond}) goto {label};\n")
            elif m := re.match(r"(\s*)DECREF_INPUTS\(\);\s*(?://.*)?$", line):
                out.reset_lineno()
                space = extra + m.group(1)
                for ieff in self.input_effects:
                    if ieff.name in names_to_skip:
                        continue
                    if ieff.size:
                        out.write_raw(
                            f"{space}for (int _i = {ieff.size}; --_i >= 0;) {{\n"
                        )
                        out.write_raw(f"{space}    Py_DECREF({ieff.name}[_i]);\n")
                        out.write_raw(f"{space}}}\n")
                    else:
                        decref = "XDECREF" if ieff.cond else "DECREF"
                        out.write_raw(f"{space}Py_{decref}({ieff.name});\n")
            else:
                out.write_raw(extra + line)
        out.reset_lineno()


InstructionOrCacheEffect = Instruction | parser.CacheEffect
StackEffectMapping = list[tuple[StackEffect, StackEffect]]


@dataclasses.dataclass
class Component:
    instr: Instruction
    input_mapping: StackEffectMapping
    output_mapping: StackEffectMapping
    active_caches: list[ActiveCacheEffect]

    def write_body(self, out: Formatter) -> None:
        with out.block(""):
            input_names = {ieffect.name for _, ieffect in self.input_mapping}
            for var, ieffect in self.input_mapping:
                out.declare(ieffect, var)
            for _, oeffect in self.output_mapping:
                if oeffect.name not in input_names:
                    out.declare(oeffect, None)

            self.instr.write_body(out, -4, self.active_caches)

            for var, oeffect in self.output_mapping:
                out.assign(var, oeffect)


MacroParts = list[Component | parser.CacheEffect]


@dataclasses.dataclass
class MacroInstruction:
    """A macro instruction."""

    name: str
    stack: list[StackEffect]
    initial_sp: int
    final_sp: int
    instr_fmt: str
    instr_flags: InstructionFlags
    macro: parser.Macro
    parts: MacroParts
    cache_offset: int
    predicted: bool = False


@dataclasses.dataclass
class PseudoInstruction:
    """A pseudo instruction."""

    name: str
    targets: list[Instruction]
    instr_fmt: str
    instr_flags: InstructionFlags


@dataclasses.dataclass
class OverriddenInstructionPlaceHolder:
    name: str


AnyInstruction = Instruction | MacroInstruction | PseudoInstruction
INSTR_FMT_PREFIX = "INSTR_FMT_"


class Analyzer:
    """Parse input, analyze it, and write to output."""

    input_filenames: list[str]
    output_filename: str
    metadata_filename: str
    pymetadata_filename: str
    executor_filename: str
    errors: int = 0
    emit_line_directives: bool = False

    def __init__(
        self,
        input_filenames: list[str],
        output_filename: str,
        metadata_filename: str,
        pymetadata_filename: str,
        executor_filename: str,
    ):
        """Read the input file."""
        self.input_filenames = input_filenames
        self.output_filename = output_filename
        self.metadata_filename = metadata_filename
        self.pymetadata_filename = pymetadata_filename
        self.executor_filename = executor_filename

    def error(self, msg: str, node: parser.Node) -> None:
        lineno = 0
        filename = "<unknown file>"
        if context := node.context:
            filename = context.owner.filename
            # Use line number of first non-comment in the node
            for token in context.owner.tokens[context.begin : context.end]:
                lineno = token.line
                if token.kind != "COMMENT":
                    break
        print(f"{filename}:{lineno}: {msg}", file=sys.stderr)
        self.errors += 1
|
|
|
|
|
2023-03-04 00:59:21 -04:00
|
|
|
everything: list[
|
2023-06-12 14:47:08 -03:00
|
|
|
parser.InstDef | parser.Macro | parser.Pseudo | OverriddenInstructionPlaceHolder
|
2023-03-04 00:59:21 -04:00
|
|
|
]
|
2022-11-22 20:04:57 -04:00
|
|
|
instrs: dict[str, Instruction] # Includes ops
|
2022-12-02 23:57:30 -04:00
|
|
|
macros: dict[str, parser.Macro]
|
|
|
|
macro_instrs: dict[str, MacroInstruction]
|
2022-11-17 21:06:07 -04:00
|
|
|
families: dict[str, parser.Family]
|
2023-06-14 14:06:50 -03:00
|
|
|
pseudos: dict[str, parser.Pseudo]
|
2023-06-11 18:31:59 -03:00
|
|
|
pseudo_instrs: dict[str, PseudoInstruction]
|
2022-11-17 21:06:07 -04:00
|
|
|
|
|
|
|
def parse(self) -> None:
|
2022-12-02 23:57:30 -04:00
|
|
|
"""Parse the source text.
|
|
|
|
|
|
|
|
We only want the parser to see the stuff between the
|
|
|
|
begin and end markers.
|
|
|
|
"""
|
2023-03-04 00:59:21 -04:00
|
|
|
|
|
|
|
self.everything = []
|
|
|
|
self.instrs = {}
|
|
|
|
self.macros = {}
|
|
|
|
self.families = {}
|
2023-06-11 18:31:59 -03:00
|
|
|
self.pseudos = {}
|
2023-03-04 00:59:21 -04:00
|
|
|
|
|
|
|
instrs_idx: dict[str, int] = dict()
|
|
|
|
|
|
|
|
for filename in self.input_filenames:
|
|
|
|
self.parse_file(filename, instrs_idx)
|
|
|
|
|
|
|
|
files = " + ".join(self.input_filenames)
|
|
|
|
print(
|
|
|
|
f"Read {len(self.instrs)} instructions/ops, "
|
2023-06-12 14:47:08 -03:00
|
|
|
f"{len(self.macros)} macros, {len(self.pseudos)} pseudos, "
|
2023-03-04 00:59:21 -04:00
|
|
|
f"and {len(self.families)} families from {files}",
|
|
|
|
file=sys.stderr,
|
|
|
|
)
|
|
|
|
|
|
|
|
def parse_file(self, filename: str, instrs_idx: dict[str, int]) -> None:
|
|
|
|
with open(filename) as file:
|
|
|
|
src = file.read()
|
|
|
|
|
2023-07-16 20:05:24 -03:00
|
|
|
|
|
|
|
psr = parser.Parser(src, filename=prettify_filename(filename))
|
2022-11-17 21:06:07 -04:00
|
|
|
|
|
|
|
# Skip until begin marker
|
|
|
|
while tkn := psr.next(raw=True):
|
|
|
|
if tkn.text == BEGIN_MARKER:
|
|
|
|
break
|
2022-11-03 01:31:26 -03:00
|
|
|
else:
|
2022-11-22 20:04:57 -04:00
|
|
|
raise psr.make_syntax_error(
|
|
|
|
f"Couldn't find {BEGIN_MARKER!r} in {psr.filename}"
|
|
|
|
)
|
2022-12-02 23:57:30 -04:00
|
|
|
start = psr.getpos()
|
2022-11-17 21:06:07 -04:00
|
|
|
|
2022-12-02 23:57:30 -04:00
|
|
|
# Find end marker, then delete everything after it
|
|
|
|
while tkn := psr.next(raw=True):
|
|
|
|
if tkn.text == END_MARKER:
|
|
|
|
break
|
|
|
|
del psr.tokens[psr.getpos() - 1 :]
|
|
|
|
|
|
|
|
# Parse from start
|
|
|
|
psr.setpos(start)
|
2023-06-14 14:06:50 -03:00
|
|
|
thing: parser.InstDef | parser.Macro | parser.Pseudo | parser.Family | None
|
2023-03-04 00:59:21 -04:00
|
|
|
thing_first_token = psr.peek()
|
2022-12-02 23:57:30 -04:00
|
|
|
while thing := psr.definition():
|
2023-06-13 17:42:03 -03:00
|
|
|
if ws := [w for w in RESERVED_WORDS if variable_used(thing, w)]:
|
|
|
|
self.error(f"'{ws[0]}' is a reserved word. {RESERVED_WORDS[ws[0]]}", thing)
|
|
|
|
|
2022-12-02 23:57:30 -04:00
|
|
|
match thing:
|
|
|
|
case parser.InstDef(name=name):
|
2023-03-04 00:59:21 -04:00
|
|
|
if name in self.instrs:
|
|
|
|
if not thing.override:
|
|
|
|
raise psr.make_syntax_error(
|
|
|
|
f"Duplicate definition of '{name}' @ {thing.context} "
|
|
|
|
f"previous definition @ {self.instrs[name].inst.context}",
|
|
|
|
thing_first_token,
|
|
|
|
)
|
|
|
|
self.everything[instrs_idx[name]] = OverriddenInstructionPlaceHolder(name=name)
|
|
|
|
if name not in self.instrs and thing.override:
|
|
|
|
raise psr.make_syntax_error(
|
|
|
|
f"Definition of '{name}' @ {thing.context} is supposed to be "
|
|
|
|
"an override but no previous definition exists.",
|
|
|
|
thing_first_token,
|
|
|
|
)
|
2022-12-02 23:57:30 -04:00
|
|
|
self.instrs[name] = Instruction(thing)
|
2023-03-04 00:59:21 -04:00
|
|
|
instrs_idx[name] = len(self.everything)
|
2022-12-08 19:54:07 -04:00
|
|
|
self.everything.append(thing)
|
2022-12-02 23:57:30 -04:00
|
|
|
case parser.Macro(name):
|
|
|
|
self.macros[name] = thing
|
2022-12-08 19:54:07 -04:00
|
|
|
self.everything.append(thing)
|
2022-12-02 23:57:30 -04:00
|
|
|
case parser.Family(name):
|
|
|
|
self.families[name] = thing
|
2023-06-11 18:31:59 -03:00
|
|
|
case parser.Pseudo(name):
|
|
|
|
self.pseudos[name] = thing
|
|
|
|
self.everything.append(thing)
|
2022-12-02 23:57:30 -04:00
|
|
|
case _:
|
|
|
|
typing.assert_never(thing)
|
|
|
|
if not psr.eof():
|
2023-03-04 00:59:21 -04:00
|
|
|
raise psr.make_syntax_error(f"Extra stuff at the end of {filename}")
|
2022-11-17 21:06:07 -04:00
|
|
|
|
|
|
|
def analyze(self) -> None:
|
|
|
|
"""Analyze the inputs.
|
|
|
|
|
|
|
|
Raises SystemExit if there is an error.
|
|
|
|
"""
|
2023-06-12 14:47:08 -03:00
|
|
|
self.analyze_macros_and_pseudos()
|
2023-05-31 12:09:23 -03:00
|
|
|
self.find_predictions()
|
2023-01-30 15:23:57 -04:00
|
|
|
self.map_families()
|
|
|
|
self.check_families()
|
2022-11-17 21:06:07 -04:00
|
|
|
|
|
|
|
def find_predictions(self) -> None:
|
|
|
|
"""Find the instructions that need PREDICTED() labels."""
|
|
|
|
for instr in self.instrs.values():
|
2023-06-12 14:47:08 -03:00
|
|
|
targets: set[str] = set()
|
2022-12-08 17:31:27 -04:00
|
|
|
for line in instr.block_text:
|
|
|
|
if m := re.match(RE_PREDICTED, line):
|
|
|
|
targets.add(m.group(1))
|
|
|
|
for target in targets:
|
2022-11-17 21:06:07 -04:00
|
|
|
if target_instr := self.instrs.get(target):
|
|
|
|
target_instr.predicted = True
|
2023-05-31 12:09:23 -03:00
|
|
|
elif target_macro := self.macro_instrs.get(target):
|
|
|
|
target_macro.predicted = True
|
2022-11-17 21:06:07 -04:00
|
|
|
else:
|
2022-11-22 20:04:57 -04:00
|
|
|
self.error(
|
2022-11-17 21:06:07 -04:00
|
|
|
f"Unknown instruction {target!r} predicted in {instr.name!r}",
|
2022-12-02 23:57:30 -04:00
|
|
|
instr.inst, # TODO: Use better location
|
2022-11-17 21:06:07 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
def map_families(self) -> None:
|
2023-01-30 15:23:57 -04:00
|
|
|
"""Link instruction names back to their family, if they have one."""
|
2022-11-17 21:06:07 -04:00
|
|
|
for family in self.families.values():
|
2023-07-16 12:16:34 -03:00
|
|
|
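            # The family head (the generic, unspecialized name) is iterated
            # along with the members, so it gets linked back to the family too.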
for member in [family.name] + family.members:
|
2022-11-17 21:06:07 -04:00
|
|
|
if member_instr := self.instrs.get(member):
|
2023-01-30 15:23:57 -04:00
|
|
|
if member_instr.family not in (family, None):
|
|
|
|
self.error(
|
|
|
|
f"Instruction {member} is a member of multiple families "
|
|
|
|
f"({member_instr.family.name}, {family.name}).",
|
2023-01-30 21:27:51 -04:00
|
|
|
family,
|
2023-01-30 15:23:57 -04:00
|
|
|
)
|
|
|
|
else:
|
|
|
|
member_instr.family = family
|
2023-07-13 12:36:19 -03:00
|
|
|
elif not self.macro_instrs.get(member):
|
2022-11-22 20:04:57 -04:00
|
|
|
self.error(
|
2022-11-17 21:06:07 -04:00
|
|
|
f"Unknown instruction {member!r} referenced in family {family.name!r}",
|
2022-11-22 20:04:57 -04:00
|
|
|
family,
|
2022-11-17 21:06:07 -04:00
|
|
|
)
|
|
|
|
|
|
|
|
def check_families(self) -> None:
|
|
|
|
"""Check each family:
|
|
|
|
|
|
|
|
- Must have at least 2 members
|
|
|
|
- All members must be known instructions
|
|
|
|
- All members must have the same cache, input and output effects
|
|
|
|
"""
|
|
|
|
for family in self.families.values():
|
2023-07-16 12:16:34 -03:00
|
|
|
if family.name not in self.macro_instrs and family.name not in self.instrs:
|
|
|
|
self.error(
|
|
|
|
f"Family {family.name!r} has unknown instruction {family.name!r}",
|
|
|
|
family,
|
|
|
|
)
|
2023-01-30 21:27:51 -04:00
|
|
|
members = [
|
|
|
|
member
|
|
|
|
for member in family.members
|
|
|
|
if member in self.instrs or member in self.macro_instrs
|
|
|
|
]
|
2022-11-17 21:06:07 -04:00
|
|
|
if members != family.members:
|
|
|
|
unknown = set(family.members) - set(members)
|
2022-12-02 23:57:30 -04:00
|
|
|
self.error(
|
|
|
|
f"Family {family.name!r} has unknown members: {unknown}", family
|
|
|
|
)
|
2023-07-16 12:16:34 -03:00
|
|
|
expected_effects = self.effect_counts(family.name)
|
|
|
|
for member in members:
|
2023-01-30 15:23:57 -04:00
|
|
|
member_effects = self.effect_counts(member)
|
|
|
|
if member_effects != expected_effects:
|
2022-11-22 20:04:57 -04:00
|
|
|
self.error(
|
2022-11-17 21:06:07 -04:00
|
|
|
f"Family {family.name!r} has inconsistent "
|
2023-01-30 15:23:57 -04:00
|
|
|
f"(cache, input, output) effects:\n"
|
|
|
|
f" {family.members[0]} = {expected_effects}; "
|
|
|
|
f"{member} = {member_effects}",
|
2022-11-22 20:04:57 -04:00
|
|
|
family,
|
2022-11-17 21:06:07 -04:00
|
|
|
)
|
2022-11-22 20:04:57 -04:00
|
|
|
|
2023-01-30 15:23:57 -04:00
|
|
|
def effect_counts(self, name: str) -> tuple[int, int, int]:
|
|
|
|
if instr := self.instrs.get(name):
|
|
|
|
cache = instr.cache_offset
|
|
|
|
input = len(instr.input_effects)
|
|
|
|
output = len(instr.output_effects)
|
2023-06-28 15:28:07 -03:00
|
|
|
elif mac := self.macro_instrs.get(name):
|
|
|
|
cache = mac.cache_offset
|
|
|
|
input, output = 0, 0
|
|
|
|
for part in mac.parts:
|
2023-01-30 15:23:57 -04:00
|
|
|
if isinstance(part, Component):
|
|
|
|
# A component may pop what the previous component pushed,
|
|
|
|
# so we offset the input/output counts by that.
|
|
|
|
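                    # E.g. if the previous component pushed one value and this
                    # one pops two, only one extra input is needed (offset 1).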
delta_i = len(part.instr.input_effects)
|
|
|
|
delta_o = len(part.instr.output_effects)
|
|
|
|
offset = min(delta_i, output)
|
|
|
|
input += delta_i - offset
|
|
|
|
output += delta_o - offset
|
|
|
|
else:
|
|
|
|
assert False, f"Unknown instruction {name!r}"
|
|
|
|
return cache, input, output
|
|
|
|
|
2023-06-12 14:47:08 -03:00
|
|
|
def analyze_macros_and_pseudos(self) -> None:
|
2023-06-13 17:42:03 -03:00
|
|
|
"""Analyze each macro and pseudo instruction."""
|
2022-12-02 23:57:30 -04:00
|
|
|
self.macro_instrs = {}
|
2023-06-11 18:31:59 -03:00
|
|
|
self.pseudo_instrs = {}
|
2022-12-02 23:57:30 -04:00
|
|
|
for name, macro in self.macros.items():
|
|
|
|
self.macro_instrs[name] = self.analyze_macro(macro)
|
2023-06-11 18:31:59 -03:00
|
|
|
for name, pseudo in self.pseudos.items():
|
|
|
|
self.pseudo_instrs[name] = self.analyze_pseudo(pseudo)
|
2022-12-02 23:57:30 -04:00
|
|
|
|
|
|
|
def analyze_macro(self, macro: parser.Macro) -> MacroInstruction:
|
|
|
|
components = self.check_macro_components(macro)
|
|
|
|
stack, initial_sp = self.stack_analysis(components)
|
|
|
|
sp = initial_sp
|
2023-06-29 17:02:25 -03:00
|
|
|
parts: MacroParts = []
|
2023-06-21 20:14:43 -03:00
|
|
|
flags = InstructionFlags.newEmpty()
|
2023-06-28 15:28:07 -03:00
|
|
|
offset = 0
|
2022-12-02 23:57:30 -04:00
|
|
|
for component in components:
|
|
|
|
match component:
|
|
|
|
case parser.CacheEffect() as ceffect:
|
|
|
|
parts.append(ceffect)
|
2023-06-28 15:28:07 -03:00
|
|
|
offset += ceffect.size
|
2022-12-02 23:57:30 -04:00
|
|
|
case Instruction() as instr:
|
2023-06-28 15:28:07 -03:00
|
|
|
part, sp, offset = self.analyze_instruction(instr, stack, sp, offset)
|
2022-12-08 17:31:27 -04:00
|
|
|
parts.append(part)
|
2023-06-21 20:14:43 -03:00
|
|
|
flags.add(instr.instr_flags)
|
2022-12-02 23:57:30 -04:00
|
|
|
case _:
|
|
|
|
typing.assert_never(component)
|
|
|
|
final_sp = sp
|
2023-06-28 15:28:07 -03:00
|
|
|
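        # Build the instruction format string: one character per code unit
        # ("IB" for the opcode/oparg word, then "C" for the first cache word
        # and "0" for each additional cache word).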
format = "IB"
|
|
|
|
if offset:
|
|
|
|
format += "C" + "0"*(offset-1)
|
2023-01-29 21:28:39 -04:00
|
|
|
return MacroInstruction(
|
2023-06-28 15:28:07 -03:00
|
|
|
macro.name, stack, initial_sp, final_sp, format, flags, macro, parts, offset
|
2023-01-29 21:28:39 -04:00
|
|
|
)
|
2022-12-02 23:57:30 -04:00
|
|
|
|
2023-06-11 18:31:59 -03:00
|
|
|
def analyze_pseudo(self, pseudo: parser.Pseudo) -> PseudoInstruction:
|
|
|
|
targets = [self.instrs[target] for target in pseudo.targets]
|
|
|
|
assert targets
|
|
|
|
# Make sure the targets have the same fmt
|
|
|
|
fmts = list(set([t.instr_fmt for t in targets]))
|
|
|
|
        assert len(fmts) == 1
|
2023-06-21 20:14:43 -03:00
|
|
|
        assert len({t.instr_flags.bitmap() for t in targets}) == 1
|
|
|
|
return PseudoInstruction(pseudo.name, targets, fmts[0], targets[0].instr_flags)
|
2023-06-11 18:31:59 -03:00
|
|
|
|
2022-12-08 17:31:27 -04:00
|
|
|
def analyze_instruction(
|
2023-06-28 15:28:07 -03:00
|
|
|
self, instr: Instruction, stack: list[StackEffect], sp: int, offset: int
|
|
|
|
) -> tuple[Component, int, int]:
|
2022-12-08 17:31:27 -04:00
|
|
|
input_mapping: StackEffectMapping = []
|
|
|
|
for ieffect in reversed(instr.input_effects):
|
|
|
|
sp -= 1
|
|
|
|
input_mapping.append((stack[sp], ieffect))
|
|
|
|
output_mapping: StackEffectMapping = []
|
|
|
|
for oeffect in instr.output_effects:
|
|
|
|
output_mapping.append((stack[sp], oeffect))
|
|
|
|
sp += 1
|
2023-06-28 15:28:07 -03:00
|
|
|
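        # Collect the named cache entries together with their offset (in code
        # units) from the start of the cache, so code generation can read them.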
active_effects: list[ActiveCacheEffect] = []
|
|
|
|
for ceffect in instr.cache_effects:
|
|
|
|
if ceffect.name != UNUSED:
|
|
|
|
active_effects.append(ActiveCacheEffect(ceffect, offset))
|
|
|
|
offset += ceffect.size
|
|
|
|
return Component(instr, input_mapping, output_mapping, active_effects), sp, offset
|
2022-12-08 17:31:27 -04:00
|
|
|
|
2022-12-02 23:57:30 -04:00
|
|
|
def check_macro_components(
|
|
|
|
self, macro: parser.Macro
|
|
|
|
) -> list[InstructionOrCacheEffect]:
|
|
|
|
components: list[InstructionOrCacheEffect] = []
|
|
|
|
for uop in macro.uops:
|
|
|
|
match uop:
|
|
|
|
case parser.OpName(name):
|
|
|
|
if name not in self.instrs:
|
|
|
|
self.error(f"Unknown instruction {name!r}", macro)
|
|
|
|
components.append(self.instrs[name])
|
|
|
|
case parser.CacheEffect():
|
|
|
|
components.append(uop)
|
|
|
|
case _:
|
|
|
|
typing.assert_never(uop)
|
|
|
|
return components
|
|
|
|
|
|
|
|
def stack_analysis(
|
|
|
|
self, components: typing.Iterable[InstructionOrCacheEffect]
|
2022-12-08 17:31:27 -04:00
|
|
|
) -> tuple[list[StackEffect], int]:
|
2023-06-12 14:47:08 -03:00
|
|
|
"""Analyze a macro.
|
2022-12-02 23:57:30 -04:00
|
|
|
|
2022-12-27 21:11:03 -04:00
|
|
|
Ignore cache effects.
|
2022-12-02 23:57:30 -04:00
|
|
|
|
2023-06-14 17:50:48 -03:00
|
|
|
Return the list of variables (as StackEffects) and the initial stack pointer.
|
2022-12-02 23:57:30 -04:00
|
|
|
"""
|
|
|
|
lowest = current = highest = 0
|
2023-06-14 17:50:48 -03:00
|
|
|
conditions: dict[int, str] = {} # Indexed by 'current'.
|
|
|
|
last_instr: Instruction | None = None
|
|
|
|
for thing in components:
|
|
|
|
if isinstance(thing, Instruction):
|
|
|
|
last_instr = thing
|
2022-12-02 23:57:30 -04:00
|
|
|
for thing in components:
|
|
|
|
match thing:
|
|
|
|
case Instruction() as instr:
|
2023-01-29 21:28:39 -04:00
|
|
|
if any(
|
|
|
|
eff.size for eff in instr.input_effects + instr.output_effects
|
|
|
|
):
|
2023-01-17 19:59:19 -04:00
|
|
|
# TODO: Eventually this will be needed, at least for macros.
|
|
|
|
self.error(
|
|
|
|
f"Instruction {instr.name!r} has variable-sized stack effect, "
|
2023-06-12 14:47:08 -03:00
|
|
|
"which are not supported in macro instructions",
|
|
|
|
instr.inst, # TODO: Pass name+location of macro
|
2023-01-17 19:59:19 -04:00
|
|
|
)
|
2023-06-14 17:50:48 -03:00
|
|
|
if any(eff.cond for eff in instr.input_effects):
|
|
|
|
self.error(
|
|
|
|
f"Instruction {instr.name!r} has conditional input stack effect, "
|
|
|
|
"which are not supported in macro instructions",
|
|
|
|
instr.inst, # TODO: Pass name+location of macro
|
|
|
|
)
|
|
|
|
if any(eff.cond for eff in instr.output_effects) and instr is not last_instr:
|
|
|
|
self.error(
|
|
|
|
f"Instruction {instr.name!r} has conditional output stack effect, "
|
|
|
|
"but is not the last instruction in a macro",
|
|
|
|
instr.inst, # TODO: Pass name+location of macro
|
|
|
|
)
|
2022-12-02 23:57:30 -04:00
|
|
|
current -= len(instr.input_effects)
|
|
|
|
lowest = min(lowest, current)
|
2023-06-14 17:50:48 -03:00
|
|
|
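                    # Remember at which stack depth each conditional output
                    # lands, so the matching temporary below can carry the
                    # same condition.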
for eff in instr.output_effects:
|
|
|
|
if eff.cond:
|
|
|
|
conditions[current] = eff.cond
|
|
|
|
current += 1
|
2022-12-02 23:57:30 -04:00
|
|
|
highest = max(highest, current)
|
|
|
|
case parser.CacheEffect():
|
|
|
|
pass
|
|
|
|
case _:
|
|
|
|
typing.assert_never(thing)
|
|
|
|
# At this point, 'current' is the net stack effect,
|
|
|
|
# and 'lowest' and 'highest' are the extremes.
|
|
|
|
# Note that 'lowest' may be negative.
|
2022-12-08 17:31:27 -04:00
|
|
|
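        # Create one temporary per stack slot between the lowest and highest
        # depths reached; a slot written by a conditional output keeps that
        # output's condition.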
stack = [
|
2023-06-14 17:50:48 -03:00
|
|
|
StackEffect(f"_tmp_{i}", "", conditions.get(highest - i, ""))
|
|
|
|
for i in reversed(range(1, highest - lowest + 1))
|
2022-12-08 17:31:27 -04:00
|
|
|
]
|
2022-12-02 23:57:30 -04:00
|
|
|
return stack, -lowest
|
|
|
|
|
2023-01-25 16:41:03 -04:00
|
|
|
def get_stack_effect_info(
|
2023-06-12 14:47:08 -03:00
|
|
|
self, thing: parser.InstDef | parser.Macro | parser.Pseudo
|
2023-06-28 15:28:07 -03:00
|
|
|
) -> tuple[AnyInstruction | None, str | None, str | None]:
|
2023-01-29 21:28:39 -04:00
|
|
|
def effect_str(effects: list[StackEffect]) -> str:
|
|
|
|
n_effect, sym_effect = list_effect_size(effects)
|
2023-01-25 16:41:03 -04:00
|
|
|
if sym_effect:
|
|
|
|
return f"{sym_effect} + {n_effect}" if n_effect else sym_effect
|
|
|
|
return str(n_effect)
|
|
|
|
|
2023-01-29 21:28:39 -04:00
|
|
|
instr: AnyInstruction | None
|
2023-01-25 16:41:03 -04:00
|
|
|
match thing:
|
|
|
|
case parser.InstDef():
|
|
|
|
if thing.kind != "op":
|
|
|
|
instr = self.instrs[thing.name]
|
|
|
|
popped = effect_str(instr.input_effects)
|
|
|
|
pushed = effect_str(instr.output_effects)
|
2023-01-26 13:15:05 -04:00
|
|
|
else:
|
|
|
|
instr = None
|
2023-01-29 21:28:39 -04:00
|
|
|
popped = ""
|
|
|
|
pushed = ""
|
2023-01-25 16:41:03 -04:00
|
|
|
case parser.Macro():
|
|
|
|
instr = self.macro_instrs[thing.name]
|
|
|
|
parts = [comp for comp in instr.parts if isinstance(comp, Component)]
|
2023-05-31 12:09:23 -03:00
|
|
|
# Note: stack_analysis() already verifies that macro components
|
|
|
|
# have no variable-sized stack effects.
|
|
|
|
low = 0
|
|
|
|
sp = 0
|
|
|
|
high = 0
|
2023-06-14 17:50:48 -03:00
|
|
|
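                # With conditional outputs the number of pushed values is only
                # known at run time, so build a symbolic "a + b + ..." expression.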
pushed_symbolic: list[str] = []
|
2023-05-31 12:09:23 -03:00
|
|
|
for comp in parts:
|
|
|
|
for effect in comp.instr.input_effects:
|
|
|
|
assert not effect.cond, effect
|
|
|
|
assert not effect.size, effect
|
|
|
|
sp -= 1
|
|
|
|
low = min(low, sp)
|
|
|
|
for effect in comp.instr.output_effects:
|
|
|
|
assert not effect.size, effect
|
2023-06-14 17:50:48 -03:00
|
|
|
if effect.cond:
|
2023-07-16 21:09:11 -03:00
|
|
|
if effect.cond in ("0", "1"):
|
|
|
|
pushed_symbolic.append(effect.cond)
|
|
|
|
else:
|
|
|
|
pushed_symbolic.append(maybe_parenthesize(f"{maybe_parenthesize(effect.cond)} ? 1 : 0"))
|
2023-05-31 12:09:23 -03:00
|
|
|
sp += 1
|
|
|
|
high = max(sp, high)
|
|
|
|
if high != max(0, sp):
|
|
|
|
# If you get this, intermediate stack growth occurs,
|
|
|
|
# and stack size calculations may go awry.
|
|
|
|
# E.g. [push, pop]. The fix would be for stack size
|
|
|
|
# calculations to use the micro ops.
|
|
|
|
self.error("Macro has virtual stack growth", thing)
|
|
|
|
popped = str(-low)
|
2023-06-14 17:50:48 -03:00
|
|
|
pushed_symbolic.append(str(sp - low - len(pushed_symbolic)))
|
|
|
|
pushed = " + ".join(pushed_symbolic)
|
2023-06-11 18:31:59 -03:00
|
|
|
case parser.Pseudo():
|
2023-06-14 14:06:50 -03:00
|
|
|
instr = self.pseudo_instrs[thing.name]
|
2023-06-11 18:31:59 -03:00
|
|
|
popped = pushed = None
|
|
|
|
                # Calculate stack effect, and check that it's the same
|
|
|
|
# for all targets.
|
|
|
|
for target in self.pseudos[thing.name].targets:
|
|
|
|
target_instr = self.instrs.get(target)
|
|
|
|
# Currently target is always an instr. This could change
|
|
|
|
                    # in the future, e.g., if we have a pseudo targeting a
|
|
|
|
# macro instruction.
|
|
|
|
assert target_instr
|
|
|
|
target_popped = effect_str(target_instr.input_effects)
|
|
|
|
target_pushed = effect_str(target_instr.output_effects)
|
|
|
|
if popped is None and pushed is None:
|
|
|
|
popped, pushed = target_popped, target_pushed
|
|
|
|
else:
|
|
|
|
assert popped == target_popped
|
|
|
|
assert pushed == target_pushed
|
2023-01-25 16:41:03 -04:00
|
|
|
case _:
|
|
|
|
typing.assert_never(thing)
|
|
|
|
return instr, popped, pushed
|
|
|
|
|
|
|
|
def write_stack_effect_functions(self) -> None:
|
2023-01-29 21:28:39 -04:00
|
|
|
popped_data: list[tuple[AnyInstruction, str]] = []
|
|
|
|
pushed_data: list[tuple[AnyInstruction, str]] = []
|
2023-01-25 16:41:03 -04:00
|
|
|
for thing in self.everything:
|
2023-03-04 00:59:21 -04:00
|
|
|
if isinstance(thing, OverriddenInstructionPlaceHolder):
|
|
|
|
continue
|
2023-01-25 16:41:03 -04:00
|
|
|
instr, popped, pushed = self.get_stack_effect_info(thing)
|
2023-01-26 13:15:05 -04:00
|
|
|
if instr is not None:
|
2023-06-28 15:28:07 -03:00
|
|
|
assert popped is not None and pushed is not None
|
2023-01-29 21:28:39 -04:00
|
|
|
popped_data.append((instr, popped))
|
|
|
|
pushed_data.append((instr, pushed))
|
2023-01-25 16:41:03 -04:00
|
|
|
|
2023-01-30 21:27:51 -04:00
|
|
|
def write_function(
|
|
|
|
direction: str, data: list[tuple[AnyInstruction, str]]
|
|
|
|
) -> None:
|
2023-02-08 20:23:19 -04:00
|
|
|
self.out.emit("")
|
2023-03-31 14:17:59 -03:00
|
|
|
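            # By default the header only declares the function; the definition
            # is emitted when the including file defines NEED_OPCODE_METADATA.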
self.out.emit("#ifndef NEED_OPCODE_METADATA")
|
2023-02-08 20:23:19 -04:00
|
|
|
self.out.emit(f"extern int _PyOpcode_num_{direction}(int opcode, int oparg, bool jump);")
|
|
|
|
self.out.emit("#else")
|
|
|
|
self.out.emit("int")
|
2023-02-01 15:38:06 -04:00
|
|
|
self.out.emit(f"_PyOpcode_num_{direction}(int opcode, int oparg, bool jump) {{")
|
2023-01-29 21:28:39 -04:00
|
|
|
self.out.emit(" switch(opcode) {")
|
2023-01-25 16:41:03 -04:00
|
|
|
for instr, effect in data:
|
|
|
|
self.out.emit(f" case {instr.name}:")
|
|
|
|
self.out.emit(f" return {effect};")
|
|
|
|
self.out.emit(" default:")
|
2023-02-08 20:23:19 -04:00
|
|
|
self.out.emit(" return -1;")
|
2023-01-25 16:41:03 -04:00
|
|
|
self.out.emit(" }")
|
|
|
|
self.out.emit("}")
|
2023-01-29 21:28:39 -04:00
|
|
|
self.out.emit("#endif")
|
2023-01-25 16:41:03 -04:00
|
|
|
|
2023-01-29 21:28:39 -04:00
|
|
|
write_function("popped", popped_data)
|
|
|
|
write_function("pushed", pushed_data)
|
2023-02-08 20:23:19 -04:00
|
|
|
self.out.emit("")
|
2023-01-25 16:41:03 -04:00
|
|
|
|
2023-03-04 00:59:21 -04:00
|
|
|
def from_source_files(self) -> str:
|
2023-06-19 19:47:04 -03:00
|
|
|
paths = f"\n{self.out.comment} ".join(
|
2023-07-16 20:05:24 -03:00
|
|
|
prettify_filename(filename)
|
2023-03-04 00:59:21 -04:00
|
|
|
for filename in self.input_filenames
|
|
|
|
)
|
2023-06-19 19:47:04 -03:00
|
|
|
return f"{self.out.comment} from:\n{self.out.comment} {paths}\n"
|
|
|
|
|
|
|
|
def write_provenance_header(self):
|
|
|
|
self.out.write_raw(f"{self.out.comment} This file is generated by {THIS}\n")
|
|
|
|
self.out.write_raw(self.from_source_files())
|
|
|
|
self.out.write_raw(f"{self.out.comment} Do not edit!\n")
|
2023-03-04 00:59:21 -04:00
|
|
|
|
2023-01-05 17:01:07 -04:00
|
|
|
def write_metadata(self) -> None:
|
|
|
|
"""Write instruction metadata to output file."""
|
2023-01-09 19:53:01 -04:00
|
|
|
|
|
|
|
# Compute the set of all instruction formats.
|
|
|
|
all_formats: set[str] = set()
|
|
|
|
for thing in self.everything:
|
|
|
|
match thing:
|
2023-03-04 00:59:21 -04:00
|
|
|
case OverriddenInstructionPlaceHolder():
|
|
|
|
continue
|
2023-01-09 19:53:01 -04:00
|
|
|
case parser.InstDef():
|
|
|
|
format = self.instrs[thing.name].instr_fmt
|
|
|
|
case parser.Macro():
|
|
|
|
format = self.macro_instrs[thing.name].instr_fmt
|
2023-06-11 18:31:59 -03:00
|
|
|
case parser.Pseudo():
|
|
|
|
format = None
|
|
|
|
for target in self.pseudos[thing.name].targets:
|
|
|
|
target_instr = self.instrs.get(target)
|
|
|
|
assert target_instr
|
|
|
|
if format is None:
|
|
|
|
format = target_instr.instr_fmt
|
|
|
|
else:
|
|
|
|
assert format == target_instr.instr_fmt
|
2023-01-09 19:53:01 -04:00
|
|
|
case _:
|
|
|
|
typing.assert_never(thing)
|
|
|
|
all_formats.add(format)
|
|
|
|
# Turn it into a list of enum definitions.
|
|
|
|
format_enums = [INSTR_FMT_PREFIX + format for format in sorted(all_formats)]
|
|
|
|
|
2023-02-08 20:23:19 -04:00
|
|
|
with open(self.metadata_filename, "w") as f:
|
2023-03-15 12:37:36 -03:00
|
|
|
# Create formatter
|
|
|
|
self.out = Formatter(f, 0)
|
|
|
|
|
2023-06-19 19:47:04 -03:00
|
|
|
self.write_provenance_header()
|
2023-01-05 17:01:07 -04:00
|
|
|
|
2023-07-14 14:41:52 -03:00
|
|
|
self.out.emit("\n#include <stdbool.h>")
|
|
|
|
|
2023-06-11 18:31:59 -03:00
|
|
|
self.write_pseudo_instrs()
|
2023-01-05 17:01:07 -04:00
|
|
|
|
2023-06-28 15:28:07 -03:00
|
|
|
self.out.emit("")
|
|
|
|
self.write_uop_items(lambda name, counter: f"#define {name} {counter}")
|
2023-06-26 23:02:57 -03:00
|
|
|
|
2023-01-25 16:41:03 -04:00
|
|
|
self.write_stack_effect_functions()
|
|
|
|
|
2023-02-08 20:23:19 -04:00
|
|
|
# Write type definitions
|
2023-01-09 19:53:01 -04:00
|
|
|
self.out.emit(f"enum InstructionFormat {{ {', '.join(format_enums)} }};")
|
2023-06-21 20:14:43 -03:00
|
|
|
|
2023-07-14 14:41:52 -03:00
|
|
|
self.out.emit("")
|
|
|
|
self.out.emit(
|
|
|
|
"#define IS_VALID_OPCODE(OP) \\\n"
|
|
|
|
" (((OP) >= 0) && ((OP) < OPCODE_METADATA_SIZE) && \\\n"
|
|
|
|
" (_PyOpcode_opcode_metadata[(OP)].valid_entry))")
|
|
|
|
|
|
|
|
self.out.emit("")
|
2023-06-21 20:14:43 -03:00
|
|
|
InstructionFlags.emit_macros(self.out)
|
2023-06-13 17:42:03 -03:00
|
|
|
|
2023-07-14 14:41:52 -03:00
|
|
|
self.out.emit("")
|
2023-06-26 23:02:57 -03:00
|
|
|
with self.out.block("struct opcode_metadata", ";"):
|
2023-01-05 17:01:07 -04:00
|
|
|
self.out.emit("bool valid_entry;")
|
2023-01-09 19:53:01 -04:00
|
|
|
self.out.emit("enum InstructionFormat instr_format;")
|
2023-06-13 17:42:03 -03:00
|
|
|
self.out.emit("int flags;")
|
2023-06-26 23:02:57 -03:00
|
|
|
self.out.emit("")
|
|
|
|
|
|
|
|
with self.out.block("struct opcode_macro_expansion", ";"):
|
|
|
|
self.out.emit("int nuops;")
|
|
|
|
self.out.emit("struct { int16_t uop; int8_t size; int8_t offset; } uops[8];")
|
|
|
|
self.out.emit("")
|
|
|
|
|
2023-07-07 14:42:10 -03:00
|
|
|
for key, value in OPARG_SIZES.items():
|
|
|
|
self.out.emit(f"#define {key} {value}")
|
2023-02-08 20:23:19 -04:00
|
|
|
self.out.emit("")
|
2023-07-07 14:42:10 -03:00
|
|
|
|
2023-06-11 18:31:59 -03:00
|
|
|
self.out.emit("#define OPCODE_METADATA_FMT(OP) "
|
|
|
|
"(_PyOpcode_opcode_metadata[(OP)].instr_format)")
|
|
|
|
self.out.emit("#define SAME_OPCODE_METADATA(OP1, OP2) \\")
|
|
|
|
self.out.emit(" (OPCODE_METADATA_FMT(OP1) == OPCODE_METADATA_FMT(OP2))")
|
|
|
|
self.out.emit("")
|
2023-02-08 20:23:19 -04:00
|
|
|
|
|
|
|
# Write metadata array declaration
|
2023-07-14 14:41:52 -03:00
|
|
|
self.out.emit("#define OPCODE_METADATA_SIZE 512")
|
|
|
|
self.out.emit("#define OPCODE_UOP_NAME_SIZE 512")
|
|
|
|
self.out.emit("#define OPCODE_MACRO_EXPANSION_SIZE 256")
|
|
|
|
self.out.emit("")
|
2023-03-31 14:17:59 -03:00
|
|
|
self.out.emit("#ifndef NEED_OPCODE_METADATA")
|
2023-07-14 14:41:52 -03:00
|
|
|
self.out.emit("extern const struct opcode_metadata "
|
|
|
|
"_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE];")
|
|
|
|
self.out.emit("extern const struct opcode_macro_expansion "
|
|
|
|
"_PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE];")
|
|
|
|
self.out.emit("extern const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE];")
|
2023-07-11 09:41:50 -03:00
|
|
|
self.out.emit("#else // if NEED_OPCODE_METADATA")
|
2023-06-26 23:02:57 -03:00
|
|
|
|
2023-07-14 14:41:52 -03:00
|
|
|
self.out.emit("const struct opcode_metadata "
|
|
|
|
"_PyOpcode_opcode_metadata[OPCODE_METADATA_SIZE] = {")
|
2023-01-05 17:01:07 -04:00
|
|
|
|
|
|
|
# Write metadata for each instruction
|
|
|
|
for thing in self.everything:
|
|
|
|
match thing:
|
2023-03-04 00:59:21 -04:00
|
|
|
case OverriddenInstructionPlaceHolder():
|
|
|
|
continue
|
2023-01-05 17:01:07 -04:00
|
|
|
case parser.InstDef():
|
|
|
|
if thing.kind != "op":
|
|
|
|
self.write_metadata_for_inst(self.instrs[thing.name])
|
|
|
|
case parser.Macro():
|
|
|
|
self.write_metadata_for_macro(self.macro_instrs[thing.name])
|
2023-06-11 18:31:59 -03:00
|
|
|
case parser.Pseudo():
|
|
|
|
self.write_metadata_for_pseudo(self.pseudo_instrs[thing.name])
|
2023-01-05 17:01:07 -04:00
|
|
|
case _:
|
|
|
|
typing.assert_never(thing)
|
|
|
|
|
|
|
|
# Write end of array
|
|
|
|
self.out.emit("};")
|
2023-06-26 23:02:57 -03:00
|
|
|
|
|
|
|
with self.out.block(
|
2023-07-14 14:41:52 -03:00
|
|
|
"const struct opcode_macro_expansion "
|
|
|
|
"_PyOpcode_macro_expansion[OPCODE_MACRO_EXPANSION_SIZE] =",
|
2023-06-26 23:02:57 -03:00
|
|
|
";",
|
|
|
|
):
|
|
|
|
# Write macro expansion for each non-pseudo instruction
|
|
|
|
for thing in self.everything:
|
|
|
|
match thing:
|
|
|
|
case OverriddenInstructionPlaceHolder():
|
|
|
|
pass
|
|
|
|
case parser.InstDef(name=name):
|
|
|
|
instr = self.instrs[name]
|
2023-06-29 17:02:25 -03:00
|
|
|
# Since an 'op' is not a bytecode, it has no expansion; but 'inst' is
|
|
|
|
if instr.kind == "inst" and instr.is_viable_uop():
|
|
|
|
# Construct a dummy Component -- input/output mappings are not used
|
|
|
|
part = Component(instr, [], [], instr.active_caches)
|
|
|
|
self.write_macro_expansions(instr.name, [part])
|
2023-07-07 14:42:10 -03:00
|
|
|
elif instr.kind == "inst" and variable_used(instr.inst, "oparg1"):
|
|
|
|
assert variable_used(instr.inst, "oparg2"), "Half super-instr?"
|
|
|
|
self.write_super_expansions(instr.name)
|
2023-06-26 23:02:57 -03:00
|
|
|
case parser.Macro():
|
2023-06-29 17:02:25 -03:00
|
|
|
mac = self.macro_instrs[thing.name]
|
|
|
|
self.write_macro_expansions(mac.name, mac.parts)
|
2023-06-26 23:02:57 -03:00
|
|
|
case parser.Pseudo():
|
|
|
|
pass
|
|
|
|
case _:
|
|
|
|
typing.assert_never(thing)
|
|
|
|
|
2023-07-14 14:41:52 -03:00
|
|
|
with self.out.block("const char * const _PyOpcode_uop_name[OPCODE_UOP_NAME_SIZE] =", ";"):
|
2023-07-13 16:14:51 -03:00
|
|
|
self.write_uop_items(lambda name, counter: f"[{name}] = \"{name}\",")
|
2023-06-28 15:28:07 -03:00
|
|
|
|
2023-07-11 09:41:50 -03:00
|
|
|
self.out.emit("#endif // NEED_OPCODE_METADATA")
|
2023-01-05 17:01:07 -04:00
|
|
|
|
2023-06-19 19:47:04 -03:00
|
|
|
with open(self.pymetadata_filename, "w") as f:
|
|
|
|
# Create formatter
|
|
|
|
self.out = Formatter(f, 0, comment = "#")
|
|
|
|
|
|
|
|
self.write_provenance_header()
|
|
|
|
|
|
|
|
self.out.emit("")
|
|
|
|
self.out.emit("_specializations = {")
|
|
|
|
for name, family in self.families.items():
|
|
|
|
with self.out.indent():
|
2023-07-16 12:16:34 -03:00
|
|
|
self.out.emit(f"\"{family.name}\": [")
|
2023-06-19 19:47:04 -03:00
|
|
|
with self.out.indent():
|
2023-07-16 12:16:34 -03:00
|
|
|
for m in family.members:
|
2023-06-19 19:47:04 -03:00
|
|
|
self.out.emit(f"\"{m}\",")
|
|
|
|
self.out.emit(f"],")
|
|
|
|
self.out.emit("}")
|
|
|
|
|
|
|
|
# Handle special case
|
|
|
|
self.out.emit("")
|
|
|
|
self.out.emit("# An irregular case:")
|
|
|
|
self.out.emit(
|
|
|
|
"_specializations[\"BINARY_OP\"].append("
|
|
|
|
"\"BINARY_OP_INPLACE_ADD_UNICODE\")")
|
|
|
|
|
|
|
|
# Make list of specialized instructions
|
|
|
|
self.out.emit("")
|
|
|
|
self.out.emit(
|
|
|
|
"_specialized_instructions = ["
|
|
|
|
"opcode for family in _specializations.values() for opcode in family"
|
|
|
|
"]")
|
|
|
|
|
2023-06-11 18:31:59 -03:00
|
|
|
def write_pseudo_instrs(self) -> None:
|
|
|
|
"""Write the IS_PSEUDO_INSTR macro"""
|
2023-07-01 07:28:07 -03:00
|
|
|
self.out.emit("\n\n#define IS_PSEUDO_INSTR(OP) ( \\")
|
2023-06-11 18:31:59 -03:00
|
|
|
for op in self.pseudos:
|
|
|
|
self.out.emit(f" ((OP) == {op}) || \\")
|
2023-07-01 07:28:07 -03:00
|
|
|
self.out.emit(f" 0)")
|
2023-06-11 18:31:59 -03:00
|
|
|
|
2023-06-28 15:28:07 -03:00
|
|
|
def write_uop_items(self, make_text: typing.Callable[[str, int], str]) -> None:
|
2023-06-26 23:02:57 -03:00
|
|
|
"""Write '#define XXX NNN' for each uop"""
|
2023-06-28 15:28:07 -03:00
|
|
|
counter = 300 # TODO: Avoid collision with pseudo instructions
|
2023-07-13 16:14:51 -03:00
|
|
|
seen = set()
|
2023-07-10 20:04:26 -03:00
|
|
|
|
2023-06-26 23:02:57 -03:00
|
|
|
def add(name: str) -> None:
|
2023-07-13 16:14:51 -03:00
|
|
|
if name in seen:
|
|
|
|
return
|
2023-06-26 23:02:57 -03:00
|
|
|
nonlocal counter
|
2023-06-28 15:28:07 -03:00
|
|
|
self.out.emit(make_text(name, counter))
|
2023-06-26 23:02:57 -03:00
|
|
|
counter += 1
|
2023-07-13 16:14:51 -03:00
|
|
|
seen.add(name)
|
2023-07-10 20:04:26 -03:00
|
|
|
|
2023-07-13 16:14:51 -03:00
|
|
|
# These two are first by convention
|
2023-06-26 23:02:57 -03:00
|
|
|
add("EXIT_TRACE")
|
2023-07-03 17:05:11 -03:00
|
|
|
add("SAVE_IP")
|
2023-07-10 20:04:26 -03:00
|
|
|
|
2023-06-26 23:02:57 -03:00
|
|
|
for instr in self.instrs.values():
|
|
|
|
if instr.kind == "op" and instr.is_viable_uop():
|
|
|
|
add(instr.name)
|
|
|
|
|
2023-06-29 17:02:25 -03:00
|
|
|
def write_macro_expansions(self, name: str, parts: MacroParts) -> None:
|
2023-06-28 15:28:07 -03:00
|
|
|
"""Write the macro expansions for a macro-instruction."""
|
|
|
|
        # TODO: Refactor to share code with write_body(), is_viable_uop(), etc.
|
|
|
|
offset = 0 # Cache effect offset
|
|
|
|
expansions: list[tuple[str, int, int]] = [] # [(name, size, offset), ...]
|
2023-06-29 17:02:25 -03:00
|
|
|
for part in parts:
|
2023-06-28 15:28:07 -03:00
|
|
|
if isinstance(part, Component):
|
|
|
|
# All component instructions must be viable uops
|
|
|
|
if not part.instr.is_viable_uop():
|
2023-06-29 17:02:25 -03:00
|
|
|
print(f"NOTE: Part {part.instr.name} of {name} is not a viable uop")
|
2023-06-28 15:28:07 -03:00
|
|
|
return
|
2023-07-17 16:12:33 -03:00
|
|
|
if not part.active_caches:
|
2023-07-07 14:42:10 -03:00
|
|
|
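                    # No active cache entries: the uop receives the full oparg
                    # and reads nothing from the cache.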
size, offset = OPARG_SIZES["OPARG_FULL"], 0
|
2023-06-28 15:28:07 -03:00
|
|
|
else:
|
|
|
|
                    # If this assert triggers, is_viable_uop() lied
|
2023-06-29 17:02:25 -03:00
|
|
|
assert len(part.active_caches) == 1, (name, part.instr.name)
|
2023-06-28 15:28:07 -03:00
|
|
|
cache = part.active_caches[0]
|
|
|
|
size, offset = cache.effect.size, cache.offset
|
|
|
|
expansions.append((part.instr.name, size, offset))
|
2023-06-29 17:02:25 -03:00
|
|
|
assert len(expansions) > 0, f"Macro {name} has empty expansion?!"
|
2023-07-07 14:42:10 -03:00
|
|
|
self.write_expansions(name, expansions)
|
|
|
|
|
|
|
|
def write_super_expansions(self, name: str) -> None:
|
|
|
|
"""Write special macro expansions for super-instructions.
|
|
|
|
|
|
|
|
If you get an assertion failure here, you probably have accidentally
|
|
|
|
violated one of the assumptions here.
|
|
|
|
|
|
|
|
- A super-instruction's name is of the form FIRST_SECOND where
|
|
|
|
FIRST and SECOND are regular instructions whose name has the
|
|
|
|
form FOO_BAR. Thus, there must be exactly 3 underscores.
|
|
|
|
Example: LOAD_CONST_STORE_FAST.
|
|
|
|
|
|
|
|
        - A super-instruction's body uses `oparg1` and `oparg2`, and no
|
|
|
|
other instruction's body uses those variable names.
|
|
|
|
|
|
|
|
- A super-instruction has no active (used) cache entries.
|
|
|
|
|
|
|
|
In the expansion, the first instruction's operand is all but the
|
|
|
|
bottom 4 bits of the super-instruction's oparg, and the second
|
|
|
|
instruction's operand is the bottom 4 bits. We use the special
|
|
|
|
size codes OPARG_TOP and OPARG_BOTTOM for these.
|
|
|
|
"""
|
|
|
|
pieces = name.split("_")
|
|
|
|
assert len(pieces) == 4, f"{name} doesn't look like a super-instr"
|
|
|
|
name1 = "_".join(pieces[:2])
|
|
|
|
name2 = "_".join(pieces[2:])
|
|
|
|
assert name1 in self.instrs, f"{name1} doesn't match any instr"
|
|
|
|
assert name2 in self.instrs, f"{name2} doesn't match any instr"
|
|
|
|
instr1 = self.instrs[name1]
|
|
|
|
instr2 = self.instrs[name2]
|
|
|
|
assert not instr1.active_caches, f"{name1} has active caches"
|
|
|
|
assert not instr2.active_caches, f"{name2} has active caches"
|
|
|
|
expansions = [
|
|
|
|
(name1, OPARG_SIZES["OPARG_TOP"], 0),
|
|
|
|
(name2, OPARG_SIZES["OPARG_BOTTOM"], 0),
|
|
|
|
]
|
|
|
|
self.write_expansions(name, expansions)
|
|
|
|
|
|
|
|
def write_expansions(self, name: str, expansions: list[tuple[str, int, int]]) -> None:
|
2023-06-28 15:28:07 -03:00
|
|
|
pieces = [f"{{ {name}, {size}, {offset} }}" for name, size, offset in expansions]
|
|
|
|
self.out.emit(
|
2023-06-29 17:02:25 -03:00
|
|
|
f"[{name}] = "
|
2023-07-07 14:42:10 -03:00
|
|
|
f"{{ .nuops = {len(pieces)}, .uops = {{ {', '.join(pieces)} }} }},"
|
2023-06-28 15:28:07 -03:00
|
|
|
)
|
|
|
|
|
2023-06-21 20:14:43 -03:00
|
|
|
def emit_metadata_entry(
|
|
|
|
self, name: str, fmt: str, flags: InstructionFlags
|
|
|
|
) -> None:
|
|
|
|
flag_names = flags.names(value=True)
|
|
|
|
if not flag_names:
|
|
|
|
flag_names.append("0")
|
2023-01-05 17:01:07 -04:00
|
|
|
self.out.emit(
|
2023-06-21 20:14:43 -03:00
|
|
|
f" [{name}] = {{ true, {INSTR_FMT_PREFIX}{fmt},"
|
|
|
|
f" {' | '.join(flag_names)} }},"
|
2023-01-05 17:01:07 -04:00
|
|
|
)
|
|
|
|
|
2023-06-13 17:42:03 -03:00
|
|
|
def write_metadata_for_inst(self, instr: Instruction) -> None:
|
|
|
|
"""Write metadata for a single instruction."""
|
2023-06-21 20:14:43 -03:00
|
|
|
self.emit_metadata_entry(instr.name, instr.instr_fmt, instr.instr_flags)
|
2023-06-13 17:42:03 -03:00
|
|
|
|
2023-01-05 17:01:07 -04:00
|
|
|
def write_metadata_for_macro(self, mac: MacroInstruction) -> None:
|
|
|
|
"""Write metadata for a macro-instruction."""
|
2023-06-21 20:14:43 -03:00
|
|
|
self.emit_metadata_entry(mac.name, mac.instr_fmt, mac.instr_flags)
|
2023-01-05 17:01:07 -04:00
|
|
|
|
2023-06-11 18:31:59 -03:00
|
|
|
def write_metadata_for_pseudo(self, ps: PseudoInstruction) -> None:
|
|
|
|
"""Write metadata for a macro-instruction."""
|
2023-06-21 20:14:43 -03:00
|
|
|
self.emit_metadata_entry(ps.name, ps.instr_fmt, ps.instr_flags)
|
2023-06-11 18:31:59 -03:00
|
|
|
|
2022-12-02 23:57:30 -04:00
|
|
|
def write_instructions(self) -> None:
|
2022-11-17 21:06:07 -04:00
|
|
|
"""Write instructions to output file."""
|
2022-12-02 23:57:30 -04:00
|
|
|
with open(self.output_filename, "w") as f:
|
2023-03-15 12:37:36 -03:00
|
|
|
# Create formatter
|
|
|
|
self.out = Formatter(f, 8, self.emit_line_directives)
|
2022-11-17 21:06:07 -04:00
|
|
|
|
2023-06-26 23:02:57 -03:00
|
|
|
self.write_provenance_header()
|
2022-12-02 23:57:30 -04:00
|
|
|
|
2022-12-08 19:54:07 -04:00
|
|
|
# Write and count instructions of all kinds
|
2022-11-22 20:04:57 -04:00
|
|
|
n_instrs = 0
|
2022-12-02 23:57:30 -04:00
|
|
|
n_macros = 0
|
2023-06-11 18:31:59 -03:00
|
|
|
n_pseudos = 0
|
2022-12-08 19:54:07 -04:00
|
|
|
for thing in self.everything:
|
|
|
|
match thing:
|
2023-03-04 00:59:21 -04:00
|
|
|
case OverriddenInstructionPlaceHolder():
|
|
|
|
self.write_overridden_instr_place_holder(thing)
|
2022-12-08 19:54:07 -04:00
|
|
|
case parser.InstDef():
|
2023-01-05 17:01:07 -04:00
|
|
|
if thing.kind != "op":
|
2022-12-08 19:54:07 -04:00
|
|
|
n_instrs += 1
|
|
|
|
self.write_instr(self.instrs[thing.name])
|
|
|
|
case parser.Macro():
|
|
|
|
n_macros += 1
|
|
|
|
self.write_macro(self.macro_instrs[thing.name])
|
2023-06-11 18:31:59 -03:00
|
|
|
case parser.Pseudo():
|
|
|
|
n_pseudos += 1
|
2022-12-08 19:54:07 -04:00
|
|
|
case _:
|
|
|
|
typing.assert_never(thing)
|
2022-11-22 20:04:57 -04:00
|
|
|
|
2022-12-02 23:57:30 -04:00
|
|
|
print(
|
2023-06-12 14:47:08 -03:00
|
|
|
f"Wrote {n_instrs} instructions, {n_macros} macros, "
|
|
|
|
f"and {n_pseudos} pseudos to {self.output_filename}",
|
2022-12-02 23:57:30 -04:00
|
|
|
file=sys.stderr,
|
|
|
|
)
|
2022-11-22 20:04:57 -04:00
|
|
|
|
2023-06-26 23:02:57 -03:00
|
|
|
def write_executor_instructions(self) -> None:
|
|
|
|
"""Generate cases for the Tier 2 interpreter."""
|
|
|
|
with open(self.executor_filename, "w") as f:
|
2023-06-27 10:56:39 -03:00
|
|
|
self.out = Formatter(f, 8, self.emit_line_directives)
|
2023-06-26 23:02:57 -03:00
|
|
|
self.write_provenance_header()
|
|
|
|
for thing in self.everything:
|
|
|
|
match thing:
|
|
|
|
case OverriddenInstructionPlaceHolder():
|
2023-06-28 15:28:07 -03:00
|
|
|
# TODO: Is this helpful?
|
2023-06-26 23:02:57 -03:00
|
|
|
self.write_overridden_instr_place_holder(thing)
|
|
|
|
case parser.InstDef():
|
|
|
|
instr = self.instrs[thing.name]
|
|
|
|
if instr.is_viable_uop():
|
|
|
|
self.out.emit("")
|
|
|
|
with self.out.block(f"case {thing.name}:"):
|
|
|
|
instr.write(self.out, tier=TIER_TWO)
|
|
|
|
self.out.emit("break;")
|
2023-07-17 15:02:58 -03:00
|
|
|
# elif instr.kind != "op":
|
|
|
|
# print(f"NOTE: {thing.name} is not a viable uop")
|
2023-06-26 23:02:57 -03:00
|
|
|
case parser.Macro():
|
2023-06-28 15:28:07 -03:00
|
|
|
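                        # Macros get no Tier 2 case of their own here; they are
                        # covered by their uop expansions recorded in
                        # _PyOpcode_macro_expansion.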
pass
|
2023-06-26 23:02:57 -03:00
|
|
|
case parser.Pseudo():
|
|
|
|
pass
|
|
|
|
case _:
|
|
|
|
typing.assert_never(thing)
|
|
|
|
print(
|
|
|
|
f"Wrote some stuff to {self.executor_filename}",
|
|
|
|
file=sys.stderr,
|
|
|
|
)
|
|
|
|
|
2023-03-04 00:59:21 -04:00
|
|
|
def write_overridden_instr_place_holder(self,
|
|
|
|
place_holder: OverriddenInstructionPlaceHolder) -> None:
|
|
|
|
self.out.emit("")
|
|
|
|
self.out.emit(
|
2023-06-19 19:47:04 -03:00
|
|
|
f"{self.out.comment} TARGET({place_holder.name}) overridden by later definition")
|
2023-03-04 00:59:21 -04:00
|
|
|
|
2022-12-08 19:54:07 -04:00
|
|
|
def write_instr(self, instr: Instruction) -> None:
|
|
|
|
name = instr.name
|
|
|
|
self.out.emit("")
|
2023-03-04 00:59:21 -04:00
|
|
|
if instr.inst.override:
|
2023-06-19 19:47:04 -03:00
|
|
|
self.out.emit("{self.out.comment} Override")
|
2022-12-08 19:54:07 -04:00
|
|
|
with self.out.block(f"TARGET({name})"):
|
|
|
|
if instr.predicted:
|
|
|
|
self.out.emit(f"PREDICTED({name});")
|
|
|
|
instr.write(self.out)
|
|
|
|
if not instr.always_exits:
|
2023-02-08 00:03:22 -04:00
|
|
|
if instr.check_eval_breaker:
|
|
|
|
self.out.emit("CHECK_EVAL_BREAKER();")
|
2022-12-08 19:54:07 -04:00
|
|
|
self.out.emit(f"DISPATCH();")
|
|
|
|
|
2022-12-02 23:57:30 -04:00
|
|
|
def write_macro(self, mac: MacroInstruction) -> None:
|
|
|
|
"""Write code for a macro instruction."""
|
2023-01-30 21:27:51 -04:00
|
|
|
last_instr: Instruction | None = None
|
2023-06-12 14:47:08 -03:00
|
|
|
with self.wrap_macro(mac):
|
2022-12-02 23:57:30 -04:00
|
|
|
cache_adjust = 0
|
|
|
|
for part in mac.parts:
|
|
|
|
match part:
|
|
|
|
case parser.CacheEffect(size=size):
|
|
|
|
cache_adjust += size
|
|
|
|
case Component() as comp:
|
2023-01-30 21:27:51 -04:00
|
|
|
last_instr = comp.instr
|
2023-06-28 15:28:07 -03:00
|
|
|
comp.write_body(self.out)
|
2022-12-02 23:57:30 -04:00
|
|
|
cache_adjust += comp.instr.cache_offset
|
|
|
|
|
|
|
|
if cache_adjust:
|
2023-02-28 12:49:35 -04:00
|
|
|
self.out.emit(f"next_instr += {cache_adjust};")
|
2022-12-02 23:57:30 -04:00
|
|
|
|
2023-01-30 21:27:51 -04:00
|
|
|
if (
|
2023-07-16 12:16:34 -03:00
|
|
|
(family := self.families.get(mac.name))
|
|
|
|
and mac.name == family.name
|
2023-01-30 21:27:51 -04:00
|
|
|
and (cache_size := family.size)
|
|
|
|
):
|
|
|
|
self.out.emit(
|
|
|
|
f"static_assert({cache_size} == "
|
|
|
|
f'{cache_adjust}, "incorrect cache size");'
|
|
|
|
)
|
|
|
|
|
2022-12-02 23:57:30 -04:00
|
|
|
@contextlib.contextmanager
|
2023-06-12 14:47:08 -03:00
|
|
|
def wrap_macro(self, mac: MacroInstruction):
|
|
|
|
"""Boilerplate for macro instructions."""
|
2022-12-08 17:31:27 -04:00
|
|
|
# TODO: Somewhere (where?) make it so that if one instruction
|
|
|
|
# has an output that is input to another, and the variable names
|
|
|
|
# and types match and don't conflict with other instructions,
|
|
|
|
# that variable is declared with the right name and type in the
|
|
|
|
# outer block, rather than trusting the compiler to optimize it.
|
2022-12-02 23:57:30 -04:00
|
|
|
self.out.emit("")
|
2023-06-12 14:47:08 -03:00
|
|
|
with self.out.block(f"TARGET({mac.name})"):
|
|
|
|
if mac.predicted:
|
|
|
|
self.out.emit(f"PREDICTED({mac.name});")
|
2023-06-14 17:50:48 -03:00
|
|
|
|
|
|
|
# The input effects should have no conditionals.
|
|
|
|
# Only the output effects do (for now).
|
|
|
|
ieffects = [
|
|
|
|
StackEffect(eff.name, eff.type) if eff.cond else eff
|
|
|
|
for eff in mac.stack
|
|
|
|
]
|
|
|
|
|
|
|
|
for i, var in reversed(list(enumerate(ieffects))):
|
2022-12-08 17:31:27 -04:00
|
|
|
src = None
|
2023-06-12 14:47:08 -03:00
|
|
|
if i < mac.initial_sp:
|
|
|
|
src = StackEffect(f"stack_pointer[-{mac.initial_sp - i}]", "")
|
2022-12-08 17:31:27 -04:00
|
|
|
self.out.declare(var, src)
|
2022-12-02 23:57:30 -04:00
|
|
|
|
|
|
|
yield
|
|
|
|
|
2023-06-14 17:50:48 -03:00
|
|
|
self.out.stack_adjust(ieffects[:mac.initial_sp], mac.stack[:mac.final_sp])
|
2023-01-17 19:59:19 -04:00
|
|
|
|
2023-06-12 14:47:08 -03:00
|
|
|
for i, var in enumerate(reversed(mac.stack[: mac.final_sp]), 1):
|
2023-02-28 12:49:35 -04:00
|
|
|
dst = StackEffect(f"stack_pointer[-{i}]", "")
|
2022-12-08 17:31:27 -04:00
|
|
|
self.out.assign(dst, var)
|
2022-12-02 23:57:30 -04:00
|
|
|
|
|
|
|
self.out.emit(f"DISPATCH();")


def prettify_filename(filename: str) -> str:
    # Make the filename more user-friendly and less platform-specific;
    # it is only used for error reporting at this point.
    filename = filename.replace("\\", "/")
    if filename.startswith("./"):
        filename = filename[2:]
    if filename.endswith(".new"):
        filename = filename[:-4]
    return filename
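# Minimal usage sketch (illustrative only; the path below is hypothetical):
#     prettify_filename(".\\Python\\bytecodes.c.new")  # -> "Python/bytecodes.c"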


def extract_block_text(block: parser.Block) -> tuple[list[str], bool, int]:
    # Get lines of text with proper dedent
    blocklines = block.text.splitlines(True)
    first_token: lx.Token = block.tokens[0]  # IndexError means the context is broken
    block_line = first_token.begin[0]

    # Remove blank lines from both ends
    while blocklines and not blocklines[0].strip():
        blocklines.pop(0)
        block_line += 1
    while blocklines and not blocklines[-1].strip():
        blocklines.pop()

    # Remove leading and trailing braces
    assert blocklines and blocklines[0].strip() == "{"
    assert blocklines and blocklines[-1].strip() == "}"
    blocklines.pop()
    blocklines.pop(0)
    block_line += 1

    # Remove trailing blank lines
    while blocklines and not blocklines[-1].strip():
        blocklines.pop()

    # Separate CHECK_EVAL_BREAKER() macro from end
    check_eval_breaker = \
        blocklines != [] and blocklines[-1].strip() == "CHECK_EVAL_BREAKER();"
    if check_eval_breaker:
        del blocklines[-1]

    return blocklines, check_eval_breaker, block_line
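# Rough sketch of the transformation (illustrative only; the C body is made up,
# not quoted from bytecodes.c).  Given a block whose text is:
#     {
#         res = NULL;
#         CHECK_EVAL_BREAKER();
#     }
# the braces and the trailing CHECK_EVAL_BREAKER() call are stripped, leaving
# the "res = NULL;" line, check_eval_breaker == True, and the source line
# number of the first remaining line.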


def always_exits(lines: list[str]) -> bool:
    """Determine whether a block always ends in a return/goto/etc."""
    if not lines:
        return False
    line = lines[-1].rstrip()
    # Indent must match exactly (TODO: Do something better)
    if line[:12] != " " * 12:
        return False
    line = line[12:]
    return line.startswith(
        (
            "goto ",
            "return ",
            "DISPATCH",
            "GO_TO_",
            "Py_UNREACHABLE()",
            "ERROR_IF(true, ",
        )
    )
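# Minimal usage sketch (the C lines are hypothetical, shown only to illustrate
# the exact 12-space indent required above):
#     always_exits(["            goto error;\n"])  # -> True
#     always_exits(["    goto error;\n"])          # -> False (indent too shallow)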


def variable_used(node: parser.Node, name: str) -> bool:
    """Determine whether a variable with a given name is used in a node."""
    return any(
        token.kind == "IDENTIFIER" and token.text == name for token in node.tokens
    )
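# Usage sketch (hedged; `node` stands for any parsed instruction definition):
#     variable_used(node, "oparg") is True iff an IDENTIFIER token spelled
#     exactly "oparg" occurs anywhere in the node's token stream.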


def variable_used_unspecialized(node: parser.Node, name: str) -> bool:
    """Like variable_used(), but skips #if ENABLE_SPECIALIZATION blocks."""
    tokens: list[lx.Token] = []
    skipping = False
    for i, token in enumerate(node.tokens):
        if token.kind == "MACRO":
            text = "".join(token.text.split())
            # TODO: Handle nested #if
            if text == "#if":
                if (
                    i + 1 < len(node.tokens)
                    and node.tokens[i + 1].text == "ENABLE_SPECIALIZATION"
                ):
                    skipping = True
            elif text in ("#else", "#endif"):
                skipping = False
        if not skipping:
            tokens.append(token)
    return any(token.kind == "IDENTIFIER" and token.text == name for token in tokens)
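# Illustrative sketch (the C fragment is made up, not quoted from bytecodes.c):
# given a body like
#     #if ENABLE_SPECIALIZATION
#     _Py_Specialize_LoadAttr(owner, next_instr, name);
#     #endif
#     res = something(owner);
# variable_used_unspecialized(node, "owner") is still True because of the last
# line, while a name referenced only inside the #if ENABLE_SPECIALIZATION block
# is ignored.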


def main():
    """Parse command line, parse input, analyze, write output."""
    args = arg_parser.parse_args()  # Prints message and sys.exit(2) on error
    if len(args.input) == 0:
        args.input.append(DEFAULT_INPUT)

    # Raises OSError if input unreadable
    a = Analyzer(
        args.input, args.output, args.metadata, args.pymetadata, args.executor_cases
    )

    if args.emit_line_directives:
        a.emit_line_directives = True
    a.parse()  # Raises SyntaxError on failure
    a.analyze()  # Prints messages and sets a.errors on failure
    if a.errors:
        sys.exit(f"Found {a.errors} errors")
    a.write_instructions()  # Raises OSError if output can't be written
    a.write_metadata()
    a.write_executor_instructions()
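# Hedged invocation sketch (option spellings come from arg_parser, defined
# earlier in this file; treat the flag below as illustrative, not authoritative):
#     python Tools/cases_generator/generate_cases.py --emit-line-directives
# With no positional inputs, DEFAULT_INPUT (Python/bytecodes.c) is used and the
# outputs default to the DEFAULT_* paths declared at the top of this file
# (generated_cases.c.h, the opcode metadata files, and executor_cases.c.h).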


if __name__ == "__main__":
    main()