mirror of https://github.com/python/cpython
GH-120024: Tidy up case generator code a bit. (GH-122780)
This commit is contained in:
parent
0d9c123d1a
commit
81c739e2dc
|
@ -882,7 +882,6 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// _DO_CALL
|
// _DO_CALL
|
||||||
args = &stack_pointer[-oparg];
|
|
||||||
self_or_null = maybe_self;
|
self_or_null = maybe_self;
|
||||||
callable = func;
|
callable = func;
|
||||||
{
|
{
|
||||||
|
@ -3651,7 +3650,6 @@
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// _MONITOR_CALL
|
// _MONITOR_CALL
|
||||||
args = &stack_pointer[-oparg];
|
|
||||||
{
|
{
|
||||||
int is_meth = !PyStackRef_IsNull(maybe_self);
|
int is_meth = !PyStackRef_IsNull(maybe_self);
|
||||||
PyObject *function = PyStackRef_AsPyObjectBorrow(func);
|
PyObject *function = PyStackRef_AsPyObjectBorrow(func);
|
||||||
|
@ -3672,7 +3670,6 @@
|
||||||
if (err) goto error;
|
if (err) goto error;
|
||||||
}
|
}
|
||||||
// _DO_CALL
|
// _DO_CALL
|
||||||
args = &stack_pointer[-oparg];
|
|
||||||
self_or_null = maybe_self;
|
self_or_null = maybe_self;
|
||||||
callable = func;
|
callable = func;
|
||||||
{
|
{
|
||||||
|
|
|
@ -62,7 +62,6 @@ class Properties:
|
||||||
return not self.error_with_pop and not self.error_without_pop
|
return not self.error_with_pop and not self.error_without_pop
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
SKIP_PROPERTIES = Properties(
|
SKIP_PROPERTIES = Properties(
|
||||||
escapes=False,
|
escapes=False,
|
||||||
error_with_pop=False,
|
error_with_pop=False,
|
||||||
|
@ -99,7 +98,6 @@ class Skip:
|
||||||
|
|
||||||
|
|
||||||
class Flush:
|
class Flush:
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def properties(self) -> Properties:
|
def properties(self) -> Properties:
|
||||||
return SKIP_PROPERTIES
|
return SKIP_PROPERTIES
|
||||||
|
@ -112,6 +110,7 @@ class Flush:
|
||||||
def size(self) -> int:
|
def size(self) -> int:
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class StackItem:
|
class StackItem:
|
||||||
name: str
|
name: str
|
||||||
|
@ -133,6 +132,7 @@ class StackItem:
|
||||||
def get_size(self) -> str:
|
def get_size(self) -> str:
|
||||||
return self.size if self.size else "1"
|
return self.size if self.size else "1"
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class StackEffect:
|
class StackEffect:
|
||||||
inputs: list[StackItem]
|
inputs: list[StackItem]
|
||||||
|
@ -150,6 +150,7 @@ class CacheEntry:
|
||||||
def __str__(self) -> str:
|
def __str__(self) -> str:
|
||||||
return f"{self.name}/{self.size}"
|
return f"{self.name}/{self.size}"
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Uop:
|
class Uop:
|
||||||
name: str
|
name: str
|
||||||
|
@ -163,7 +164,7 @@ class Uop:
|
||||||
_size: int = -1
|
_size: int = -1
|
||||||
implicitly_created: bool = False
|
implicitly_created: bool = False
|
||||||
replicated = 0
|
replicated = 0
|
||||||
replicates : "Uop | None" = None
|
replicates: "Uop | None" = None
|
||||||
|
|
||||||
def dump(self, indent: str) -> None:
|
def dump(self, indent: str) -> None:
|
||||||
print(
|
print(
|
||||||
|
@ -308,19 +309,26 @@ def override_error(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def convert_stack_item(item: parser.StackEffect, replace_op_arg_1: str | None) -> StackItem:
|
def convert_stack_item(
|
||||||
|
item: parser.StackEffect, replace_op_arg_1: str | None
|
||||||
|
) -> StackItem:
|
||||||
cond = item.cond
|
cond = item.cond
|
||||||
if replace_op_arg_1 and OPARG_AND_1.match(item.cond):
|
if replace_op_arg_1 and OPARG_AND_1.match(item.cond):
|
||||||
cond = replace_op_arg_1
|
cond = replace_op_arg_1
|
||||||
return StackItem(
|
return StackItem(item.name, item.type, cond, item.size)
|
||||||
item.name, item.type, cond, item.size
|
|
||||||
)
|
|
||||||
|
|
||||||
def analyze_stack(op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | None = None) -> StackEffect:
|
|
||||||
|
def analyze_stack(
|
||||||
|
op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | None = None
|
||||||
|
) -> StackEffect:
|
||||||
inputs: list[StackItem] = [
|
inputs: list[StackItem] = [
|
||||||
convert_stack_item(i, replace_op_arg_1) for i in op.inputs if isinstance(i, parser.StackEffect)
|
convert_stack_item(i, replace_op_arg_1)
|
||||||
|
for i in op.inputs
|
||||||
|
if isinstance(i, parser.StackEffect)
|
||||||
|
]
|
||||||
|
outputs: list[StackItem] = [
|
||||||
|
convert_stack_item(i, replace_op_arg_1) for i in op.outputs
|
||||||
]
|
]
|
||||||
outputs: list[StackItem] = [convert_stack_item(i, replace_op_arg_1) for i in op.outputs]
|
|
||||||
# Mark variables with matching names at the base of the stack as "peek"
|
# Mark variables with matching names at the base of the stack as "peek"
|
||||||
modified = False
|
modified = False
|
||||||
for input, output in zip(inputs, outputs):
|
for input, output in zip(inputs, outputs):
|
||||||
|
@ -331,9 +339,11 @@ def analyze_stack(op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | No
|
||||||
if isinstance(op, parser.InstDef):
|
if isinstance(op, parser.InstDef):
|
||||||
output_names = [out.name for out in outputs]
|
output_names = [out.name for out in outputs]
|
||||||
for input in inputs:
|
for input in inputs:
|
||||||
if (variable_used(op, input.name) or
|
if (
|
||||||
variable_used(op, "DECREF_INPUTS") or
|
variable_used(op, input.name)
|
||||||
(not input.peek and input.name in output_names)):
|
or variable_used(op, "DECREF_INPUTS")
|
||||||
|
or (not input.peek and input.name in output_names)
|
||||||
|
):
|
||||||
input.used = True
|
input.used = True
|
||||||
for output in outputs:
|
for output in outputs:
|
||||||
if variable_used(op, output.name):
|
if variable_used(op, output.name):
|
||||||
|
@ -359,9 +369,9 @@ def analyze_deferred_refs(node: parser.InstDef) -> dict[lexer.Token, str | None]
|
||||||
def find_assignment_target(idx: int) -> list[lexer.Token]:
|
def find_assignment_target(idx: int) -> list[lexer.Token]:
|
||||||
"""Find the tokens that make up the left-hand side of an assignment"""
|
"""Find the tokens that make up the left-hand side of an assignment"""
|
||||||
offset = 1
|
offset = 1
|
||||||
for tkn in reversed(node.block.tokens[:idx-1]):
|
for tkn in reversed(node.block.tokens[: idx - 1]):
|
||||||
if tkn.kind == "SEMI" or tkn.kind == "LBRACE" or tkn.kind == "RBRACE":
|
if tkn.kind == "SEMI" or tkn.kind == "LBRACE" or tkn.kind == "RBRACE":
|
||||||
return node.block.tokens[idx-offset:idx-1]
|
return node.block.tokens[idx - offset : idx - 1]
|
||||||
offset += 1
|
offset += 1
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
@ -370,42 +380,54 @@ def analyze_deferred_refs(node: parser.InstDef) -> dict[lexer.Token, str | None]
|
||||||
if tkn.kind != "IDENTIFIER" or tkn.text != "PyStackRef_FromPyObjectNew":
|
if tkn.kind != "IDENTIFIER" or tkn.text != "PyStackRef_FromPyObjectNew":
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if idx == 0 or node.block.tokens[idx-1].kind != "EQUALS":
|
if idx == 0 or node.block.tokens[idx - 1].kind != "EQUALS":
|
||||||
raise analysis_error("Expected '=' before PyStackRef_FromPyObjectNew", tkn)
|
raise analysis_error("Expected '=' before PyStackRef_FromPyObjectNew", tkn)
|
||||||
|
|
||||||
lhs = find_assignment_target(idx)
|
lhs = find_assignment_target(idx)
|
||||||
if len(lhs) == 0:
|
if len(lhs) == 0:
|
||||||
raise analysis_error("PyStackRef_FromPyObjectNew() must be assigned to an output", tkn)
|
raise analysis_error(
|
||||||
|
"PyStackRef_FromPyObjectNew() must be assigned to an output", tkn
|
||||||
|
)
|
||||||
|
|
||||||
if lhs[0].kind == "TIMES" or any(t.kind == "ARROW" or t.kind == "LBRACKET" for t in lhs[1:]):
|
if lhs[0].kind == "TIMES" or any(
|
||||||
|
t.kind == "ARROW" or t.kind == "LBRACKET" for t in lhs[1:]
|
||||||
|
):
|
||||||
# Don't handle: *ptr = ..., ptr->field = ..., or ptr[field] = ...
|
# Don't handle: *ptr = ..., ptr->field = ..., or ptr[field] = ...
|
||||||
# Assume that they are visible to the GC.
|
# Assume that they are visible to the GC.
|
||||||
refs[tkn] = None
|
refs[tkn] = None
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if len(lhs) != 1 or lhs[0].kind != "IDENTIFIER":
|
if len(lhs) != 1 or lhs[0].kind != "IDENTIFIER":
|
||||||
raise analysis_error("PyStackRef_FromPyObjectNew() must be assigned to an output", tkn)
|
raise analysis_error(
|
||||||
|
"PyStackRef_FromPyObjectNew() must be assigned to an output", tkn
|
||||||
|
)
|
||||||
|
|
||||||
name = lhs[0].text
|
name = lhs[0].text
|
||||||
if not any(var.name == name for var in node.outputs):
|
if not any(var.name == name for var in node.outputs):
|
||||||
raise analysis_error(f"PyStackRef_FromPyObjectNew() must be assigned to an output, not '{name}'", tkn)
|
raise analysis_error(
|
||||||
|
f"PyStackRef_FromPyObjectNew() must be assigned to an output, not '{name}'",
|
||||||
|
tkn,
|
||||||
|
)
|
||||||
|
|
||||||
refs[tkn] = name
|
refs[tkn] = name
|
||||||
|
|
||||||
return refs
|
return refs
|
||||||
|
|
||||||
|
|
||||||
def variable_used(node: parser.InstDef, name: str) -> bool:
|
def variable_used(node: parser.InstDef, name: str) -> bool:
|
||||||
"""Determine whether a variable with a given name is used in a node."""
|
"""Determine whether a variable with a given name is used in a node."""
|
||||||
return any(
|
return any(
|
||||||
token.kind == "IDENTIFIER" and token.text == name for token in node.block.tokens
|
token.kind == "IDENTIFIER" and token.text == name for token in node.block.tokens
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def oparg_used(node: parser.InstDef) -> bool:
|
def oparg_used(node: parser.InstDef) -> bool:
|
||||||
"""Determine whether `oparg` is used in a node."""
|
"""Determine whether `oparg` is used in a node."""
|
||||||
return any(
|
return any(
|
||||||
token.kind == "IDENTIFIER" and token.text == "oparg" for token in node.tokens
|
token.kind == "IDENTIFIER" and token.text == "oparg" for token in node.tokens
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def tier_variable(node: parser.InstDef) -> int | None:
|
def tier_variable(node: parser.InstDef) -> int | None:
|
||||||
"""Determine whether a tier variable is used in a node."""
|
"""Determine whether a tier variable is used in a node."""
|
||||||
for token in node.tokens:
|
for token in node.tokens:
|
||||||
|
@ -416,6 +438,7 @@ def tier_variable(node: parser.InstDef) -> int | None:
|
||||||
return int(token.text[-1])
|
return int(token.text[-1])
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def has_error_with_pop(op: parser.InstDef) -> bool:
|
def has_error_with_pop(op: parser.InstDef) -> bool:
|
||||||
return (
|
return (
|
||||||
variable_used(op, "ERROR_IF")
|
variable_used(op, "ERROR_IF")
|
||||||
|
@ -424,6 +447,7 @@ def has_error_with_pop(op: parser.InstDef) -> bool:
|
||||||
or variable_used(op, "resume_with_error")
|
or variable_used(op, "resume_with_error")
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def has_error_without_pop(op: parser.InstDef) -> bool:
|
def has_error_without_pop(op: parser.InstDef) -> bool:
|
||||||
return (
|
return (
|
||||||
variable_used(op, "ERROR_NO_POP")
|
variable_used(op, "ERROR_NO_POP")
|
||||||
|
@ -606,8 +630,10 @@ def stack_effect_only_peeks(instr: parser.InstDef) -> bool:
|
||||||
for s, other in zip(stack_inputs, instr.outputs)
|
for s, other in zip(stack_inputs, instr.outputs)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
OPARG_AND_1 = re.compile("\\(*oparg *& *1")
|
OPARG_AND_1 = re.compile("\\(*oparg *& *1")
|
||||||
|
|
||||||
|
|
||||||
def effect_depends_on_oparg_1(op: parser.InstDef) -> bool:
|
def effect_depends_on_oparg_1(op: parser.InstDef) -> bool:
|
||||||
for effect in op.inputs:
|
for effect in op.inputs:
|
||||||
if isinstance(effect, parser.CacheEffect):
|
if isinstance(effect, parser.CacheEffect):
|
||||||
|
@ -623,6 +649,7 @@ def effect_depends_on_oparg_1(op: parser.InstDef) -> bool:
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def compute_properties(op: parser.InstDef) -> Properties:
|
def compute_properties(op: parser.InstDef) -> Properties:
|
||||||
has_free = (
|
has_free = (
|
||||||
variable_used(op, "PyCell_New")
|
variable_used(op, "PyCell_New")
|
||||||
|
@ -667,7 +694,12 @@ def compute_properties(op: parser.InstDef) -> Properties:
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def make_uop(name: str, op: parser.InstDef, inputs: list[parser.InputEffect], uops: dict[str, Uop]) -> Uop:
|
def make_uop(
|
||||||
|
name: str,
|
||||||
|
op: parser.InstDef,
|
||||||
|
inputs: list[parser.InputEffect],
|
||||||
|
uops: dict[str, Uop],
|
||||||
|
) -> Uop:
|
||||||
result = Uop(
|
result = Uop(
|
||||||
name=name,
|
name=name,
|
||||||
context=op.context,
|
context=op.context,
|
||||||
|
@ -685,7 +717,9 @@ def make_uop(name: str, op: parser.InstDef, inputs: list[parser.InputEffect], uo
|
||||||
properties = compute_properties(op)
|
properties = compute_properties(op)
|
||||||
if properties.oparg:
|
if properties.oparg:
|
||||||
# May not need oparg anymore
|
# May not need oparg anymore
|
||||||
properties.oparg = any(token.text == "oparg" for token in op.block.tokens)
|
properties.oparg = any(
|
||||||
|
token.text == "oparg" for token in op.block.tokens
|
||||||
|
)
|
||||||
rep = Uop(
|
rep = Uop(
|
||||||
name=name_x,
|
name=name_x,
|
||||||
context=op.context,
|
context=op.context,
|
||||||
|
@ -736,8 +770,10 @@ def add_op(op: parser.InstDef, uops: dict[str, Uop]) -> None:
|
||||||
|
|
||||||
|
|
||||||
def add_instruction(
|
def add_instruction(
|
||||||
where: lexer.Token, name: str, parts: list[Part],
|
where: lexer.Token,
|
||||||
instructions: dict[str, Instruction]
|
name: str,
|
||||||
|
parts: list[Part],
|
||||||
|
instructions: dict[str, Instruction],
|
||||||
) -> None:
|
) -> None:
|
||||||
instructions[name] = Instruction(where, name, parts, None)
|
instructions[name] = Instruction(where, name, parts, None)
|
||||||
|
|
||||||
|
@ -781,7 +817,9 @@ def add_macro(
|
||||||
parts.append(Flush())
|
parts.append(Flush())
|
||||||
else:
|
else:
|
||||||
if part.name not in uops:
|
if part.name not in uops:
|
||||||
raise analysis_error(f"No Uop named {part.name}", macro.tokens[0])
|
raise analysis_error(
|
||||||
|
f"No Uop named {part.name}", macro.tokens[0]
|
||||||
|
)
|
||||||
parts.append(uops[part.name])
|
parts.append(uops[part.name])
|
||||||
case parser.CacheEffect():
|
case parser.CacheEffect():
|
||||||
parts.append(Skip(part.size))
|
parts.append(Skip(part.size))
|
||||||
|
|
|
@ -58,12 +58,13 @@ def emit_to(out: CWriter, tkn_iter: Iterator[Token], end: str) -> None:
|
||||||
parens -= 1
|
parens -= 1
|
||||||
out.emit(tkn)
|
out.emit(tkn)
|
||||||
|
|
||||||
|
|
||||||
ReplacementFunctionType = Callable[
|
ReplacementFunctionType = Callable[
|
||||||
[Token, Iterator[Token], Uop, Stack, Instruction | None], None
|
[Token, Iterator[Token], Uop, Stack, Instruction | None], None
|
||||||
]
|
]
|
||||||
|
|
||||||
class Emitter:
|
|
||||||
|
|
||||||
|
class Emitter:
|
||||||
out: CWriter
|
out: CWriter
|
||||||
_replacers: dict[str, ReplacementFunctionType]
|
_replacers: dict[str, ReplacementFunctionType]
|
||||||
|
|
||||||
|
@ -176,7 +177,6 @@ class Emitter:
|
||||||
else:
|
else:
|
||||||
self.out.emit(f"PyStackRef_CLOSE({var.name});\n")
|
self.out.emit(f"PyStackRef_CLOSE({var.name});\n")
|
||||||
|
|
||||||
|
|
||||||
def sync_sp(
|
def sync_sp(
|
||||||
self,
|
self,
|
||||||
tkn: Token,
|
tkn: Token,
|
||||||
|
@ -190,7 +190,6 @@ class Emitter:
|
||||||
next(tkn_iter)
|
next(tkn_iter)
|
||||||
stack.flush(self.out)
|
stack.flush(self.out)
|
||||||
|
|
||||||
|
|
||||||
def check_eval_breaker(
|
def check_eval_breaker(
|
||||||
self,
|
self,
|
||||||
tkn: Token,
|
tkn: Token,
|
||||||
|
@ -227,7 +226,6 @@ class Emitter:
|
||||||
# unused portions of the stack to NULL.
|
# unused portions of the stack to NULL.
|
||||||
stack.flush_single_var(self.out, target, uop.stack.outputs)
|
stack.flush_single_var(self.out, target, uop.stack.outputs)
|
||||||
|
|
||||||
|
|
||||||
def emit_tokens(
|
def emit_tokens(
|
||||||
self,
|
self,
|
||||||
uop: Uop,
|
uop: Uop,
|
||||||
|
@ -248,6 +246,7 @@ class Emitter:
|
||||||
def emit(self, txt: str | Token) -> None:
|
def emit(self, txt: str | Token) -> None:
|
||||||
self.out.emit(txt)
|
self.out.emit(txt)
|
||||||
|
|
||||||
|
|
||||||
def cflags(p: Properties) -> str:
|
def cflags(p: Properties) -> str:
|
||||||
flags: list[str] = []
|
flags: list[str] = []
|
||||||
if p.oparg:
|
if p.oparg:
|
||||||
|
|
|
@ -91,6 +91,7 @@ def emit_stack_effect_function(
|
||||||
def generate_stack_effect_functions(analysis: Analysis, out: CWriter) -> None:
|
def generate_stack_effect_functions(analysis: Analysis, out: CWriter) -> None:
|
||||||
popped_data: list[tuple[str, str]] = []
|
popped_data: list[tuple[str, str]] = []
|
||||||
pushed_data: list[tuple[str, str]] = []
|
pushed_data: list[tuple[str, str]] = []
|
||||||
|
|
||||||
def add(inst: Instruction | PseudoInstruction) -> None:
|
def add(inst: Instruction | PseudoInstruction) -> None:
|
||||||
stack = get_stack_effect(inst)
|
stack = get_stack_effect(inst)
|
||||||
popped = (-stack.base_offset).to_c()
|
popped = (-stack.base_offset).to_c()
|
||||||
|
|
|
@ -88,8 +88,8 @@ def emit_default(out: CWriter, uop: Uop) -> None:
|
||||||
else:
|
else:
|
||||||
out.emit(f"{var.name} = sym_new_not_null(ctx);\n")
|
out.emit(f"{var.name} = sym_new_not_null(ctx);\n")
|
||||||
|
|
||||||
class OptimizerEmitter(Emitter):
|
|
||||||
|
|
||||||
|
class OptimizerEmitter(Emitter):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@ -139,7 +139,7 @@ def write_uop(
|
||||||
local = locals[var.name]
|
local = locals[var.name]
|
||||||
else:
|
else:
|
||||||
local = Local.local(var)
|
local = Local.local(var)
|
||||||
out.emit(stack.push(local))
|
stack.push(local)
|
||||||
out.start_line()
|
out.start_line()
|
||||||
stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=True)
|
stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=True)
|
||||||
except StackError as ex:
|
except StackError as ex:
|
||||||
|
@ -161,8 +161,9 @@ def generate_abstract_interpreter(
|
||||||
out.emit("\n")
|
out.emit("\n")
|
||||||
base_uop_names = set([uop.name for uop in base.uops.values()])
|
base_uop_names = set([uop.name for uop in base.uops.values()])
|
||||||
for abstract_uop_name in abstract.uops:
|
for abstract_uop_name in abstract.uops:
|
||||||
assert abstract_uop_name in base_uop_names,\
|
assert (
|
||||||
f"All abstract uops should override base uops, but {abstract_uop_name} is not."
|
abstract_uop_name in base_uop_names
|
||||||
|
), f"All abstract uops should override base uops, but {abstract_uop_name} is not."
|
||||||
|
|
||||||
for uop in base.uops.values():
|
for uop in base.uops.values():
|
||||||
override: Uop | None = None
|
override: Uop | None = None
|
||||||
|
@ -192,7 +193,7 @@ def generate_abstract_interpreter(
|
||||||
|
|
||||||
|
|
||||||
def generate_tier2_abstract_from_files(
|
def generate_tier2_abstract_from_files(
|
||||||
filenames: list[str], outfilename: str, debug: bool=False
|
filenames: list[str], outfilename: str, debug: bool = False
|
||||||
) -> None:
|
) -> None:
|
||||||
assert len(filenames) == 2, "Need a base file and an abstract cases file."
|
assert len(filenames) == 2, "Need a base file and an abstract cases file."
|
||||||
base = analyze_files([filenames[0]])
|
base = analyze_files([filenames[0]])
|
||||||
|
@ -211,7 +212,7 @@ arg_parser.add_argument(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
arg_parser.add_argument("input", nargs='*', help="Abstract interpreter definition file")
|
arg_parser.add_argument("input", nargs="*", help="Abstract interpreter definition file")
|
||||||
|
|
||||||
arg_parser.add_argument(
|
arg_parser.add_argument(
|
||||||
"base", nargs="*", help="The base instruction definition file(s)"
|
"base", nargs="*", help="The base instruction definition file(s)"
|
||||||
|
|
|
@ -66,6 +66,7 @@ class Node:
|
||||||
assert context is not None
|
assert context is not None
|
||||||
return context.owner.tokens[context.begin]
|
return context.owner.tokens[context.begin]
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Block(Node):
|
class Block(Node):
|
||||||
# This just holds a context which has the list of tokens.
|
# This just holds a context which has the list of tokens.
|
||||||
|
@ -426,7 +427,9 @@ class Parser(PLexer):
|
||||||
raise self.make_syntax_error("Expected {")
|
raise self.make_syntax_error("Expected {")
|
||||||
if members := self.members():
|
if members := self.members():
|
||||||
if self.expect(lx.RBRACE) and self.expect(lx.SEMI):
|
if self.expect(lx.RBRACE) and self.expect(lx.SEMI):
|
||||||
return Pseudo(tkn.text, inp, outp, flags, members)
|
return Pseudo(
|
||||||
|
tkn.text, inp, outp, flags, members
|
||||||
|
)
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def members(self) -> list[str] | None:
|
def members(self) -> list[str] | None:
|
||||||
|
|
|
@ -18,7 +18,6 @@ from cwriter import CWriter
|
||||||
from typing import TextIO
|
from typing import TextIO
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
DEFAULT_OUTPUT = ROOT / "Lib/_opcode_metadata.py"
|
DEFAULT_OUTPUT = ROOT / "Lib/_opcode_metadata.py"
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -38,9 +38,9 @@ def var_size(var: StackItem) -> str:
|
||||||
else:
|
else:
|
||||||
return "1"
|
return "1"
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class Local:
|
class Local:
|
||||||
|
|
||||||
item: StackItem
|
item: StackItem
|
||||||
cached: bool
|
cached: bool
|
||||||
in_memory: bool
|
in_memory: bool
|
||||||
|
@ -75,6 +75,7 @@ class Local:
|
||||||
def is_array(self) -> bool:
|
def is_array(self) -> bool:
|
||||||
return self.item.is_array()
|
return self.item.is_array()
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class StackOffset:
|
class StackOffset:
|
||||||
"The stack offset of the virtual base of the stack from the physical stack pointer"
|
"The stack offset of the virtual base of the stack from the physical stack pointer"
|
||||||
|
@ -183,44 +184,37 @@ class Stack:
|
||||||
)
|
)
|
||||||
if var.name in UNUSED:
|
if var.name in UNUSED:
|
||||||
if popped.name not in UNUSED and popped.name in self.defined:
|
if popped.name not in UNUSED and popped.name in self.defined:
|
||||||
raise StackError(f"Value is declared unused, but is already cached by prior operation")
|
raise StackError(
|
||||||
|
f"Value is declared unused, but is already cached by prior operation"
|
||||||
|
)
|
||||||
return "", popped
|
return "", popped
|
||||||
if not var.used:
|
if not var.used:
|
||||||
return "", popped
|
return "", popped
|
||||||
self.defined.add(var.name)
|
self.defined.add(var.name)
|
||||||
# Always define array variables as it is free, and their offset might have changed
|
if popped.defined:
|
||||||
if var.is_array():
|
|
||||||
return (
|
|
||||||
f"{var.name} = &stack_pointer[{self.top_offset.to_c()}];\n",
|
|
||||||
Local.redefinition(var, popped)
|
|
||||||
)
|
|
||||||
if not popped.defined:
|
|
||||||
return (
|
|
||||||
f"{var.name} = stack_pointer[{self.top_offset.to_c()}];\n",
|
|
||||||
Local.redefinition(var, popped)
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
if popped.name == var.name:
|
if popped.name == var.name:
|
||||||
return "", popped
|
return "", popped
|
||||||
else:
|
else:
|
||||||
return (
|
defn = f"{var.name} = {popped.name};\n"
|
||||||
f"{var.name} = {popped.name};\n",
|
else:
|
||||||
Local.redefinition(var, popped)
|
if var.is_array():
|
||||||
)
|
defn = f"{var.name} = &stack_pointer[{self.top_offset.to_c()}];\n"
|
||||||
|
else:
|
||||||
|
defn = f"{var.name} = stack_pointer[{self.top_offset.to_c()}];\n"
|
||||||
|
return defn, Local.redefinition(var, popped)
|
||||||
|
|
||||||
self.base_offset.pop(var)
|
self.base_offset.pop(var)
|
||||||
if var.name in UNUSED or not var.used:
|
if var.name in UNUSED or not var.used:
|
||||||
return "", Local.unused(var)
|
return "", Local.unused(var)
|
||||||
self.defined.add(var.name)
|
self.defined.add(var.name)
|
||||||
cast = f"({var.type})" if (not indirect and var.type) else ""
|
cast = f"({var.type})" if (not indirect and var.type) else ""
|
||||||
bits = ".bits" if cast and not extract_bits else ""
|
bits = ".bits" if cast and not extract_bits else ""
|
||||||
assign = (
|
assign = f"{var.name} = {cast}{indirect}stack_pointer[{self.base_offset.to_c()}]{bits};"
|
||||||
f"{var.name} = {cast}{indirect}stack_pointer[{self.base_offset.to_c()}]{bits};"
|
|
||||||
)
|
|
||||||
if var.condition:
|
if var.condition:
|
||||||
if var.condition == "1":
|
if var.condition == "1":
|
||||||
assign = f"{assign}\n"
|
assign = f"{assign}\n"
|
||||||
elif var.condition == "0":
|
elif var.condition == "0":
|
||||||
return "", Local.unused(var)
|
return "", Local.unused(var)
|
||||||
else:
|
else:
|
||||||
assign = f"if ({var.condition}) {{ {assign} }}\n"
|
assign = f"if ({var.condition}) {{ {assign} }}\n"
|
||||||
else:
|
else:
|
||||||
|
@ -228,21 +222,12 @@ class Stack:
|
||||||
in_memory = var.is_array() or var.peek
|
in_memory = var.is_array() or var.peek
|
||||||
return assign, Local(var, not var.is_array(), in_memory, True)
|
return assign, Local(var, not var.is_array(), in_memory, True)
|
||||||
|
|
||||||
def push(self, var: Local) -> str:
|
def push(self, var: Local) -> None:
|
||||||
self.variables.append(var)
|
self.variables.append(var)
|
||||||
if var.is_array() and not var.defined and var.item.used:
|
self.top_offset.push(var.item)
|
||||||
assert var.in_memory
|
if var.item.used:
|
||||||
assert not var.cached
|
|
||||||
c_offset = self.top_offset.to_c()
|
|
||||||
self.top_offset.push(var.item)
|
|
||||||
self.defined.add(var.name)
|
self.defined.add(var.name)
|
||||||
var.defined = True
|
var.defined = True
|
||||||
return f"{var.name} = &stack_pointer[{c_offset}];\n"
|
|
||||||
else:
|
|
||||||
self.top_offset.push(var.item)
|
|
||||||
if var.item.used:
|
|
||||||
self.defined.add(var.name)
|
|
||||||
return ""
|
|
||||||
|
|
||||||
def define_output_arrays(self, outputs: list[StackItem]) -> str:
|
def define_output_arrays(self, outputs: list[StackItem]) -> str:
|
||||||
res = []
|
res = []
|
||||||
|
@ -257,24 +242,38 @@ class Stack:
|
||||||
return "\n".join(res)
|
return "\n".join(res)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _do_emit(out: CWriter, var: StackItem, base_offset: StackOffset,
|
def _do_emit(
|
||||||
cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
|
out: CWriter,
|
||||||
|
var: StackItem,
|
||||||
|
base_offset: StackOffset,
|
||||||
|
cast_type: str = "uintptr_t",
|
||||||
|
extract_bits: bool = False,
|
||||||
|
) -> None:
|
||||||
cast = f"({cast_type})" if var.type else ""
|
cast = f"({cast_type})" if var.type else ""
|
||||||
bits = ".bits" if cast and not extract_bits else ""
|
bits = ".bits" if cast and not extract_bits else ""
|
||||||
if var.condition == "0":
|
if var.condition == "0":
|
||||||
return
|
return
|
||||||
if var.condition and var.condition != "1":
|
if var.condition and var.condition != "1":
|
||||||
out.emit(f"if ({var.condition}) ")
|
out.emit(f"if ({var.condition}) ")
|
||||||
out.emit(
|
out.emit(f"stack_pointer[{base_offset.to_c()}]{bits} = {cast}{var.name};\n")
|
||||||
f"stack_pointer[{base_offset.to_c()}]{bits} = {cast}{var.name};\n"
|
|
||||||
)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _do_flush(out: CWriter, variables: list[Local], base_offset: StackOffset, top_offset: StackOffset,
|
def _do_flush(
|
||||||
cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
|
out: CWriter,
|
||||||
|
variables: list[Local],
|
||||||
|
base_offset: StackOffset,
|
||||||
|
top_offset: StackOffset,
|
||||||
|
cast_type: str = "uintptr_t",
|
||||||
|
extract_bits: bool = False,
|
||||||
|
) -> None:
|
||||||
out.start_line()
|
out.start_line()
|
||||||
for var in variables:
|
for var in variables:
|
||||||
if var.cached and not var.in_memory and not var.item.peek and not var.name in UNUSED:
|
if (
|
||||||
|
var.cached
|
||||||
|
and not var.in_memory
|
||||||
|
and not var.item.peek
|
||||||
|
and not var.name in UNUSED
|
||||||
|
):
|
||||||
Stack._do_emit(out, var.item, base_offset, cast_type, extract_bits)
|
Stack._do_emit(out, var.item, base_offset, cast_type, extract_bits)
|
||||||
base_offset.push(var.item)
|
base_offset.push(var.item)
|
||||||
if base_offset.to_c() != top_offset.to_c():
|
if base_offset.to_c() != top_offset.to_c():
|
||||||
|
@ -286,31 +285,55 @@ class Stack:
|
||||||
out.emit("assert(WITHIN_STACK_BOUNDS());\n")
|
out.emit("assert(WITHIN_STACK_BOUNDS());\n")
|
||||||
out.start_line()
|
out.start_line()
|
||||||
|
|
||||||
def flush_locally(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
|
def flush_locally(
|
||||||
self._do_flush(out, self.variables[:], self.base_offset.copy(), self.top_offset.copy(), cast_type, extract_bits)
|
self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False
|
||||||
|
) -> None:
|
||||||
|
self._do_flush(
|
||||||
|
out,
|
||||||
|
self.variables[:],
|
||||||
|
self.base_offset.copy(),
|
||||||
|
self.top_offset.copy(),
|
||||||
|
cast_type,
|
||||||
|
extract_bits,
|
||||||
|
)
|
||||||
|
|
||||||
def flush(self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
|
def flush(
|
||||||
self._do_flush(out, self.variables, self.base_offset, self.top_offset, cast_type, extract_bits)
|
self, out: CWriter, cast_type: str = "uintptr_t", extract_bits: bool = False
|
||||||
|
) -> None:
|
||||||
|
self._do_flush(
|
||||||
|
out,
|
||||||
|
self.variables,
|
||||||
|
self.base_offset,
|
||||||
|
self.top_offset,
|
||||||
|
cast_type,
|
||||||
|
extract_bits,
|
||||||
|
)
|
||||||
self.variables = []
|
self.variables = []
|
||||||
self.base_offset.clear()
|
self.base_offset.clear()
|
||||||
self.top_offset.clear()
|
self.top_offset.clear()
|
||||||
|
|
||||||
def flush_single_var(self, out: CWriter, var_name: str, outputs: list[StackItem],
|
def flush_single_var(
|
||||||
cast_type: str = "uintptr_t", extract_bits: bool = False) -> None:
|
self,
|
||||||
|
out: CWriter,
|
||||||
|
var_name: str,
|
||||||
|
outputs: list[StackItem],
|
||||||
|
cast_type: str = "uintptr_t",
|
||||||
|
extract_bits: bool = False,
|
||||||
|
) -> None:
|
||||||
assert any(var.name == var_name for var in outputs)
|
assert any(var.name == var_name for var in outputs)
|
||||||
base_offset = self.base_offset.copy()
|
base_offset = self.base_offset.copy()
|
||||||
top_offset = self.top_offset.copy()
|
top_offset = self.top_offset.copy()
|
||||||
for var in self.variables:
|
for var in self.variables:
|
||||||
base_offset.push(var.item)
|
base_offset.push(var.item)
|
||||||
for var in outputs:
|
for output in outputs:
|
||||||
if any(var == v.item for v in self.variables):
|
if any(output == v.item for v in self.variables):
|
||||||
# The variable is already on the stack, such as a peeked value
|
# The variable is already on the stack, such as a peeked value
|
||||||
# in the tier1 generator
|
# in the tier1 generator
|
||||||
continue
|
continue
|
||||||
if var.name == var_name:
|
if output.name == var_name:
|
||||||
Stack._do_emit(out, var, base_offset, cast_type, extract_bits)
|
Stack._do_emit(out, output, base_offset, cast_type, extract_bits)
|
||||||
base_offset.push(var)
|
base_offset.push(output)
|
||||||
top_offset.push(var)
|
top_offset.push(output)
|
||||||
if base_offset.to_c() != top_offset.to_c():
|
if base_offset.to_c() != top_offset.to_c():
|
||||||
print("base", base_offset, "top", top_offset)
|
print("base", base_offset, "top", top_offset)
|
||||||
assert False
|
assert False
|
||||||
|
@ -324,7 +347,8 @@ class Stack:
|
||||||
|
|
||||||
def get_stack_effect(inst: Instruction | PseudoInstruction) -> Stack:
|
def get_stack_effect(inst: Instruction | PseudoInstruction) -> Stack:
|
||||||
stack = Stack()
|
stack = Stack()
|
||||||
def stacks(inst : Instruction | PseudoInstruction) -> Iterator[StackEffect]:
|
|
||||||
|
def stacks(inst: Instruction | PseudoInstruction) -> Iterator[StackEffect]:
|
||||||
if isinstance(inst, Instruction):
|
if isinstance(inst, Instruction):
|
||||||
for uop in inst.parts:
|
for uop in inst.parts:
|
||||||
if isinstance(uop, Uop):
|
if isinstance(uop, Uop):
|
||||||
|
|
|
@ -30,6 +30,7 @@ def write_opcode_targets(analysis: Analysis, out: CWriter) -> None:
|
||||||
out.emit(target)
|
out.emit(target)
|
||||||
out.emit("};\n")
|
out.emit("};\n")
|
||||||
|
|
||||||
|
|
||||||
arg_parser = argparse.ArgumentParser(
|
arg_parser = argparse.ArgumentParser(
|
||||||
description="Generate the file with dispatch targets.",
|
description="Generate the file with dispatch targets.",
|
||||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
|
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
|
||||||
|
|
|
@ -33,6 +33,7 @@ DEFAULT_OUTPUT = ROOT / "Python/generated_cases.c.h"
|
||||||
|
|
||||||
FOOTER = "#undef TIER_ONE\n"
|
FOOTER = "#undef TIER_ONE\n"
|
||||||
|
|
||||||
|
|
||||||
def declare_variable(var: StackItem, out: CWriter) -> None:
|
def declare_variable(var: StackItem, out: CWriter) -> None:
|
||||||
type, null = type_and_null(var)
|
type, null = type_and_null(var)
|
||||||
space = " " if type[-1].isalnum() else ""
|
space = " " if type[-1].isalnum() else ""
|
||||||
|
@ -61,8 +62,14 @@ def declare_variables(inst: Instruction, out: CWriter) -> None:
|
||||||
required.remove(var.name)
|
required.remove(var.name)
|
||||||
declare_variable(var, out)
|
declare_variable(var, out)
|
||||||
|
|
||||||
|
|
||||||
def write_uop(
|
def write_uop(
|
||||||
uop: Part, emitter: Emitter, offset: int, stack: Stack, inst: Instruction, braces: bool
|
uop: Part,
|
||||||
|
emitter: Emitter,
|
||||||
|
offset: int,
|
||||||
|
stack: Stack,
|
||||||
|
inst: Instruction,
|
||||||
|
braces: bool,
|
||||||
) -> int:
|
) -> int:
|
||||||
# out.emit(stack.as_comment() + "\n")
|
# out.emit(stack.as_comment() + "\n")
|
||||||
if isinstance(uop, Skip):
|
if isinstance(uop, Skip):
|
||||||
|
@ -123,7 +130,7 @@ def write_uop(
|
||||||
if output.name in uop.deferred_refs.values():
|
if output.name in uop.deferred_refs.values():
|
||||||
# We've already spilled this when emitting tokens
|
# We've already spilled this when emitting tokens
|
||||||
output.cached = False
|
output.cached = False
|
||||||
emitter.emit(stack.push(output))
|
stack.push(output)
|
||||||
if braces:
|
if braces:
|
||||||
emitter.out.start_line()
|
emitter.out.start_line()
|
||||||
emitter.emit("}\n")
|
emitter.emit("}\n")
|
||||||
|
|
|
@ -19,7 +19,7 @@ from generators_common import (
|
||||||
emit_to,
|
emit_to,
|
||||||
write_header,
|
write_header,
|
||||||
type_and_null,
|
type_and_null,
|
||||||
Emitter
|
Emitter,
|
||||||
)
|
)
|
||||||
from cwriter import CWriter
|
from cwriter import CWriter
|
||||||
from typing import TextIO, Iterator
|
from typing import TextIO, Iterator
|
||||||
|
@ -62,7 +62,6 @@ def declare_variables(uop: Uop, out: CWriter) -> None:
|
||||||
|
|
||||||
|
|
||||||
class Tier2Emitter(Emitter):
|
class Tier2Emitter(Emitter):
|
||||||
|
|
||||||
def __init__(self, out: CWriter):
    """Create a tier-2 emitter writing to *out*.

    Extends the base Emitter by registering this class's ``oparg``
    method as the replacement handler for "oparg" tokens.
    """
    super().__init__(out)
    self._replacers["oparg"] = self.oparg
|
||||||
|
@ -110,10 +109,10 @@ class Tier2Emitter(Emitter):
|
||||||
next(tkn_iter) # Semi colon
|
next(tkn_iter) # Semi colon
|
||||||
self.emit(") {\n")
|
self.emit(") {\n")
|
||||||
self.emit("UOP_STAT_INC(uopcode, miss);\n")
|
self.emit("UOP_STAT_INC(uopcode, miss);\n")
|
||||||
self.emit("JUMP_TO_JUMP_TARGET();\n");
|
self.emit("JUMP_TO_JUMP_TARGET();\n")
|
||||||
self.emit("}\n")
|
self.emit("}\n")
|
||||||
|
|
||||||
def exit_if( # type: ignore[override]
|
def exit_if( # type: ignore[override]
|
||||||
self,
|
self,
|
||||||
tkn: Token,
|
tkn: Token,
|
||||||
tkn_iter: Iterator[Token],
|
tkn_iter: Iterator[Token],
|
||||||
|
@ -150,6 +149,7 @@ class Tier2Emitter(Emitter):
|
||||||
assert one.text == "1"
|
assert one.text == "1"
|
||||||
self.out.emit_at(uop.name[-1], tkn)
|
self.out.emit_at(uop.name[-1], tkn)
|
||||||
|
|
||||||
|
|
||||||
def write_uop(uop: Uop, emitter: Emitter, stack: Stack) -> None:
|
def write_uop(uop: Uop, emitter: Emitter, stack: Stack) -> None:
|
||||||
locals: dict[str, Local] = {}
|
locals: dict[str, Local] = {}
|
||||||
try:
|
try:
|
||||||
|
@ -186,7 +186,7 @@ def write_uop(uop: Uop, emitter: Emitter, stack: Stack) -> None:
|
||||||
if output.name in uop.deferred_refs.values():
|
if output.name in uop.deferred_refs.values():
|
||||||
# We've already spilled this when emitting tokens
|
# We've already spilled this when emitting tokens
|
||||||
output.cached = False
|
output.cached = False
|
||||||
emitter.emit(stack.push(output))
|
stack.push(output)
|
||||||
except StackError as ex:
|
except StackError as ex:
|
||||||
raise analysis_error(ex.args[0], uop.body[0]) from None
|
raise analysis_error(ex.args[0], uop.body[0]) from None
|
||||||
|
|
||||||
|
@ -219,7 +219,9 @@ def generate_tier2(
|
||||||
continue
|
continue
|
||||||
why_not_viable = uop.why_not_viable()
|
why_not_viable = uop.why_not_viable()
|
||||||
if why_not_viable is not None:
|
if why_not_viable is not None:
|
||||||
out.emit(f"/* {uop.name} is not a viable micro-op for tier 2 because it {why_not_viable} */\n\n")
|
out.emit(
|
||||||
|
f"/* {uop.name} is not a viable micro-op for tier 2 because it {why_not_viable} */\n\n"
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
out.emit(f"case {uop.name}: {{\n")
|
out.emit(f"case {uop.name}: {{\n")
|
||||||
declare_variables(uop, out)
|
declare_variables(uop, out)
|
||||||
|
|
Loading…
Reference in New Issue