"""A flow graph representation for Python bytecode"""

import dis
import types
import sys

from compiler import misc
from compiler.consts \
     import CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS
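
# A rough sketch of how this module is typically driven (illustration only;
# the real caller is compiler.pycodegen, and this particular instruction
# stream is made up for the example):
#
#     graph = PyFlowGraph("f", "<example>", args=(), optimized=1)
#     graph.emit('LOAD_CONST', None)
#     graph.emit('RETURN_VALUE')
#     code = graph.getCode()   # orders blocks, resolves jumps, builds lnotab
#
# getCode() drives the RAW -> FLAT -> CONV -> DONE stages defined below.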

class FlowGraph:
    def __init__(self):
        self.current = self.entry = Block()
        self.exit = Block("exit")
        self.blocks = misc.Set()
        self.blocks.add(self.entry)
        self.blocks.add(self.exit)

    def startBlock(self, block):
        if self._debug:
            if self.current:
                print "end", repr(self.current)
                print "    next", self.current.next
                print "    prev", self.current.prev
                print "   ", self.current.get_children()
            print repr(block)
        self.current = block

    def nextBlock(self, block=None):
        # XXX think we need to specify when there is implicit transfer
        # from one block to the next.  might be better to represent this
        # with explicit JUMP_ABSOLUTE instructions that are optimized
        # out when they are unnecessary.
        #
        # I think this strategy works: each block has a child
        # designated as "next" which is returned as the last of the
        # children.  because the nodes in a graph are emitted in
        # reverse post order, the "next" block will always be emitted
        # immediately after its parent.
        # Worry: maintaining this invariant could be tricky
        if block is None:
            block = self.newBlock()

        # Note: If the current block ends with an unconditional control
        # transfer, then it is technically incorrect to add an implicit
        # transfer to the block graph.  Doing so results in code generation
        # for unreachable blocks.  That doesn't appear to be very common
        # with Python code and since the built-in compiler doesn't optimize
        # it out we don't either.
        self.current.addNext(block)
        self.startBlock(block)

    def newBlock(self):
        b = Block()
        self.blocks.add(b)
        return b

    def startExitBlock(self):
        self.startBlock(self.exit)

    _debug = 0

    def _enable_debug(self):
        self._debug = 1

    def _disable_debug(self):
        self._debug = 0

    def emit(self, *inst):
        if self._debug:
            print "\t", inst
        if len(inst) == 2 and isinstance(inst[1], Block):
            self.current.addOutEdge(inst[1])
        self.current.emit(inst)

    def getBlocksInOrder(self):
        """Return the blocks in reverse postorder

        i.e. each node appears before all of its successors
        """
        order = order_blocks(self.entry, self.exit)
        return order

    def getBlocks(self):
        return self.blocks.elements()

    def getRoot(self):
        """Return nodes appropriate for use with dominator"""
        return self.entry

    def getContainedGraphs(self):
        l = []
        for b in self.getBlocks():
            l.extend(b.getContainedGraphs())
        return l

def order_blocks(start_block, exit_block):
    """Order blocks so that they are emitted in the right order"""
    # Rules:
    # - when a block has a next block, the next block must be emitted just after
    # - when a block has followers (relative jumps), it must be emitted before
    #   them
    # - all reachable blocks must be emitted
    order = []

    # Find all the blocks to be emitted.
    remaining = set()
    todo = [start_block]
    while todo:
        b = todo.pop()
        if b in remaining:
            continue
        remaining.add(b)
        for c in b.get_children():
            if c not in remaining:
                todo.append(c)

    # A block is dominated by another block if that block must be emitted
    # before it.
    dominators = {}
    for b in remaining:
        if __debug__ and b.next:
            assert b is b.next[0].prev[0], (b, b.next)
        # Make sure every block appears in dominators, even if no
        # other block must precede it.
        dominators.setdefault(b, set())
        # preceding blocks dominate following blocks
        for c in b.get_followers():
            while 1:
                dominators.setdefault(c, set()).add(b)
                # Any block that has a next pointer leading to c is also
                # dominated because the whole chain will be emitted at once.
                # Walk backwards and add them all.
                if c.prev and c.prev[0] is not b:
                    c = c.prev[0]
                else:
                    break

    def find_next():
        # Find a block that can be emitted next.
        for b in remaining:
            for c in dominators[b]:
                if c in remaining:
                    break # can't emit yet, dominated by a remaining block
            else:
                return b
        assert 0, 'circular dependency, cannot find next block'

    b = start_block
    while 1:
        order.append(b)
        remaining.discard(b)
        if b.next:
            b = b.next[0]
            continue
        elif b is not exit_block and not b.has_unconditional_transfer():
            order.append(exit_block)
        if not remaining:
            break
        b = find_next()
    return order
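
# Illustrative sketch (not executed): ordering a small graph built with the
# Block API below; the names b1, b2 and b3 are hypothetical.
#
#     b1 = Block("entry"); b2 = Block(); b3 = Block("exit")
#     b1.addNext(b2)                  # b2 must be emitted right after b1
#     b2.emit(('JUMP_FORWARD', b3))   # relative jump, so b3 follows b2
#     b2.addOutEdge(b3)
#     order_blocks(b1, b3)            # -> [b1, b2, b3]
#
# Here b2 dominates b3 (the jump target cannot be placed first), so
# find_next only returns b3 once b2 has been emitted.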

class Block:
    _count = 0

    def __init__(self, label=''):
        self.insts = []
        self.outEdges = set()
        self.label = label
        self.bid = Block._count
        self.next = []
        self.prev = []
        Block._count = Block._count + 1

    def __repr__(self):
        if self.label:
            return "<block %s id=%d>" % (self.label, self.bid)
        else:
            return "<block id=%d>" % (self.bid)

    def __str__(self):
        insts = map(str, self.insts)
        return "<block %s %d:\n%s>" % (self.label, self.bid,
                                       '\n'.join(insts))

    def emit(self, inst):
        op = inst[0]
        self.insts.append(inst)

    def getInstructions(self):
        return self.insts

    def addOutEdge(self, block):
        self.outEdges.add(block)

    def addNext(self, block):
        self.next.append(block)
        assert len(self.next) == 1, map(str, self.next)
        block.prev.append(self)
        assert len(block.prev) == 1, map(str, block.prev)

    _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS',
                        'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP',
                        )

    def has_unconditional_transfer(self):
        """Returns True if there is an unconditional transfer to another block
        at the end of this block.  This means there is no risk for the bytecode
        executor to go past this block's bytecode."""
        try:
            op, arg = self.insts[-1]
        except (IndexError, ValueError):
            return
        return op in self._uncond_transfer

    def get_children(self):
        return list(self.outEdges) + self.next

    def get_followers(self):
        """Get the whole list of followers, including the next block."""
        followers = set(self.next)
        # Blocks that must be emitted *after* this one, because of
        # bytecode offsets (e.g. relative jumps) pointing to them.
        for inst in self.insts:
            if inst[0] in PyFlowGraph.hasjrel:
                followers.add(inst[1])
        return followers

    def getContainedGraphs(self):
        """Return all graphs contained within this block.

        For example, a MAKE_FUNCTION block will contain a reference to
        the graph for the function body.
        """
        contained = []
        for inst in self.insts:
            if len(inst) == 1:
                continue
            op = inst[1]
            if hasattr(op, 'graph'):
                contained.append(op.graph)
        return contained

# flags for code objects

# the FlowGraph is transformed in place; it exists in one of these states
RAW = "RAW"
FLAT = "FLAT"
CONV = "CONV"
DONE = "DONE"

class PyFlowGraph(FlowGraph):
    super_init = FlowGraph.__init__

    def __init__(self, name, filename, args=(), optimized=0, klass=None):
        self.super_init()
        self.name = name
        self.filename = filename
        self.docstring = None
        self.args = args # XXX
        self.argcount = getArgCount(args)
        self.klass = klass
        if optimized:
            self.flags = CO_OPTIMIZED | CO_NEWLOCALS
        else:
            self.flags = 0
        self.consts = []
        self.names = []
        # Free variables found by the symbol table scan, including
        # variables used only in nested scopes, are included here.
        self.freevars = []
        self.cellvars = []
        # The closure list is used to track the order of cell
        # variables and free variables in the resulting code object.
        # The offsets used by LOAD_CLOSURE/LOAD_DEREF refer to both
        # kinds of variables.
        self.closure = []
        self.varnames = list(args) or []
        for i in range(len(self.varnames)):
            var = self.varnames[i]
            if isinstance(var, TupleArg):
                self.varnames[i] = var.getName()
        self.stage = RAW

    def setDocstring(self, doc):
        self.docstring = doc

    def setFlag(self, flag):
        self.flags = self.flags | flag
        if flag == CO_VARARGS:
            self.argcount = self.argcount - 1

    def checkFlag(self, flag):
        if self.flags & flag:
            return 1

    def setFreeVars(self, names):
        self.freevars = list(names)

    def setCellVars(self, names):
        self.cellvars = names

    def getCode(self):
        """Get a Python code object"""
        assert self.stage == RAW
        self.computeStackDepth()
        self.flattenGraph()
        assert self.stage == FLAT
        self.convertArgs()
        assert self.stage == CONV
        self.makeByteCode()
        assert self.stage == DONE
        return self.newCodeObject()

    def dump(self, io=None):
        if io:
            save = sys.stdout
            sys.stdout = io
        pc = 0
        for t in self.insts:
            opname = t[0]
            if opname == "SET_LINENO":
                print
            if len(t) == 1:
                print "\t", "%3d" % pc, opname
                pc = pc + 1
            else:
                print "\t", "%3d" % pc, opname, t[1]
                pc = pc + 3
        if io:
            sys.stdout = save

    def computeStackDepth(self):
        """Compute the max stack depth.

        Approach is to compute the stack effect of each basic block.
        Then find the path through the code with the largest total
        effect.
        """
        depth = {}
        exit = None
        for b in self.getBlocks():
            depth[b] = findDepth(b.getInstructions())

        seen = {}

        def max_depth(b, d):
            if b in seen:
                return d
            seen[b] = 1
            d = d + depth[b]
            children = b.get_children()
            if children:
                return max([max_depth(c, d) for c in children])
            else:
                if not b.label == "exit":
                    return max_depth(self.exit, d)
                else:
                    return d

        self.stacksize = max_depth(self.entry, 0)
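
    # Rough illustration, using the StackDepthTracker table and patterns at
    # the bottom of this module, for a hypothetical block:
    #
    #     LOAD_CONST 1   -> depth 1   ('LOAD_' pattern: +1)
    #     LOAD_CONST 2   -> depth 2   ('LOAD_' pattern: +1)
    #     BINARY_ADD     -> depth 1   ('BINARY_' pattern: -1)
    #     RETURN_VALUE   -> depth 0   (effect table: -1)
    #
    # findDepth reports 2 for this block; max_depth then takes the largest
    # running total over any path of blocks.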

    def flattenGraph(self):
        """Arrange the blocks in order and resolve jumps"""
        assert self.stage == RAW
        self.insts = insts = []
        pc = 0
        begin = {}
        end = {}
        for b in self.getBlocksInOrder():
            begin[b] = pc
            for inst in b.getInstructions():
                insts.append(inst)
                if len(inst) == 1:
                    pc = pc + 1
                elif inst[0] != "SET_LINENO":
                    # arg takes 2 bytes
                    pc = pc + 3
            end[b] = pc
        pc = 0
        for i in range(len(insts)):
            inst = insts[i]
            if len(inst) == 1:
                pc = pc + 1
            elif inst[0] != "SET_LINENO":
                pc = pc + 3
            opname = inst[0]
            if opname in self.hasjrel:
                oparg = inst[1]
                offset = begin[oparg] - pc
                insts[i] = opname, offset
            elif opname in self.hasjabs:
                insts[i] = opname, begin[inst[1]]
        self.stage = FLAT
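
    # Example (illustration only): if a JUMP_FORWARD starts at byte 10, pc has
    # already advanced past it to 13 when the offset is computed, so a target
    # block beginning at byte 20 becomes the relative argument 20 - 13 = 7.
    # Absolute jumps simply receive begin[target].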

    hasjrel = set()
    for i in dis.hasjrel:
        hasjrel.add(dis.opname[i])
    hasjabs = set()
    for i in dis.hasjabs:
        hasjabs.add(dis.opname[i])

    def convertArgs(self):
        """Convert arguments from symbolic to concrete form"""
        assert self.stage == FLAT
        self.consts.insert(0, self.docstring)
        self.sort_cellvars()
        for i in range(len(self.insts)):
            t = self.insts[i]
            if len(t) == 2:
                opname, oparg = t
                conv = self._converters.get(opname, None)
                if conv:
                    self.insts[i] = opname, conv(self, oparg)
        self.stage = CONV

    def sort_cellvars(self):
        """Sort cellvars in the order of varnames and prune from freevars.
        """
        cells = {}
        for name in self.cellvars:
            cells[name] = 1
        self.cellvars = [name for name in self.varnames
                         if name in cells]
        for name in self.cellvars:
            del cells[name]
        self.cellvars = self.cellvars + cells.keys()
        self.closure = self.cellvars + self.freevars

    def _lookupName(self, name, list):
        """Return index of name in list, appending if necessary

        This routine uses a list instead of a dictionary, because a
        dictionary can't store two different keys if the keys have the
        same value but different types, e.g. 2 and 2L.  The compiler
        must treat these two separately, so it does an explicit type
        comparison before comparing the values.
        """
        t = type(name)
        for i in range(len(list)):
            if t == type(list[i]) and list[i] == name:
                return i
        end = len(list)
        list.append(name)
        return end
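
    # For instance (illustration only): looking up 2 and then 2L in the same
    # list yields two different slots, because 2 == 2L but type(2) is not
    # type(2L).  A dictionary keyed only by value would collapse the two
    # constants and the generated code object would lose the distinction.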

    _converters = {}

    def _convert_LOAD_CONST(self, arg):
        if hasattr(arg, 'getCode'):
            arg = arg.getCode()
        return self._lookupName(arg, self.consts)

    def _convert_LOAD_FAST(self, arg):
        self._lookupName(arg, self.names)
        return self._lookupName(arg, self.varnames)
    _convert_STORE_FAST = _convert_LOAD_FAST
    _convert_DELETE_FAST = _convert_LOAD_FAST

    def _convert_LOAD_NAME(self, arg):
        if self.klass is None:
            self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.names)

    def _convert_NAME(self, arg):
        if self.klass is None:
            self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.names)
    _convert_STORE_NAME = _convert_NAME
    _convert_DELETE_NAME = _convert_NAME
    _convert_IMPORT_NAME = _convert_NAME
    _convert_IMPORT_FROM = _convert_NAME
    _convert_STORE_ATTR = _convert_NAME
    _convert_LOAD_ATTR = _convert_NAME
    _convert_DELETE_ATTR = _convert_NAME
    _convert_LOAD_GLOBAL = _convert_NAME
    _convert_STORE_GLOBAL = _convert_NAME
    _convert_DELETE_GLOBAL = _convert_NAME

    def _convert_DEREF(self, arg):
        self._lookupName(arg, self.names)
        self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.closure)
    _convert_LOAD_DEREF = _convert_DEREF
    _convert_STORE_DEREF = _convert_DEREF

    def _convert_LOAD_CLOSURE(self, arg):
        self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.closure)

    _cmp = list(dis.cmp_op)
    def _convert_COMPARE_OP(self, arg):
        return self._cmp.index(arg)

    # similarly for other opcodes...

    for name, obj in locals().items():
        if name[:9] == "_convert_":
            opname = name[9:]
            _converters[opname] = obj
    del name, obj, opname

    def makeByteCode(self):
        assert self.stage == CONV
        self.lnotab = lnotab = LineAddrTable()
        for t in self.insts:
            opname = t[0]
            if len(t) == 1:
                lnotab.addCode(self.opnum[opname])
            else:
                oparg = t[1]
                if opname == "SET_LINENO":
                    lnotab.nextLine(oparg)
                    continue
                hi, lo = twobyte(oparg)
                try:
                    lnotab.addCode(self.opnum[opname], lo, hi)
                except ValueError:
                    print opname, oparg
                    print self.opnum[opname], lo, hi
                    raise
        self.stage = DONE

    opnum = {}
    for num in range(len(dis.opname)):
        opnum[dis.opname[num]] = num
    del num

    def newCodeObject(self):
        assert self.stage == DONE
        if (self.flags & CO_NEWLOCALS) == 0:
            nlocals = 0
        else:
            nlocals = len(self.varnames)
        argcount = self.argcount
        if self.flags & CO_VARKEYWORDS:
            argcount = argcount - 1
        return types.CodeType(argcount, nlocals, self.stacksize, self.flags,
                              self.lnotab.getCode(), self.getConsts(),
                              tuple(self.names), tuple(self.varnames),
                              self.filename, self.name, self.lnotab.firstline,
                              self.lnotab.getTable(), tuple(self.freevars),
                              tuple(self.cellvars))

    def getConsts(self):
        """Return a tuple for the const slot of the code object

        Must convert references to code (MAKE_FUNCTION) to code
        objects recursively.
        """
        l = []
        for elt in self.consts:
            if isinstance(elt, PyFlowGraph):
                elt = elt.getCode()
            l.append(elt)
        return tuple(l)

def isJump(opname):
    if opname[:4] == 'JUMP':
        return 1

class TupleArg:
    """Helper for marking func defs with nested tuples in arglist"""
    def __init__(self, count, names):
        self.count = count
        self.names = names
    def __repr__(self):
        return "TupleArg(%s, %s)" % (self.count, self.names)
    def getName(self):
        return ".%d" % self.count

def getArgCount(args):
    argcount = len(args)
    if args:
        for arg in args:
            if isinstance(arg, TupleArg):
                numNames = len(misc.flatten(arg.names))
                argcount = argcount - numNames
    return argcount

def twobyte(val):
    """Convert an int argument into high and low bytes"""
    assert isinstance(val, int)
    return divmod(val, 256)
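
# A small worked example: twobyte(300) == (1, 44), since 300 == 1*256 + 44.
# makeByteCode emits the low byte first, then the high byte.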

class LineAddrTable:
    """lnotab

    This class builds the lnotab, which is documented in compile.c.
    Here's a brief recap:

    For each SET_LINENO instruction after the first one, two bytes are
    added to lnotab.  (In some cases, multiple two-byte entries are
    added.)  The first byte is the distance in bytes between the
    instruction for the last SET_LINENO and the current SET_LINENO.
    The second byte is the offset in line numbers.  If either offset is
    greater than 255, multiple two-byte entries are added -- see
    compile.c for the delicate details.
    """
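
    # Worked example of nextLine's encoding (illustration only): a SET_LINENO
    # 6 bytes and 1 line after the previous one appends the pair (6, 1); a
    # bytecode delta of 300 with a line delta of 1 appends (255, 0) followed
    # by (45, 1), since each table byte is limited to 255.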

    def __init__(self):
        self.code = []
        self.codeOffset = 0
        self.firstline = 0
        self.lastline = 0
        self.lastoff = 0
        self.lnotab = []

    def addCode(self, *args):
        for arg in args:
            self.code.append(chr(arg))
        self.codeOffset = self.codeOffset + len(args)

    def nextLine(self, lineno):
        if self.firstline == 0:
            self.firstline = lineno
            self.lastline = lineno
        else:
            # compute deltas
            addr = self.codeOffset - self.lastoff
            line = lineno - self.lastline
            # Python assumes that lineno always increases with
            # increasing bytecode address (lnotab is unsigned char).
            # Depending on when SET_LINENO instructions are emitted
            # this is not always true.  Consider the code:
            #     a = (1,
            #          b)
            # In the bytecode stream, the assignment to "a" occurs
            # after the loading of "b".  This works with the C Python
            # compiler because it only generates a SET_LINENO instruction
            # for the assignment.
            if line >= 0:
                push = self.lnotab.append
                while addr > 255:
                    push(255); push(0)
                    addr -= 255
                while line > 255:
                    push(addr); push(255)
                    line -= 255
                    addr = 0
                if addr > 0 or line > 0:
                    push(addr); push(line)
                self.lastline = lineno
                self.lastoff = self.codeOffset

    def getCode(self):
        return ''.join(self.code)

    def getTable(self):
        return ''.join(map(chr, self.lnotab))

class StackDepthTracker:
    # XXX 1. need to keep track of stack depth on jumps
    # XXX 2. at least partly as a result, this code is broken

    def findDepth(self, insts, debug=0):
        depth = 0
        maxDepth = 0
        for i in insts:
            opname = i[0]
            if debug:
                print i,
            delta = self.effect.get(opname, None)
            if delta is not None:
                depth = depth + delta
            else:
                # now check patterns
                for pat, pat_delta in self.patterns:
                    if opname[:len(pat)] == pat:
                        delta = pat_delta
                        depth = depth + delta
                        break
                # if we still haven't found a match
                if delta is None:
                    meth = getattr(self, opname, None)
                    if meth is not None:
                        depth = depth + meth(i[1])
            if depth > maxDepth:
                maxDepth = depth
            if debug:
                print depth, maxDepth
        return maxDepth

    effect = {
        'POP_TOP': -1,
        'DUP_TOP': 1,
        'LIST_APPEND': -1,
        'SET_ADD': -1,
        'MAP_ADD': -2,
        'SLICE+1': -1,
        'SLICE+2': -1,
        'SLICE+3': -2,
        'STORE_SLICE+0': -1,
        'STORE_SLICE+1': -2,
        'STORE_SLICE+2': -2,
        'STORE_SLICE+3': -3,
        'DELETE_SLICE+0': -1,
        'DELETE_SLICE+1': -2,
        'DELETE_SLICE+2': -2,
        'DELETE_SLICE+3': -3,
        'STORE_SUBSCR': -3,
        'DELETE_SUBSCR': -2,
        # PRINT_EXPR?
        'PRINT_ITEM': -1,
        'RETURN_VALUE': -1,
        'YIELD_VALUE': -1,
        'EXEC_STMT': -3,
        'BUILD_CLASS': -2,
        'STORE_NAME': -1,
        'STORE_ATTR': -2,
        'DELETE_ATTR': -1,
        'STORE_GLOBAL': -1,
        'BUILD_MAP': 1,
        'COMPARE_OP': -1,
        'STORE_FAST': -1,
        'IMPORT_STAR': -1,
        'IMPORT_NAME': -1,
        'IMPORT_FROM': 1,
        'LOAD_ATTR': 0, # unlike other loads
        # close enough...
        'SETUP_EXCEPT': 3,
        'SETUP_FINALLY': 3,
        'FOR_ITER': 1,
        'WITH_CLEANUP': -1,
        }
    # use pattern match
    patterns = [
        ('BINARY_', -1),
        ('LOAD_', 1),
        ]

    def UNPACK_SEQUENCE(self, count):
        return count-1
    def BUILD_TUPLE(self, count):
        return -count+1
    def BUILD_LIST(self, count):
        return -count+1
    def BUILD_SET(self, count):
        return -count+1
    def CALL_FUNCTION(self, argc):
        hi, lo = divmod(argc, 256)
        return -(lo + hi * 2)
    def CALL_FUNCTION_VAR(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_KW(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_VAR_KW(self, argc):
        return self.CALL_FUNCTION(argc)-2
    def MAKE_FUNCTION(self, argc):
        return -argc
    def MAKE_CLOSURE(self, argc):
        # XXX need to account for free variables too!
        return -argc
    def BUILD_SLICE(self, argc):
        if argc == 2:
            return -1
        elif argc == 3:
            return -2
    def DUP_TOPX(self, argc):
        return argc

findDepth = StackDepthTracker().findDepth