Merged revisions 55270-55324 via svnmerge from

svn+ssh://pythondev@svn.python.org/python/branches/p3yk

........
  r55271 | fred.drake | 2007-05-11 10:14:47 -0700 (Fri, 11 May 2007) | 3 lines

  remove jpeg, panel libraries for SGI; there is more IRIX stuff left over,
  I guess that should be removed too, but will leave for someone who is sure
........
  r55280 | fred.drake | 2007-05-11 19:11:37 -0700 (Fri, 11 May 2007) | 1 line

  remove mention of file that has been removed
........
  r55301 | brett.cannon | 2007-05-13 17:38:05 -0700 (Sun, 13 May 2007) | 4 lines

  Remove rexec and Bastion from the stdlib.  This also eliminates the need for
  f_restricted on frames.  This in turn negates the need for
  PyEval_GetRestricted() and PyFrame_IsRestricted().
........
  r55303 | brett.cannon | 2007-05-13 19:22:22 -0700 (Sun, 13 May 2007) | 2 lines

  Remove the md5 and sha modules.
........
  r55305 | george.yoshida | 2007-05-13 19:45:55 -0700 (Sun, 13 May 2007) | 2 lines

  fix markup
........
  r55306 | neal.norwitz | 2007-05-13 19:47:57 -0700 (Sun, 13 May 2007) | 1 line

  Get the doc building again after some removals.
........
  r55307 | neal.norwitz | 2007-05-13 19:50:45 -0700 (Sun, 13 May 2007) | 1 line

  Get test_pyclbr passing again after getstatus was removed from commands.  This "test case" was weird since it was just importing a seemingly random module.  Remove the import
........
  r55322 | brett.cannon | 2007-05-14 14:09:20 -0700 (Mon, 14 May 2007) | 3 lines

  Remove the compiler package.  Will eventually need a mechanism to byte compile
  an AST.
........
This commit is contained in:
Guido van Rossum 2007-05-14 22:03:55 +00:00
parent 827b055ffe
commit a8add0ec5e
68 changed files with 52 additions and 9866 deletions

View File

@ -88,7 +88,6 @@ LIBFILES= $(MANSTYLES) $(INDEXSTYLES) $(COMMONTEX) \
commontex/reportingbugs.tex \
lib/lib.tex \
lib/asttable.tex \
lib/compiler.tex \
lib/distutils.tex \
lib/email.tex \
lib/emailencoders.tex \
@ -200,21 +199,15 @@ LIBFILES= $(MANSTYLES) $(INDEXSTYLES) $(COMMONTEX) \
lib/libaudioop.tex \
lib/libimageop.tex \
lib/libaifc.tex \
lib/libjpeg.tex \
lib/librgbimg.tex \
lib/libossaudiodev.tex \
lib/libcrypto.tex \
lib/libhashlib.tex \
lib/libmd5.tex \
lib/libsha.tex \
lib/libhmac.tex \
lib/libstdwin.tex \
lib/libsun.tex \
lib/libxdrlib.tex \
lib/libimghdr.tex \
lib/librestricted.tex \
lib/librexec.tex \
lib/libbastion.tex \
lib/libformatter.tex \
lib/liboperator.tex \
lib/libresource.tex \

View File

@ -1,353 +0,0 @@
\chapter{Python compiler package \label{compiler}}
\sectionauthor{Jeremy Hylton}{jeremy@zope.com}
The Python compiler package is a tool for analyzing Python source code
and generating Python bytecode. The compiler contains libraries to
generate an abstract syntax tree from Python source code and to
generate Python bytecode from the tree.
The \refmodule{compiler} package is a Python source to bytecode
translator written in Python. It uses the built-in parser and
standard \refmodule{parser} module to generate a concrete syntax
tree. This tree is used to generate an abstract syntax tree (AST) and
then Python bytecode.
The full functionality of the package duplicates the builtin compiler
provided with the Python interpreter. It is intended to match its
behavior almost exactly. Why implement another compiler that does the
same thing? The package is useful for a variety of purposes. It can
be modified more easily than the builtin compiler. The AST it
generates is useful for analyzing Python source code.
This chapter explains how the various components of the
\refmodule{compiler} package work. It blends reference material with
a tutorial.
The following modules are part of the \refmodule{compiler} package:
\localmoduletable
\section{The basic interface}
\declaremodule{}{compiler}
The top-level of the package defines four functions. If you import
\module{compiler}, you will get these functions and a collection of
modules contained in the package.
\begin{funcdesc}{parse}{buf}
Returns an abstract syntax tree for the Python source code in \var{buf}.
The function raises \exception{SyntaxError} if there is an error in the
source code. The return value is a \class{compiler.ast.Module} instance
that contains the tree.
\end{funcdesc}
\begin{funcdesc}{parseFile}{path}
Return an abstract syntax tree for the Python source code in the file
specified by \var{path}. It is equivalent to
\code{parse(open(\var{path}).read())}.
\end{funcdesc}
\begin{funcdesc}{walk}{ast, visitor\optional{, verbose}}
Do a pre-order walk over the abstract syntax tree \var{ast}. Call the
appropriate method on the \var{visitor} instance for each node
encountered.
\end{funcdesc}
\begin{funcdesc}{compile}{source, filename, mode, flags=None,
dont_inherit=None}
Compile the string \var{source}, a Python module, statement or
expression, into a code object that can be executed by the exec
statement or \function{eval()}. This function is a replacement for the
built-in \function{compile()} function.
The \var{filename} will be used for run-time error messages.
The \var{mode} must be 'exec' to compile a module, 'single' to compile a
single (interactive) statement, or 'eval' to compile an expression.
The \var{flags} and \var{dont_inherit} arguments affect future-related
statements, but are not supported yet.
\end{funcdesc}
\begin{funcdesc}{compileFile}{source}
Compiles the file \var{source} and generates a .pyc file.
\end{funcdesc}
The \module{compiler} package contains the following modules:
\refmodule[compiler.ast]{ast}, \module{consts}, \module{future},
\module{misc}, \module{pyassem}, \module{pycodegen}, \module{symbols},
\module{transformer}, and \refmodule[compiler.visitor]{visitor}.
\section{Limitations}
There are some problems with the error checking of the compiler
package. The interpreter detects syntax errors in two distinct
phases. One set of errors is detected by the interpreter's parser,
the other set by the compiler. The compiler package relies on the
interpreter's parser, so it gets the first phase of error checking for
free. It implements the second phase itself, and that implementation is
incomplete. For example, the compiler package does not raise an error
if a name appears more than once in an argument list:
\code{def f(x, x): ...}
A future version of the compiler should fix these problems.
\section{Python Abstract Syntax}
The \module{compiler.ast} module defines an abstract syntax for
Python. In the abstract syntax tree, each node represents a syntactic
construct. The root of the tree is a \class{Module} object.
The abstract syntax offers a higher level interface to parsed Python
source code. The \refmodule{parser}
module and the compiler written in C for the Python interpreter use a
concrete syntax tree. The concrete syntax is tied closely to the
grammar description used for the Python parser. Instead of a single
node for a construct, there are often several levels of nested nodes
that are introduced by Python's precedence rules.
The abstract syntax tree is created by the
\module{compiler.transformer} module. The transformer relies on the
builtin Python parser to generate a concrete syntax tree. It
generates an abstract syntax tree from the concrete tree.
The \module{transformer} module was created by Greg
Stein\index{Stein, Greg} and Bill Tutt\index{Tutt, Bill} for an
experimental Python-to-C compiler. The current version contains a
number of modifications and improvements, but the basic form of the
abstract syntax and of the transformer are due to Stein and Tutt.
\subsection{AST Nodes}
\declaremodule{}{compiler.ast}
The \module{compiler.ast} module is generated from a text file that
describes each node type and its elements. Each node type is
represented as a class that inherits from the abstract base class
\class{compiler.ast.Node} and defines a set of named attributes for
child nodes.
\begin{classdesc}{Node}{}
The \class{Node} instances are created automatically by the parser
generator. The recommended interface for specific \class{Node}
instances is to use the public attributes to access child nodes. A
public attribute may be bound to a single node or to a sequence of
nodes, depending on the \class{Node} type. For example, the
\member{bases} attribute of the \class{Class} node, is bound to a
list of base class nodes, and the \member{doc} attribute is bound to
a single node.
Each \class{Node} instance has a \member{lineno} attribute which may
be \code{None}. XXX Not sure what the rules are for which nodes
will have a useful lineno.
\end{classdesc}
All \class{Node} objects offer the following methods:
\begin{methoddesc}{getChildren}{}
Returns a flattened list of the child nodes and objects in the
order they occur. Specifically, the order of the nodes is the
order in which they appear in the Python grammar. Not all of the
children are \class{Node} instances. The names of functions and
classes, for example, are plain strings.
\end{methoddesc}
\begin{methoddesc}{getChildNodes}{}
Returns a flattened list of the child nodes in the order they
occur. This method is like \method{getChildren()}, except that it
only returns those children that are \class{Node} instances.
\end{methoddesc}
Two examples illustrate the general structure of \class{Node}
classes. The \keyword{while} statement is defined by the following
grammar production:
\begin{verbatim}
while_stmt: "while" expression ":" suite
["else" ":" suite]
\end{verbatim}
The \class{While} node has three attributes: \member{test},
\member{body}, and \member{else_}. (If the natural name for an
attribute is also a Python reserved word, it can't be used as an
attribute name. An underscore is appended to the word to make it a
legal identifier, hence \member{else_} instead of \keyword{else}.)
The \keyword{if} statement is more complicated because it can include
several tests.
\begin{verbatim}
if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
\end{verbatim}
The \class{If} node only defines two attributes: \member{tests} and
\member{else_}. The \member{tests} attribute is a sequence of test
expression, consequent body pairs. There is one pair for each
\keyword{if}/\keyword{elif} clause. The first element of the pair is
the test expression. The second element is a \class{Stmt} node that
contains the code to execute if the test is true.
The \method{getChildren()} method of \class{If} returns a flat list of
child nodes. If there are three \keyword{if}/\keyword{elif} clauses
and no \keyword{else} clause, then \method{getChildren()} will return
a list of six elements: the first test expression, the first
\class{Stmt}, the second test expression, etc.
The following table lists each of the \class{Node} subclasses defined
in \module{compiler.ast} and each of the public attributes available
on their instances. The values of most of the attributes are
themselves \class{Node} instances or sequences of instances. When the
value is something other than an instance, the type is noted in the
comment. The attributes are listed in the order in which they are
returned by \method{getChildren()} and \method{getChildNodes()}.
\input{asttable}
\subsection{Assignment nodes}
There is a collection of nodes used to represent assignments. Each
assignment statement in the source code becomes a single
\class{Assign} node in the AST. The \member{nodes} attribute is a
list that contains a node for each assignment target. This is
necessary because assignment can be chained, e.g. \code{a = b = 2}.
Each \class{Node} in the list will be one of the following classes:
\class{AssAttr}, \class{AssList}, \class{AssName}, or
\class{AssTuple}.
Each target assignment node will describe the kind of object being
assigned to: \class{AssName} for a simple name, e.g. \code{a = 1}.
\class{AssAttr} for an attribute assigned, e.g. \code{a.x = 1}.
\class{AssList} and \class{AssTuple} for list and tuple expansion
respectively, e.g. \code{a, b, c = a_tuple}.
The target assignment nodes also have a \member{flags} attribute that
indicates whether the node is being used for assignment or in a delete
statement. The \class{AssName} is also used to represent a delete
statement, e.g. \code{del x}.
When an expression contains several attribute references, an
assignment or delete statement will contain only one \class{AssAttr}
node -- for the final attribute reference. The other attribute
references will be represented as \class{Getattr} nodes in the
\member{expr} attribute of the \class{AssAttr} instance.
\subsection{Examples}
This section shows several simple examples of ASTs for Python source
code. The examples demonstrate how to use the \function{parse()}
function, what the repr of an AST looks like, and how to access
attributes of an AST node.
The first module defines a single function. Assume it is stored in
\file{/tmp/doublelib.py}.
\begin{verbatim}
"""This is an example module.
This is the docstring.
"""
def double(x):
"Return twice the argument"
return x * 2
\end{verbatim}
In the interactive interpreter session below, I have reformatted the
long AST reprs for readability. The AST reprs use unqualified class
names. If you want to create an instance from a repr, you must import
the class names from the \module{compiler.ast} module.
\begin{verbatim}
>>> import compiler
>>> mod = compiler.parseFile("/tmp/doublelib.py")
>>> mod
Module('This is an example module.\n\nThis is the docstring.\n',
Stmt([Function(None, 'double', ['x'], [], 0,
'Return twice the argument',
Stmt([Return(Mul((Name('x'), Const(2))))]))]))
>>> from compiler.ast import *
>>> Module('This is an example module.\n\nThis is the docstring.\n',
... Stmt([Function(None, 'double', ['x'], [], 0,
... 'Return twice the argument',
... Stmt([Return(Mul((Name('x'), Const(2))))]))]))
Module('This is an example module.\n\nThis is the docstring.\n',
Stmt([Function(None, 'double', ['x'], [], 0,
'Return twice the argument',
Stmt([Return(Mul((Name('x'), Const(2))))]))]))
>>> mod.doc
'This is an example module.\n\nThis is the docstring.\n'
>>> for node in mod.node.nodes:
... print node
...
Function(None, 'double', ['x'], [], 0, 'Return twice the argument',
Stmt([Return(Mul((Name('x'), Const(2))))]))
>>> func = mod.node.nodes[0]
>>> func.code
Stmt([Return(Mul((Name('x'), Const(2))))])
\end{verbatim}
\section{Using Visitors to Walk ASTs}
\declaremodule{}{compiler.visitor}
The visitor pattern is ... The \refmodule{compiler} package uses a
variant on the visitor pattern that takes advantage of Python's
introspection features to eliminate the need for much of the visitor's
infrastructure.
The classes being visited do not need to be programmed to accept
visitors. The visitor need only define visit methods for classes it
is specifically interested in; a default visit method can handle the
rest.
XXX The magic \method{visit()} method for visitors.
\begin{funcdesc}{walk}{tree, visitor\optional{, verbose}}
\end{funcdesc}
\begin{classdesc}{ASTVisitor}{}
The \class{ASTVisitor} is responsible for walking over the tree in the
correct order. A walk begins with a call to \method{preorder()}. For
each node, it checks the \var{visitor} argument to \method{preorder()}
for a method named `visitNodeType,' where NodeType is the name of the
node's class, e.g. for a \class{While} node a \method{visitWhile()}
would be called. If the method exists, it is called with the node as
its first argument.
The visitor method for a particular node type can control how child
nodes are visited during the walk. The \class{ASTVisitor} modifies
the visitor argument by adding a visit method to the visitor; this
method can be used to visit a particular child node. If no visitor is
found for a particular node type, the \method{default()} method is
called.
\end{classdesc}
\class{ASTVisitor} objects have the following methods:
XXX describe extra arguments
\begin{methoddesc}{default}{node\optional{, \moreargs}}
\end{methoddesc}
\begin{methoddesc}{dispatch}{node\optional{, \moreargs}}
\end{methoddesc}
\begin{methoddesc}{preorder}{tree, visitor}
\end{methoddesc}
\section{Bytecode Generation}
The code generator is a visitor that emits bytecodes. Each visit method
can call the \method{emit()} method to emit a new bytecode. The basic
code generator is specialized for modules, classes, and functions. An
assembler converts the emitted instructions to the low-level bytecode
format. It handles things like generation of constant lists of code
objects and calculation of jump offsets.

View File

@ -182,8 +182,6 @@ and how to embed it in other applications.
\input{libcrypto} % Cryptographic Services
\input{libhashlib}
\input{libhmac}
\input{libmd5}
\input{libsha}
% =============
% FILE & DATABASE STORAGE
@ -388,9 +386,6 @@ and how to embed it in other applications.
\input{custominterp} % Custom interpreter
\input{libcode}
\input{libcodeop}
\input{librestricted} % Restricted Execution
\input{librexec}
\input{libbastion}
\input{modules} % Importing Modules
@ -419,7 +414,6 @@ and how to embed it in other applications.
\input{libpickletools}
\input{distutils}
\input{compiler} % compiler package
\input{libast}
\input{libmisc} % Miscellaneous Services
@ -434,9 +428,6 @@ and how to embed it in other applications.
%\input{libstdwin} % STDWIN ONLY
\input{libjpeg}
%\input{libpanel}
\input{libsun} % SUNOS ONLY
\input{libsunaudio}
% XXX(nnorwitz): the modules below this comment should be kept.

View File

@ -1,57 +0,0 @@
\section{\module{Bastion} ---
Restricting access to objects}
\declaremodule{standard}{Bastion}
\modulesynopsis{Providing restricted access to objects.}
\moduleauthor{Barry Warsaw}{bwarsaw@python.org}
\versionchanged[Disabled module]{2.3}
\begin{notice}[warning]
The documentation has been left in place to help in reading old code
that uses the module.
\end{notice}
% I'm concerned that the word 'bastion' won't be understood by people
% for whom English is a second language, making the module name
% somewhat mysterious. Thus, the brief definition... --amk
According to the dictionary, a bastion is ``a fortified area or
position'', or ``something that is considered a stronghold.'' It's a
suitable name for this module, which provides a way to forbid access
to certain attributes of an object. It must always be used with the
\refmodule{rexec} module, in order to allow restricted-mode programs
access to certain safe attributes of an object, while denying access
to other, unsafe attributes.
% I've punted on the issue of documenting keyword arguments for now.
\begin{funcdesc}{Bastion}{object\optional{, filter\optional{,
name\optional{, class}}}}
Protect the object \var{object}, returning a bastion for the
object. Any attempt to access one of the object's attributes will
have to be approved by the \var{filter} function; if the access is
denied an \exception{AttributeError} exception will be raised.
If present, \var{filter} must be a function that accepts a string
containing an attribute name, and returns true if access to that
attribute will be permitted; if \var{filter} returns false, the access
is denied. The default filter denies access to any function beginning
with an underscore (\character{_}). The bastion's string representation
will be \samp{<Bastion for \var{name}>} if a value for
\var{name} is provided; otherwise, \samp{repr(\var{object})} will be
used.
\var{class}, if present, should be a subclass of \class{BastionClass};
see the code in \file{bastion.py} for the details. Overriding the
default \class{BastionClass} will rarely be required.
\end{funcdesc}
\begin{classdesc}{BastionClass}{getfunc, name}
Class which actually implements bastion objects. This is the default
class used by \function{Bastion()}. The \var{getfunc} parameter is a
function which returns the value of an attribute which should be
exposed to the restricted execution environment when called with the
name of the attribute as the only parameter. \var{name} is used to
construct the \function{repr()} of the \class{BastionClass} instance.
\end{classdesc}

View File

@ -1,80 +0,0 @@
\section{\module{jpeg} ---
Read and write JPEG files}
\declaremodule{builtin}{jpeg}
\platform{IRIX}
\modulesynopsis{Read and write image files in compressed JPEG format.}
The module \module{jpeg} provides access to the jpeg compressor and
decompressor written by the Independent JPEG Group
\index{Independent JPEG Group}(IJG). JPEG is a standard for
compressing pictures; it is defined in ISO 10918. For details on JPEG
or the Independent JPEG Group software refer to the JPEG standard or
the documentation provided with the software.
A portable interface to JPEG image files is available with the Python
Imaging Library (PIL) by Fredrik Lundh. Information on PIL is
available at \url{http://www.pythonware.com/products/pil/}.
\index{Python Imaging Library}
\index{PIL (the Python Imaging Library)}
\index{Lundh, Fredrik}
The \module{jpeg} module defines an exception and some functions.
\begin{excdesc}{error}
Exception raised by \function{compress()} and \function{decompress()}
in case of errors.
\end{excdesc}
\begin{funcdesc}{compress}{data, w, h, b}
Treat data as a pixmap of width \var{w} and height \var{h}, with
\var{b} bytes per pixel. The data is in SGI GL order, so the first
pixel is in the lower-left corner. This means that \function{gl.lrectread()}
return data can immediately be passed to \function{compress()}.
Currently only 1 byte and 4 byte pixels are allowed, the former being
treated as greyscale and the latter as RGB color.
\function{compress()} returns a string that contains the compressed
picture, in JFIF\index{JFIF} format.
\end{funcdesc}
\begin{funcdesc}{decompress}{data}
Data is a string containing a picture in JFIF\index{JFIF} format. It
returns a tuple \code{(\var{data}, \var{width}, \var{height},
\var{bytesperpixel})}. Again, the data is suitable to pass to
\function{gl.lrectwrite()}.
\end{funcdesc}
\begin{funcdesc}{setoption}{name, value}
Set various options. Subsequent \function{compress()} and
\function{decompress()} calls will use these options. The following
options are available:
\begin{tableii}{l|p{3in}}{code}{Option}{Effect}
\lineii{'forcegray'}{%
Force output to be grayscale, even if input is RGB.}
\lineii{'quality'}{%
Set the quality of the compressed image to a value between
\code{0} and \code{100} (default is \code{75}). This only affects
compression.}
\lineii{'optimize'}{%
Perform Huffman table optimization. Takes longer, but results in
smaller compressed image. This only affects compression.}
\lineii{'smooth'}{%
Perform inter-block smoothing on uncompressed image. Only useful
for low-quality images. This only affects decompression.}
\end{tableii}
\end{funcdesc}
\begin{seealso}
\seetitle{JPEG Still Image Data Compression Standard}{The
canonical reference for the JPEG image format, by
Pennebaker and Mitchell.}
\seetitle[http://www.w3.org/Graphics/JPEG/itu-t81.pdf]{Information
Technology - Digital Compression and Coding of
Continuous-tone Still Images - Requirements and
Guidelines}{The ISO standard for JPEG is also published as
ITU T.81. This is available online in PDF form.}
\end{seealso}

View File

@ -1,92 +0,0 @@
\section{\module{md5} ---
MD5 message digest algorithm}
\declaremodule{builtin}{md5}
\modulesynopsis{RSA's MD5 message digest algorithm.}
\deprecated{2.5}{Use the \refmodule{hashlib} module instead.}
This module implements the interface to RSA's MD5 message digest
\index{message digest, MD5}
algorithm (see also Internet \rfc{1321}). Its use is quite
straightforward:\ use \function{new()} to create an md5 object.
You can now feed this object with arbitrary strings using the
\method{update()} method, and at any point you can ask it for the
\dfn{digest} (a strong kind of 128-bit checksum,
a.k.a. ``fingerprint'') of the concatenation of the strings fed to it
so far using the \method{digest()} method.
\index{checksum!MD5}
For example, to obtain the digest of the string \code{'Nobody inspects
the spammish repetition'}:
\begin{verbatim}
>>> import md5
>>> m = md5.new()
>>> m.update("Nobody inspects")
>>> m.update(" the spammish repetition")
>>> m.digest()
'\xbbd\x9c\x83\xdd\x1e\xa5\xc9\xd9\xde\xc9\xa1\x8d\xf0\xff\xe9'
\end{verbatim}
More condensed:
\begin{verbatim}
>>> md5.new("Nobody inspects the spammish repetition").digest()
'\xbbd\x9c\x83\xdd\x1e\xa5\xc9\xd9\xde\xc9\xa1\x8d\xf0\xff\xe9'
\end{verbatim}
The following values are provided as constants in the module and as
attributes of the md5 objects returned by \function{new()}:
\begin{datadesc}{digest_size}
The size of the resulting digest in bytes. This is always
\code{16}.
\end{datadesc}
The md5 module provides the following functions:
\begin{funcdesc}{new}{\optional{arg}}
Return a new md5 object. If \var{arg} is present, the method call
\code{update(\var{arg})} is made.
\end{funcdesc}
\begin{funcdesc}{md5}{\optional{arg}}
For backward compatibility reasons, this is an alternative name for the
\function{new()} function.
\end{funcdesc}
An md5 object has the following methods:
\begin{methoddesc}[md5]{update}{arg}
Update the md5 object with the string \var{arg}. Repeated calls are
equivalent to a single call with the concatenation of all the
arguments: \code{m.update(a); m.update(b)} is equivalent to
\code{m.update(a+b)}.
\end{methoddesc}
\begin{methoddesc}[md5]{digest}{}
Return the digest of the strings passed to the \method{update()}
method so far. This is a 16-byte string which may contain
non-\ASCII{} characters, including null bytes.
\end{methoddesc}
\begin{methoddesc}[md5]{hexdigest}{}
Like \method{digest()} except the digest is returned as a string of
length 32, containing only hexadecimal digits. This may
be used to exchange the value safely in email or other non-binary
environments.
\end{methoddesc}
\begin{methoddesc}[md5]{copy}{}
Return a copy (``clone'') of the md5 object. This can be used to
efficiently compute the digests of strings that share a common initial
substring.
\end{methoddesc}
\begin{seealso}
\seemodule{sha}{Similar module implementing the Secure Hash
Algorithm (SHA). The SHA algorithm is considered a
more secure hash.}
\end{seealso}

View File

@ -1,74 +0,0 @@
\section{\module{panel} ---
None}
\declaremodule{standard}{panel}
\modulesynopsis{None}
\strong{Please note:} The FORMS library, to which the
\code{fl}\refbimodindex{fl} module described above interfaces, is a
simpler and more accessible user interface library for use with GL
than the \code{panel} module (besides also being by a Dutch author).
This module should be used instead of the built-in module
\code{pnl}\refbimodindex{pnl}
to interface with the
\emph{Panel Library}.
The module is too large to document here in its entirety.
One interesting function:
\begin{funcdesc}{defpanellist}{filename}
Parses a panel description file containing S-expressions written by the
\emph{Panel Editor}
that accompanies the Panel Library and creates the described panels.
It returns a list of panel objects.
\end{funcdesc}
\warning{The Python interpreter will dump core if you don't create a
GL window before calling
\code{panel.mkpanel()}
or
\code{panel.defpanellist()}.}
\section{\module{panelparser} ---
None}
\declaremodule{standard}{panelparser}
\modulesynopsis{None}
This module defines a self-contained parser for S-expressions as output
by the Panel Editor (which is written in Scheme so it can't help writing
S-expressions).
The relevant function is
\code{panelparser.parse_file(\var{file})}
which has a file object (not a filename!) as argument and returns a list
of parsed S-expressions.
Each S-expression is converted into a Python list, with atoms converted
to Python strings and sub-expressions (recursively) to Python lists.
For more details, read the module file.
% XXXXJH should be funcdesc, I think
\section{\module{pnl} ---
None}
\declaremodule{builtin}{pnl}
\modulesynopsis{None}
This module provides access to the
\emph{Panel Library}
built by NASA Ames\index{NASA} (to get it, send email to
\code{panel-request@nas.nasa.gov}).
All access to it should be done through the standard module
\code{panel}\refstmodindex{panel},
which transparently exports most functions from
\code{pnl}
but redefines
\code{pnl.dopanel()}.
\warning{The Python interpreter will dump core if you don't create a
GL window before calling \code{pnl.mkpanel()}.}
The module is too large to document here in its entirety.

View File

@ -1,66 +0,0 @@
\chapter{Restricted Execution \label{restricted}}
\begin{notice}[warning]
In Python 2.3 these modules have been disabled due to various known
and not readily fixable security holes. The modules are still
documented here to help in reading old code that uses the
\module{rexec} and \module{Bastion} modules.
\end{notice}
\emph{Restricted execution} is the basic framework in Python that allows
for the segregation of trusted and untrusted code. The framework is based on the
notion that trusted Python code (a \emph{supervisor}) can create a
``padded cell'' (or environment) with limited permissions, and run the
untrusted code within this cell. The untrusted code cannot break out
of its cell, and can only interact with sensitive system resources
through interfaces defined and managed by the trusted code. The term
``restricted execution'' is favored over ``safe-Python''
since true safety is hard to define, and is determined by the way the
restricted environment is created. Note that the restricted
environments can be nested, with inner cells creating subcells of
lesser, but never greater, privilege.
An interesting aspect of Python's restricted execution model is that
the interfaces presented to untrusted code usually have the same names
as those presented to trusted code. Therefore no special interfaces
need to be learned to write code designed to run in a restricted
environment. And because the exact nature of the padded cell is
determined by the supervisor, different restrictions can be imposed,
depending on the application. For example, it might be deemed
``safe'' for untrusted code to read any file within a specified
directory, but never to write a file. In this case, the supervisor
may redefine the built-in \function{open()} function so that it raises
an exception whenever the \var{mode} parameter is \code{'w'}. It
might also perform a \cfunction{chroot()}-like operation on the
\var{filename} parameter, such that root is always relative to some
safe ``sandbox'' area of the filesystem. In this case, the untrusted
code would still see a built-in \function{open()} function in its
environment, with the same calling interface. The semantics would be
identical too, with \exception{IOError}s being raised when the
supervisor determined that an unallowable parameter is being used.
The Python run-time determines whether a particular code block is
executing in restricted execution mode based on the identity of the
\code{__builtins__} object in its global variables: if this is (the
dictionary of) the standard \refmodule[builtin]{__builtin__} module,
the code is deemed to be unrestricted, else it is deemed to be
restricted.
Python code executing in restricted mode faces a number of limitations
that are designed to prevent it from escaping from the padded cell.
For instance, the function object attribute \member{func_globals} and
the class and instance object attribute \member{__dict__} are
unavailable.
Two modules provide the framework for setting up restricted execution
environments:
\localmoduletable
\begin{seealso}
\seetitle[http://grail.sourceforge.net/]{Grail Home Page}
{Grail, an Internet browser written in Python, uses these
modules to support Python applets. More
information on the use of Python's restricted execution
mode in Grail is available on the Web site.}
\end{seealso}

View File

@ -1,275 +0,0 @@
\section{\module{rexec} ---
Restricted execution framework}
\declaremodule{standard}{rexec}
\modulesynopsis{Basic restricted execution framework.}
\versionchanged[Disabled module]{2.3}
\begin{notice}[warning]
The documentation has been left in place to help in reading old code
that uses the module.
\end{notice}
This module contains the \class{RExec} class, which supports
\method{r_exec()}, \method{r_eval()}, \method{r_execfile()}, and
\method{r_import()} methods, which are restricted versions of the standard
Python functions \method{exec()}, \method{eval()}, \method{execfile()} and
the \keyword{import} statement.
Code executed in this restricted environment will
only have access to modules and functions that are deemed safe; you
can subclass \class{RExec} to add or remove capabilities as desired.
\begin{notice}[warning]
While the \module{rexec} module is designed to perform as described
below, it does have a few known vulnerabilities which could be
exploited by carefully written code. Thus it should not be relied
upon in situations requiring ``production ready'' security. In such
situations, execution via sub-processes or very careful
``cleansing'' of both code and data to be processed may be
necessary. Alternatively, help in patching known \module{rexec}
vulnerabilities would be welcomed.
\end{notice}
\begin{notice}
The \class{RExec} class can prevent code from performing unsafe
operations like reading or writing disk files, or using TCP/IP
sockets. However, it does not protect against code using extremely
large amounts of memory or processor time.
\end{notice}
\begin{classdesc}{RExec}{\optional{hooks\optional{, verbose}}}
Returns an instance of the \class{RExec} class.
\var{hooks} is an instance of the \class{RHooks} class or a subclass of it.
If it is omitted or \code{None}, the default \class{RHooks} class is
instantiated.
Whenever the \module{rexec} module searches for a module (even a
built-in one) or reads a module's code, it doesn't actually go out to
the file system itself. Rather, it calls methods of an \class{RHooks}
instance that was passed to or created by its constructor. (Actually,
the \class{RExec} object doesn't make these calls --- they are made by
a module loader object that's part of the \class{RExec} object. This
allows another level of flexibility, which can be useful when changing
the mechanics of \keyword{import} within the restricted environment.)
By providing an alternate \class{RHooks} object, we can control the
file system accesses made to import a module, without changing the
actual algorithm that controls the order in which those accesses are
made. For instance, we could substitute an \class{RHooks} object that
passes all filesystem requests to a file server elsewhere, via some
RPC mechanism such as ILU. Grail's applet loader uses this to support
importing applets from a URL for a directory.
If \var{verbose} is true, additional debugging output may be sent to
standard output.
\end{classdesc}
It is important to be aware that code running in a restricted
environment can still call the \function{sys.exit()} function. To
disallow restricted code from exiting the interpreter, always protect
calls that cause restricted code to run with a
\keyword{try}/\keyword{except} statement that catches the
\exception{SystemExit} exception. Removing the \function{sys.exit()}
function from the restricted environment is not sufficient --- the
restricted code could still use \code{raise SystemExit}. Removing
\exception{SystemExit} is not a reasonable option; some library code
makes use of this and would break were it not available.
\begin{seealso}
\seetitle[http://grail.sourceforge.net/]{Grail Home Page}{Grail is a
Web browser written entirely in Python. It uses the
\module{rexec} module as a foundation for supporting
Python applets, and can be used as an example usage of
this module.}
\end{seealso}
\subsection{RExec Objects \label{rexec-objects}}
\class{RExec} instances support the following methods:
\begin{methoddesc}[RExec]{r_eval}{code}
\var{code} must either be a string containing a Python expression, or
a compiled code object, which will be evaluated in the restricted
environment's \module{__main__} module. The value of the expression or
code object will be returned.
\end{methoddesc}
\begin{methoddesc}[RExec]{r_exec}{code}
\var{code} must either be a string containing one or more lines of
Python code, or a compiled code object, which will be executed in the
restricted environment's \module{__main__} module.
\end{methoddesc}
\begin{methoddesc}[RExec]{r_execfile}{filename}
Execute the Python code contained in the file \var{filename} in the
restricted environment's \module{__main__} module.
\end{methoddesc}
Methods whose names begin with \samp{s_} are similar to the functions
beginning with \samp{r_}, but the code will be granted access to
restricted versions of the standard I/O streams \code{sys.stdin},
\code{sys.stderr}, and \code{sys.stdout}.
\begin{methoddesc}[RExec]{s_eval}{code}
\var{code} must be a string containing a Python expression, which will
be evaluated in the restricted environment.
\end{methoddesc}
\begin{methoddesc}[RExec]{s_exec}{code}
\var{code} must be a string containing one or more lines of Python code,
which will be executed in the restricted environment.
\end{methoddesc}
\begin{methoddesc}[RExec]{s_execfile}{filename}
Execute the Python code contained in the file \var{filename} in the
restricted environment.
\end{methoddesc}
\class{RExec} objects must also support various methods which will be
implicitly called by code executing in the restricted environment.
Overriding these methods in a subclass is used to change the policies
enforced by a restricted environment.
\begin{methoddesc}[RExec]{r_import}{modulename\optional{, globals\optional{,
locals\optional{, fromlist}}}}
Import the module \var{modulename}, raising an \exception{ImportError}
exception if the module is considered unsafe.
\end{methoddesc}
\begin{methoddesc}[RExec]{r_open}{filename\optional{, mode\optional{, bufsize}}}
Method called when \function{open()} is called in the restricted
environment. The arguments are identical to those of \function{open()},
and a file object (or a class instance compatible with file objects)
should be returned. \class{RExec}'s default behaviour is to allow opening
any file for reading, but forbidding any attempt to write a file. See
the example below for an implementation of a less restrictive
\method{r_open()}.
\end{methoddesc}
\begin{methoddesc}[RExec]{r_reload}{module}
Reload the module object \var{module}, re-parsing and re-initializing it.
\end{methoddesc}
\begin{methoddesc}[RExec]{r_unload}{module}
Unload the module object \var{module} (remove it from the
restricted environment's \code{sys.modules} dictionary).
\end{methoddesc}
And their equivalents with access to restricted standard I/O streams:
\begin{methoddesc}[RExec]{s_import}{modulename\optional{, globals\optional{,
locals\optional{, fromlist}}}}
Import the module \var{modulename}, raising an \exception{ImportError}
exception if the module is considered unsafe.
\end{methoddesc}
\begin{methoddesc}[RExec]{s_reload}{module}
Reload the module object \var{module}, re-parsing and re-initializing it.
\end{methoddesc}
\begin{methoddesc}[RExec]{s_unload}{module}
Unload the module object \var{module}.
% XXX what are the semantics of this?
\end{methoddesc}
\subsection{Defining restricted environments \label{rexec-extension}}
The \class{RExec} class has the following class attributes, which are
used by the \method{__init__()} method. Changing them on an existing
instance won't have any effect; instead, create a subclass of
\class{RExec} and assign them new values in the class definition.
Instances of the new class will then use those new values. All these
attributes are tuples of strings.
\begin{memberdesc}[RExec]{nok_builtin_names}
Contains the names of built-in functions which will \emph{not} be
available to programs running in the restricted environment. The
value for \class{RExec} is \code{('open', 'reload', '__import__')}.
(This gives the exceptions, because by far the majority of built-in
functions are harmless. A subclass that wants to override this
variable should probably start with the value from the base class and
concatenate additional forbidden functions --- when new dangerous
built-in functions are added to Python, they will also be added to
this module.)
\end{memberdesc}
\begin{memberdesc}[RExec]{ok_builtin_modules}
Contains the names of built-in modules which can be safely imported.
The value for \class{RExec} is \code{('audioop', 'array', 'binascii',
'cmath', 'errno', 'imageop', 'marshal', 'math', 'md5', 'operator',
'parser', 'regex', 'select', 'sha', '_sre', 'strop',
'struct', 'time')}. A similar remark about overriding this variable
applies --- use the value from the base class as a starting point.
\end{memberdesc}
\begin{memberdesc}[RExec]{ok_path}
Contains the directories which will be searched when an \keyword{import}
is performed in the restricted environment.
The value for \class{RExec} is the same as \code{sys.path} (at the time
the module is loaded) for unrestricted code.
\end{memberdesc}
\begin{memberdesc}[RExec]{ok_posix_names}
% Should this be called ok_os_names?
Contains the names of the functions in the \refmodule{os} module which will be
available to programs running in the restricted environment. The
value for \class{RExec} is \code{('error', 'fstat', 'listdir',
'lstat', 'readlink', 'stat', 'times', 'uname', 'getpid', 'getppid',
'getcwd', 'getuid', 'getgid', 'geteuid', 'getegid')}.
\end{memberdesc}
\begin{memberdesc}[RExec]{ok_sys_names}
Contains the names of the functions and variables in the \refmodule{sys}
module which will be available to programs running in the restricted
environment. The value for \class{RExec} is \code{('ps1', 'ps2',
'copyright', 'version', 'platform', 'exit', 'maxint')}.
\end{memberdesc}
\begin{memberdesc}[RExec]{ok_file_types}
Contains the file types from which modules are allowed to be loaded.
Each file type is an integer constant defined in the \refmodule{imp} module.
The meaningful values are \constant{PY_SOURCE}, \constant{PY_COMPILED}, and
\constant{C_EXTENSION}. The value for \class{RExec} is \code{(C_EXTENSION,
PY_SOURCE)}. Adding \constant{PY_COMPILED} in subclasses is not recommended;
an attacker could exit the restricted execution mode by putting a forged
byte-compiled file (\file{.pyc}) anywhere in your file system, for example
by writing it to \file{/tmp} or uploading it to the \file{/incoming}
directory of your public FTP server.
\end{memberdesc}
\subsection{An example}
Let us say that we want a slightly more relaxed policy than the
standard \class{RExec} class. For example, if we're willing to allow
files in \file{/tmp} to be written, we can subclass the \class{RExec}
class:
\begin{verbatim}
class TmpWriterRExec(rexec.RExec):
def r_open(self, file, mode='r', buf=-1):
if mode in ('r', 'rb'):
pass
elif mode in ('w', 'wb', 'a', 'ab'):
# check filename : must begin with /tmp/
if file[:5]!='/tmp/':
raise IOError, "can't write outside /tmp"
elif (string.find(file, '/../') >= 0 or
file[:3] == '../' or file[-3:] == '/..'):
raise IOError, "'..' in filename forbidden"
else: raise IOError, "Illegal open() mode"
return open(file, mode, buf)
\end{verbatim}
%
Notice that the above code will occasionally forbid a perfectly valid
filename; for example, code in the restricted environment won't be
able to open a file called \file{/tmp/foo/../bar}. To fix this, the
\method{r_open()} method would have to simplify the filename to
\file{/tmp/bar}, which would require splitting apart the filename and
performing various operations on it. In cases where security is at
stake, it may be preferable to write simple code which is sometimes
overly restrictive, instead of more general code that is also more
complex and may harbor a subtle security hole.

View File

@ -1,83 +0,0 @@
\section{\module{sha} ---
SHA-1 message digest algorithm}
\declaremodule{builtin}{sha}
\modulesynopsis{NIST's secure hash algorithm, SHA.}
\sectionauthor{Fred L. Drake, Jr.}{fdrake@acm.org}
\deprecated{2.5}{Use the \refmodule{hashlib} module instead.}
This module implements the interface to NIST's\index{NIST} secure hash
algorithm,\index{Secure Hash Algorithm} known as SHA-1. SHA-1 is an
improved version of the original SHA hash algorithm. It is used in
the same way as the \refmodule{md5} module:\ use \function{new()}
to create an sha object, then feed this object with arbitrary strings
using the \method{update()} method, and at any point you can ask it
for the \dfn{digest} of the concatenation of the strings fed to it
so far.\index{checksum!SHA} SHA-1 digests are 160 bits instead of
MD5's 128 bits.
\begin{funcdesc}{new}{\optional{string}}
Return a new sha object. If \var{string} is present, the method
call \code{update(\var{string})} is made.
\end{funcdesc}
The following values are provided as constants in the module and as
attributes of the sha objects returned by \function{new()}:
\begin{datadesc}{blocksize}
Size of the blocks fed into the hash function; this is always
\code{1}. This size is used to allow an arbitrary string to be
hashed.
\end{datadesc}
\begin{datadesc}{digest_size}
The size of the resulting digest in bytes. This is always
\code{20}.
\end{datadesc}
An sha object has the same methods as md5 objects:
\begin{methoddesc}[sha]{update}{arg}
Update the sha object with the string \var{arg}. Repeated calls are
equivalent to a single call with the concatenation of all the
arguments: \code{m.update(a); m.update(b)} is equivalent to
\code{m.update(a+b)}.
\end{methoddesc}
\begin{methoddesc}[sha]{digest}{}
Return the digest of the strings passed to the \method{update()}
method so far. This is a 20-byte string which may contain
non-\ASCII{} characters, including null bytes.
\end{methoddesc}
\begin{methoddesc}[sha]{hexdigest}{}
Like \method{digest()} except the digest is returned as a string of
length 40, containing only hexadecimal digits. This may
be used to exchange the value safely in email or other non-binary
environments.
\end{methoddesc}
\begin{methoddesc}[sha]{copy}{}
Return a copy (``clone'') of the sha object. This can be used to
efficiently compute the digests of strings that share a common initial
substring.
\end{methoddesc}
\begin{seealso}
\seetitle[http://csrc.nist.gov/publications/fips/fips180-2/fips180-2withchangenotice.pdf]
{Secure Hash Standard}
{The Secure Hash Algorithm is defined by NIST document FIPS
PUB 180-2:
\citetitle[http://csrc.nist.gov/publications/fips/fips180-2/fips180-2withchangenotice.pdf]
{Secure Hash Standard}, published in August 2002.}
\seetitle[http://csrc.nist.gov/encryption/tkhash.html]
{Cryptographic Toolkit (Secure Hashing)}
{Links from NIST to various information on secure hashing.}
\end{seealso}

View File

@ -941,18 +941,15 @@ stack frame; \member{f_code} is the code object being executed in this
frame; \member{f_locals} is the dictionary used to look up local
variables; \member{f_globals} is used for global variables;
\member{f_builtins} is used for built-in (intrinsic) names;
\member{f_restricted} is a flag indicating whether the function is
executing in restricted execution mode; \member{f_lasti} gives the
precise instruction (this is an index into the bytecode string of
the code object).
\member{f_lasti} gives the precise instruction (this is an index into
the bytecode string of the code object).
\withsubitem{(frame attribute)}{
\ttindex{f_back}
\ttindex{f_code}
\ttindex{f_globals}
\ttindex{f_locals}
\ttindex{f_lasti}
\ttindex{f_builtins}
\ttindex{f_restricted}}
\ttindex{f_builtins}}
Special writable attributes: \member{f_trace}, if not \code{None}, is
a function called at the start of each source code line (this is used

View File

@ -33,7 +33,6 @@ PyAPI_FUNC(PyObject *) PyEval_GetBuiltins(void);
PyAPI_FUNC(PyObject *) PyEval_GetGlobals(void);
PyAPI_FUNC(PyObject *) PyEval_GetLocals(void);
PyAPI_FUNC(struct _frame *) PyEval_GetFrame(void);
PyAPI_FUNC(int) PyEval_GetRestricted(void);
/* Look at the current frame's (if any) code's co_flags, and turn on
the corresponding compiler flags in cf->cf_flags. Return 1 if any

View File

@ -52,8 +52,6 @@ typedef struct _frame {
PyAPI_DATA(PyTypeObject) PyFrame_Type;
#define PyFrame_Check(op) ((op)->ob_type == &PyFrame_Type)
#define PyFrame_IsRestricted(f) \
((f)->f_builtins != (f)->f_tstate->interp->builtins)
PyAPI_FUNC(PyFrameObject *) PyFrame_New(PyThreadState *, PyCodeObject *,
PyObject *, PyObject *);

View File

@ -1,177 +0,0 @@
"""Bastionification utility.
A bastion (for another object -- the 'original') is an object that has
the same methods as the original but does not give access to its
instance variables. Bastions have a number of uses, but the most
obvious one is to provide code executing in restricted mode with a
safe interface to an object implemented in unrestricted mode.
The bastionification routine has an optional second argument which is
a filter function. Only those methods for which the filter method
(called with the method name as argument) returns true are accessible.
The default filter method returns true unless the method name begins
with an underscore.
There are a number of possible implementations of bastions. We use a
'lazy' approach where the bastion's __getattr__() discipline does all
the work for a particular method the first time it is used. This is
usually fastest, especially if the user doesn't call all available
methods. The retrieved methods are stored as instance variables of
the bastion, so the overhead is only occurred on the first use of each
method.
Detail: the bastion class has a __repr__() discipline which includes
the repr() of the original object. This is precomputed when the
bastion is created.
"""
# Names exported by "from Bastion import *".
__all__ = ["BastionClass", "Bastion"]

from types import MethodType
class BastionClass:
    """Helper class used by the Bastion() function.

    You could subclass this and pass the subclass as the bastionclass
    argument to the Bastion() function, as long as the constructor has
    the same signature (a get() function and a name for the object).
    """

    def __init__(self, get, name):
        """Store the attribute-fetching hook and a display name.

        Arguments:

        get - a function that gets the attribute value (by name)
        name - a human-readable name for the original object
               (suggestion: use repr(object))
        """
        self._get_ = get
        self._name_ = name

    def __repr__(self):
        """Identify the bastion by the name given at construction, so a
        debug print gives at least some idea of what it wraps."""
        return "<Bastion for %s>" % self._name_

    def __getattr__(self, name):
        """Resolve an as-yet unknown attribute through the get() hook.

        The fetched value is cached in the instance dict, so the next
        access of the same attribute bypasses __getattr__ entirely.
        Exceptions raised by get() propagate and are not cached.
        """
        value = self._get_(name)
        self.__dict__[name] = value
        return value
def Bastion(object, filter = lambda name: name[:1] != '_',
            name=None, bastionclass=BastionClass):
    """Create a bastion for an object, using an optional filter.

    See the Bastion module's documentation for background.

    Arguments:

    object - the original object
    filter - a predicate that decides whether a function name is OK;
             by default all names are OK that don't start with '_'
    name - the name of the object; default repr(object)
    bastionclass - class used to create the bastion; default BastionClass

    Raises RuntimeError unconditionally: the bastion mechanism is not
    secure on Python 2.2 and later, so it is permanently disabled.
    """

    # Fixed: "raise RuntimeError, msg" is Python 2-only syntax; the call
    # form below is valid under both Python 2 and the py3k interpreter
    # this file now targets.
    raise RuntimeError("This code is not secure in Python 2.2 and later")

    # The code below is unreachable; it is kept for reference.

    # Note: we define *two* ad-hoc functions here, get1 and get2.
    # Both are intended to be called in the same way: get(name).
    # It is clear that the real work (getting the attribute
    # from the object and calling the filter) is done in get1.
    # Why can't we pass get1 to the bastion?  Because the user
    # would be able to override the filter argument!  With get2,
    # overriding the default argument is no security loophole:
    # all it does is call it.
    # Also notice that we can't place the object and filter as
    # instance variables on the bastion object itself, since
    # the user has full access to all instance variables!

    def get1(name, object=object, filter=filter):
        """Internal function for Bastion().  See source comments."""
        if filter(name):
            attribute = getattr(object, name)
            if type(attribute) == MethodType:
                return attribute
        # Fixed: py3-compatible raise (was "raise AttributeError, name").
        raise AttributeError(name)

    def get2(name, get1=get1):
        """Internal function for Bastion().  See source comments."""
        return get1(name)

    if name is None:
        name = repr(object)
    return bastionclass(get2, name)
def _test():
    """Smoke-test the Bastion() function by exec'ing probe code that
    tries both allowed and forbidden accesses on a bastioned object."""
    class Original:
        # Sample class with a public method (add), a private helper
        # (_add), and a plain attribute (sum) -- used to check exactly
        # what the bastion exposes.
        def __init__(self):
            self.sum = 0
        def add(self, n):
            self._add(n)
        def _add(self, n):
            self.sum = self.sum + n
        def total(self):
            return self.sum
    o = Original()
    b = Bastion(o)
    # The exec'ed code below refers to the local name 'b' bound above.
    # NOTE(review): the probe code uses Python 2 print statements, so it
    # cannot compile under a Python 3 exec() -- left as-is since this
    # module is deprecated/disabled.
    testcode = """if 1:
    b.add(81)
    b.add(18)
    print "b.total() =", b.total()
    try:
        print "b.sum =", b.sum,
    except:
        print "inaccessible"
    else:
        print "accessible"
    try:
        print "b._add =", b._add,
    except:
        print "inaccessible"
    else:
        print "accessible"
    try:
        print "b._get_.__defaults__ =", map(type, b._get_.__defaults__),
    except:
        print "inaccessible"
    else:
        print "accessible"
\n"""
    exec(testcode)
    print('='*20, "Using rexec:", '='*20)
    # NOTE(review): rexec was removed from the stdlib in this same merge;
    # this branch can no longer run. Confirm before relying on it.
    import rexec
    r = rexec.RExec()
    m = r.add_module('__main__')
    m.b = b
    r.r_exec(testcode)


if __name__ == '__main__':
    _test()

View File

@ -1,26 +0,0 @@
"""Package for parsing and compiling Python source code
There are several functions defined at the top level that are imported
from modules contained in the package.
parse(buf, mode="exec") -> AST
Converts a string containing Python source code to an abstract
syntax tree (AST). The AST is defined in compiler.ast.
parseFile(path) -> AST
The same as parse(open(path))
walk(ast, visitor, verbose=None)
Does a pre-order walk over the ast using the visitor instance.
See compiler.visitor for details.
compile(source, filename, mode, flags=None, dont_inherit=None)
Returns a code object. A replacement for the builtin compile() function.
compileFile(filename)
Generates a .pyc file by compiling filename.
"""
from compiler.transformer import parse, parseFile
from compiler.visitor import walk
from compiler.pycodegen import compile, compileFile

File diff suppressed because it is too large Load Diff

View File

@ -1,21 +0,0 @@
# operation flags
# Kind of access an assignment-style AST node performs.
OP_ASSIGN = 'OP_ASSIGN'
OP_DELETE = 'OP_DELETE'
OP_APPLY = 'OP_APPLY'

# Scope classification codes -- presumably produced by the symbol-table
# pass to say where a name is bound; TODO confirm against compiler.symbols.
SC_LOCAL = 1
SC_GLOBAL = 2
SC_FREE = 3
SC_CELL = 4
SC_UNKNOWN = 5

# Code-object flag bits (values mirror CPython's CO_* constants).
CO_OPTIMIZED = 0x0001
CO_NEWLOCALS = 0x0002
CO_VARARGS = 0x0004
CO_VARKEYWORDS = 0x0008
CO_NESTED = 0x0010
CO_GENERATOR = 0x0020
CO_GENERATOR_ALLOWED = 0    # value 0 sets no bits -- presumably obsolete; TODO confirm
CO_FUTURE_DIVISION = 0x2000
CO_FUTURE_ABSIMPORT = 0x4000
CO_FUTURE_WITH_STATEMENT = 0x8000

View File

@ -1,73 +0,0 @@
"""Parser for future statements
"""
from compiler import ast, walk
def is_future(stmt):
    """Return 1 if stmt is a well-formed future statement, else 0.

    A well-formed future statement is a From node importing from the
    literal module name "__future__".
    """
    if isinstance(stmt, ast.From) and stmt.modname == "__future__":
        return 1
    return 0
class FutureParser:
    """Visitor that records which __future__ features a module enables.

    Scanning walks the statements of the module body in order and stops
    at the first statement that is not a future statement.
    """

    # Feature names this compiler understands.
    features = ("nested_scopes", "generators", "division",
                "absolute_import", "with_statement")

    def __init__(self):
        self.found = {} # set of enabled feature names

    def visitModule(self, node):
        # Future statements are only honored at the top of the module;
        # stop at the first non-future statement.
        stmt = node.node
        for s in stmt.nodes:
            if not self.check_stmt(s):
                break

    def check_stmt(self, stmt):
        """Record stmt's features if it is a future statement.

        Returns 1 (and tags the node with valid_future) for a future
        statement, 0 otherwise.  Raises SyntaxError for an unknown
        feature name.
        """
        if is_future(stmt):
            for name, asname in stmt.names:
                if name in self.features:
                    self.found[name] = 1
                else:
                    # Fixed: "raise SyntaxError, msg" is Python 2-only
                    # syntax; use the call form.
                    raise SyntaxError(
                        "future feature %s is not defined" % name)
            stmt.valid_future = 1
            return 1
        return 0

    def get_features(self):
        """Return list of features enabled by future statements"""
        # Fixed: wrap in list() so callers get the documented list (in
        # Python 3, dict.keys() is a view, not a list).
        return list(self.found.keys())
class BadFutureParser:
    """Visitor that rejects future statements FutureParser did not tag.

    FutureParser sets valid_future on every future statement it accepts
    at the top of the module; any other "from __future__ import ..."
    found elsewhere is invalid.
    """

    def visitFrom(self, node):
        if hasattr(node, 'valid_future'):
            # Already validated by FutureParser.
            return
        if node.modname != "__future__":
            return
        # Fixed: "raise SyntaxError, msg" is Python 2-only syntax; use
        # the call form, valid in both Python 2 and 3.
        raise SyntaxError("invalid future statement " + repr(node))
def find_futures(node):
    """Walk node with both future-statement visitors and return the
    list of enabled feature names.

    The FutureParser pass collects valid features; the BadFutureParser
    pass raises SyntaxError on misplaced future statements.
    """
    feature_finder = FutureParser()
    validator = BadFutureParser()
    walk(node, feature_finder)
    walk(node, validator)
    return feature_finder.get_features()
if __name__ == "__main__":
    # Ad-hoc driver: report the future features found in each file
    # named on the command line.
    import sys
    from compiler import parseFile, walk

    for filename in sys.argv[1:]:
        print(filename)
        tree = parseFile(filename)
        finder = FutureParser()
        walk(tree, finder)
        print(finder.found)
        print()

View File

@ -1,73 +0,0 @@
def flatten(tup):
    """Return a flat list of the leaves of an arbitrarily nested tuple.

    Only tuples are descended into; any other element (including lists)
    is treated as a leaf.
    """
    flat = []
    for item in tup:
        if isinstance(item, tuple):
            flat.extend(flatten(item))
        else:
            flat.append(item)
    return flat
class Set:
    """Minimal set type backed by a dict (predates the builtin set)."""

    def __init__(self):
        # Elements are stored as dict keys (mapped to themselves).
        self.elts = {}

    def __len__(self):
        return len(self.elts)

    def __contains__(self, elt):
        return elt in self.elts

    def has_elt(self, elt):
        """Membership test kept for callers that predate __contains__."""
        return self.__contains__(elt)

    def add(self, elt):
        self.elts[elt] = elt

    def remove(self, elt):
        del self.elts[elt]

    def elements(self):
        """Return the members as a list."""
        return list(self.elts)

    def copy(self):
        """Return a shallow copy sharing no dict with self."""
        dup = Set()
        dup.elts = dict(self.elts)
        return dup
class Stack:
    """List-backed stack.

    pop() is not a method: __init__ publishes the underlying list's
    bound pop, so callers get full list.pop semantics (optional index).
    """

    def __init__(self):
        self.stack = []
        # Expose the list's pop directly as an instance attribute.
        self.pop = self.stack.pop

    def push(self, elt):
        self.stack.append(elt)

    def top(self):
        """Return the most recently pushed element without removing it."""
        return self.stack[-1]

    def __len__(self):
        return len(self.stack)

    def __getitem__(self, index): # needed by visitContinue()
        return self.stack[index]
MANGLE_LEN = 256 # magic constant from compile.c

def mangle(name, klass):
    """Return the private-name mangling of name inside class klass.

    Mirrors the interpreter's rules: only names starting with two
    underscores (and not ending with two) are mangled, leading
    underscores of the class name are stripped, and over-long results
    are truncated to MANGLE_LEN characters.
    """
    # Names that are not private, are dunder-style, or are too long to
    # mangle pass through unchanged.
    if not name.startswith('__') or name.endswith('__'):
        return name
    if len(name) + 2 >= MANGLE_LEN:
        return name
    # A class name made up entirely of underscores (or empty) does not
    # participate in mangling.
    stripped = klass.lstrip('_')
    if not stripped:
        return name
    total = len(stripped) + len(name)
    if total > MANGLE_LEN:
        # Negative slice bound truncates the class-name part.
        stripped = stripped[:MANGLE_LEN - total]
    return "_%s%s" % (stripped, name)
def set_filename(filename, tree):
    """Set the filename attribute to filename on every node in tree.

    Traverses breadth-first via getChildNodes(); every reachable node
    is stamped with the same filename.
    """
    queue = [tree]
    while queue:
        current = queue.pop(0)
        current.filename = filename
        queue.extend(current.getChildNodes())

View File

@ -1,847 +0,0 @@
"""A flow graph representation for Python bytecode"""
import dis
import new
import sys
from compiler import misc
from compiler.consts \
import CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS
class FlowGraph:
    """Control-flow graph of Block objects built incrementally by a
    code generator.

    Maintains a dedicated entry block, a dedicated exit block, and a
    "current" block that emit() appends instructions to.
    """

    def __init__(self):
        # The entry block doubles as the initial current block; the
        # exit block is labelled so it is recognizable in debug output.
        self.current = self.entry = Block()
        self.exit = Block("exit")
        self.blocks = misc.Set()
        self.blocks.add(self.entry)
        self.blocks.add(self.exit)

    def startBlock(self, block):
        # Make block the target of subsequent emit() calls, optionally
        # tracing the transition when debugging is enabled.
        if self._debug:
            if self.current:
                print("end", repr(self.current))
                print(" next", self.current.next)
                print(" ", self.current.get_children())
            print(repr(block))
        self.current = block

    def nextBlock(self, block=None):
        """Start a new current block, recording an implicit fallthrough
        edge from the old current block."""
        # XXX think we need to specify when there is implicit transfer
        # from one block to the next. might be better to represent this
        # with explicit JUMP_ABSOLUTE instructions that are optimized
        # out when they are unnecessary.
        #
        # I think this strategy works: each block has a child
        # designated as "next" which is returned as the last of the
        # children. because the nodes in a graph are emitted in
        # reverse post order, the "next" block will always be emitted
        # immediately after its parent.
        # Worry: maintaining this invariant could be tricky
        if block is None:
            block = self.newBlock()

        # Note: If the current block ends with an unconditional
        # control transfer, then it is incorrect to add an implicit
        # transfer to the block graph. The current code requires
        # these edges to get the blocks emitted in the right order,
        # however. :-( If a client needs to remove these edges, call
        # pruneEdges().

        self.current.addNext(block)
        self.startBlock(block)

    def newBlock(self):
        # Create a block registered with the graph but not yet current.
        b = Block()
        self.blocks.add(b)
        return b

    def startExitBlock(self):
        self.startBlock(self.exit)

    # Debug tracing is off by default; toggled by the two helpers below.
    _debug = 0

    def _enable_debug(self):
        self._debug = 1

    def _disable_debug(self):
        self._debug = 0

    def emit(self, *inst):
        """Append instruction tuple inst to the current block.

        Also records control-flow edges: RETURN_VALUE/YIELD_VALUE get
        an edge to the exit block, and any Block-valued operand gets an
        edge to that block.
        """
        if self._debug:
            print("\t", inst)
        if inst[0] in ['RETURN_VALUE', 'YIELD_VALUE']:
            self.current.addOutEdge(self.exit)
        if len(inst) == 2 and isinstance(inst[1], Block):
            self.current.addOutEdge(inst[1])
        self.current.emit(inst)

    def getBlocksInOrder(self):
        """Return the blocks in reverse postorder

        i.e. each node appears before all of its successors
        """
        # XXX make sure every node that doesn't have an explicit next
        # is set so that next points to exit
        for b in self.blocks.elements():
            if b is self.exit:
                continue
            if not b.next:
                b.addNext(self.exit)
        order = dfs_postorder(self.entry, {})
        order.reverse()
        self.fixupOrder(order, self.exit)
        # hack alert
        if not self.exit in order:
            order.append(self.exit)

        return order

    def fixupOrder(self, blocks, default_next):
        """Fixup bad order introduced by DFS."""
        # XXX This is a total mess. There must be a better way to get
        # the code blocks in the right order.
        self.fixupOrderHonorNext(blocks, default_next)
        self.fixupOrderForward(blocks, default_next)

    def fixupOrderHonorNext(self, blocks, default_next):
        """Fix one problem with DFS.

        The DFS uses child block, but doesn't know about the special
        "next" block. As a result, the DFS can order blocks so that a
        block isn't next to the right block for implicit control
        transfers.
        """
        # Map each block to its current position.
        index = {}
        for i in range(len(blocks)):
            index[blocks[i]] = i

        for i in range(0, len(blocks) - 1):
            b = blocks[i]
            n = blocks[i + 1]
            # Only act when b's designated "next" is neither the default
            # successor nor already adjacent.
            if not b.next or b.next[0] == default_next or b.next[0] == n:
                continue
            # The blocks are in the wrong order. Find the chain of
            # blocks to insert where they belong.
            cur = b
            chain = []
            elt = cur
            while elt.next and elt.next[0] != default_next:
                chain.append(elt.next[0])
                elt = elt.next[0]
            # Now remove the blocks in the chain from the current
            # block list, so that they can be re-inserted.
            l = []
            for b in chain:
                assert index[b] > i
                l.append((index[b], b))
            l.sort()
            l.reverse()
            # Delete from highest index down so earlier positions stay valid.
            for j, b in l:
                del blocks[index[b]]
            # Insert the chain in the proper location
            blocks[i:i + 1] = [cur] + chain
            # Finally, re-compute the block indexes
            for i in range(len(blocks)):
                index[blocks[i]] = i

    def fixupOrderForward(self, blocks, default_next):
        """Make sure all JUMP_FORWARDs jump forward"""
        # Group the blocks into chains, each ending where a block's
        # "next" is the default successor.
        index = {}
        chains = []
        cur = []
        for b in blocks:
            index[b] = len(chains)
            cur.append(b)
            if b.next and b.next[0] == default_next:
                chains.append(cur)
                cur = []
        chains.append(cur)

        while 1:
            # Collect (target_chain, jumping_chain) pairs where a
            # JUMP_FORWARD currently targets an earlier chain.
            constraints = []

            for i in range(len(chains)):
                l = chains[i]
                for b in l:
                    for c in b.get_children():
                        if index[c] < i:
                            forward_p = 0
                            for inst in b.insts:
                                if inst[0] == 'JUMP_FORWARD':
                                    if inst[1] == c:
                                        forward_p = 1
                            if not forward_p:
                                continue
                            constraints.append((index[c], i))

            if not constraints:
                break

            # XXX just do one for now
            # do swaps to get things in the right order
            goes_before, a_chain = constraints[0]
            assert a_chain > goes_before
            c = chains[a_chain]
            chains.remove(c)
            chains.insert(goes_before, c)

        # Rebuild the caller's list in place from the reordered chains.
        del blocks[:]
        for c in chains:
            for b in c:
                blocks.append(b)

    def getBlocks(self):
        return self.blocks.elements()

    def getRoot(self):
        """Return nodes appropriate for use with dominator"""
        return self.entry

    def getContainedGraphs(self):
        # Flatten the contained graphs of every block (e.g. bodies of
        # nested functions -- see Block.getContainedGraphs).
        l = []
        for b in self.getBlocks():
            l.extend(b.getContainedGraphs())
        return l
def dfs_postorder(b, seen):
    """Depth-first search of tree rooted at b, return in postorder.

    seen is a dict shared across the recursion; each visited node is
    recorded there so it is emitted at most once.
    """
    seen[b] = b
    order = []
    for child in b.get_children():
        if child not in seen:
            order.extend(dfs_postorder(child, seen))
    order.append(b)
    return order
class Block:
    """A node in the flow graph: a labelled list of instruction tuples
    plus explicit in/out edges and an (at most one) implicit "next"
    fallthrough successor.
    """

    # Class-wide counter used to assign each block a unique id (bid).
    _count = 0

    def __init__(self, label=''):
        self.insts = []
        self.inEdges = misc.Set()
        self.outEdges = misc.Set()
        self.label = label
        self.bid = Block._count
        self.next = []
        Block._count = Block._count + 1

    def __repr__(self):
        if self.label:
            return "<block %s id=%d>" % (self.label, self.bid)
        else:
            return "<block id=%d>" % (self.bid)

    def __str__(self):
        insts = map(str, self.insts)
        return "<block %s %d:\n%s>" % (self.label, self.bid,
                                       '\n'.join(insts))

    def emit(self, inst):
        # Any JUMP* instruction's operand is its target block; record
        # the control-flow edge as well as the instruction itself.
        op = inst[0]
        if op[:4] == 'JUMP':
            self.outEdges.add(inst[1])
        self.insts.append(inst)

    def getInstructions(self):
        return self.insts

    def addInEdge(self, block):
        self.inEdges.add(block)

    def addOutEdge(self, block):
        self.outEdges.add(block)

    def addNext(self, block):
        # A block may have at most one fallthrough successor.
        self.next.append(block)
        assert len(self.next) == 1, map(str, self.next)

    # Opcodes after which control never falls through to "next".
    _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS', 'YIELD_VALUE',
                        'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP')

    def pruneNext(self):
        """Remove bogus edge for unconditional transfers

        Each block has a next edge that accounts for implicit control
        transfers, e.g. from a JUMP_IF_FALSE to the block that will be
        executed if the test is true.

        These edges must remain for the current assembler code to
        work. If they are removed, the dfs_postorder gets things in
        weird orders. However, they shouldn't be there for other
        purposes, e.g. conversion to SSA form. This method will
        remove the next edge when it follows an unconditional control
        transfer.
        """
        try:
            op, arg = self.insts[-1]
        except (IndexError, ValueError):
            # Empty block, or last instruction has no single operand.
            return
        if op in self._uncond_transfer:
            self.next = []

    def get_children(self):
        # NOTE: destructive -- removes next[0] from outEdges so the
        # "next" successor appears exactly once, as the last child.
        if self.next and self.next[0] in self.outEdges:
            self.outEdges.remove(self.next[0])
        return self.outEdges.elements() + self.next

    def getContainedGraphs(self):
        """Return all graphs contained within this block.

        For example, a MAKE_FUNCTION block will contain a reference to
        the graph for the function body.
        """
        contained = []
        for inst in self.insts:
            if len(inst) == 1:
                # Instruction with no operand -- nothing to inspect.
                continue
            op = inst[1]
            if hasattr(op, 'graph'):
                contained.append(op.graph)
        return contained
# Stage markers for PyFlowGraph.
# the FlowGraph is transformed in place; it exists in one of these states
RAW = "RAW"      # symbolic instructions grouped into basic blocks
FLAT = "FLAT"    # blocks linearized; jump targets resolved to offsets
CONV = "CONV"    # symbolic arguments converted to concrete indexes
DONE = "DONE"    # ready to emit the final code object
class PyFlowGraph(FlowGraph):
    """A flow graph that can assemble itself into a Python code object.

    The graph is transformed in place through the stages
    RAW -> FLAT -> CONV -> DONE; getCode() drives the whole pipeline.
    """

    super_init = FlowGraph.__init__

    def __init__(self, name, filename,
                 args=(), kwonlyargs=(), optimized=0, klass=None):
        # name/filename end up in the code object; args/kwonlyargs are
        # the formal parameters; optimized selects fast locals; klass is
        # the enclosing class name (affects *_NAME conversions below).
        self.super_init()
        self.name = name
        self.filename = filename
        self.docstring = None
        self.args = args # XXX
        self.argcount = getArgCount(args)
        self.kwonlyargs = kwonlyargs
        self.klass = klass
        if optimized:
            self.flags = CO_OPTIMIZED | CO_NEWLOCALS
        else:
            self.flags = 0
        self.consts = []
        self.names = []
        # Free variables found by the symbol table scan, including
        # variables used only in nested scopes, are included here.
        self.freevars = []
        self.cellvars = []
        # The closure list is used to track the order of cell
        # variables and free variables in the resulting code object.
        # The offsets used by LOAD_CLOSURE/LOAD_DEREF refer to both
        # kinds of variables.
        self.closure = []
        # The varnames list needs to be computed after flags have been set
        self.varnames = []
        self.stage = RAW

    def computeVarnames(self):
        """Reorder self.args into the varname order code objects expect."""
        # self.args is positional, vararg, kwarg, kwonly, unpacked.  This
        # order is due to the visit order in symbol module and could change.
        # argcount is # len(self.args) - len(unpacked).  We want
        # self.varnames to be positional, kwonly, vararg, kwarg, unpacked
        # and argcount to be len(positional).

        # determine starting index of unpacked, kwonly, vararg
        u = self.argcount # starting index of unpacked
        k = u - len(self.kwonlyargs) # starting index of kwonly
        v = k - self.checkFlag(CO_VARARGS) - self.checkFlag(CO_VARKEYWORDS)
        vars = list(self.args)
        self.varnames = vars[:v] + vars[k:u] + vars[v:k] + vars[u:]
        self.argcount = v

        # replace TupleArgs with calculated var name
        for i in range(self.argcount):
            var = self.varnames[i]
            if isinstance(var, TupleArg):
                self.varnames[i] = var.getName()

    def setDocstring(self, doc):
        self.docstring = doc

    def setFlag(self, flag):
        self.flags = self.flags | flag

    def checkFlag(self, flag):
        # Boolean result; also used as 0/1 in computeVarnames arithmetic.
        return (self.flags & flag) == flag

    def setFreeVars(self, names):
        self.freevars = list(names)

    def setCellVars(self, names):
        self.cellvars = names

    def getCode(self):
        """Get a Python code object"""
        assert self.stage == RAW
        self.computeVarnames()
        self.computeStackDepth()
        self.flattenGraph()
        assert self.stage == FLAT
        self.convertArgs()
        assert self.stage == CONV
        self.makeByteCode()
        assert self.stage == DONE
        return self.newCodeObject()

    def dump(self, io=None):
        """Print a listing of the flattened instructions (debug helper)."""
        if io:
            save = sys.stdout
            sys.stdout = io
        pc = 0
        for t in self.insts:
            opname = t[0]
            if opname == "SET_LINENO":
                print()
            if len(t) == 1:
                print("\t", "%3d" % pc, opname)
                pc = pc + 1
            else:
                print("\t", "%3d" % pc, opname, t[1])
                pc = pc + 3
        if io:
            sys.stdout = save

    def computeStackDepth(self):
        """Compute the max stack depth.

        Approach is to compute the stack effect of each basic block.
        Then find the path through the code with the largest total
        effect.
        """
        depth = {}
        exit = None
        for b in self.getBlocks():
            depth[b] = findDepth(b.getInstructions())

        seen = {}

        def max_depth(b, d):
            if b in seen:
                return d
            seen[b] = 1
            d = d + depth[b]
            children = b.get_children()
            if children:
                return max([max_depth(c, d) for c in children])
            else:
                if not b.label == "exit":
                    # assumes self.exit is created by FlowGraph.__init__
                    # (not visible in this chunk) -- TODO confirm
                    return max_depth(self.exit, d)
                else:
                    return d

        self.stacksize = max_depth(self.entry, 0)

    def flattenGraph(self):
        """Arrange the blocks in order and resolve jumps"""
        assert self.stage == RAW
        self.insts = insts = []
        pc = 0
        begin = {}
        end = {}
        for b in self.getBlocksInOrder():
            begin[b] = pc
            for inst in b.getInstructions():
                insts.append(inst)
                if len(inst) == 1:
                    pc = pc + 1
                elif inst[0] != "SET_LINENO":
                    # arg takes 2 bytes
                    pc = pc + 3
            end[b] = pc
        pc = 0
        for i in range(len(insts)):
            inst = insts[i]
            if len(inst) == 1:
                pc = pc + 1
            elif inst[0] != "SET_LINENO":
                pc = pc + 3
            opname = inst[0]
            if self.hasjrel.has_elt(opname):
                # Relative jumps are encoded as the distance from the
                # *following* instruction (pc was already advanced).
                oparg = inst[1]
                offset = begin[oparg] - pc
                insts[i] = opname, offset
            elif self.hasjabs.has_elt(opname):
                insts[i] = opname, begin[inst[1]]
        self.stage = FLAT

    # Sets of jump opcode names, split by relative/absolute addressing,
    # built once at class-creation time from the dis tables.
    hasjrel = misc.Set()
    for i in dis.hasjrel:
        hasjrel.add(dis.opname[i])
    hasjabs = misc.Set()
    for i in dis.hasjabs:
        hasjabs.add(dis.opname[i])

    def convertArgs(self):
        """Convert arguments from symbolic to concrete form"""
        assert self.stage == FLAT
        # Slot 0 of co_consts is always the docstring (possibly None).
        self.consts.insert(0, self.docstring)
        self.sort_cellvars()
        for i in range(len(self.insts)):
            t = self.insts[i]
            if len(t) == 2:
                opname, oparg = t
                conv = self._converters.get(opname, None)
                if conv:
                    self.insts[i] = opname, conv(self, oparg)
        self.stage = CONV

    def sort_cellvars(self):
        """Sort cellvars in the order of varnames and prune from freevars.
        """
        cells = {}
        for name in self.cellvars:
            cells[name] = 1
        self.cellvars = [name for name in self.varnames
                         if name in cells]
        for name in self.cellvars:
            del cells[name]
        self.cellvars = self.cellvars + list(cells.keys())
        self.closure = self.cellvars + self.freevars

    def _lookupName(self, name, list):
        """Return index of name in list, appending if necessary

        This routine uses a list instead of a dictionary, because a
        dictionary can't store two different keys if the keys have the
        same value but different types, e.g. 2 and 2L.  The compiler
        must treat these two separately, so it does an explicit type
        comparison before comparing the values.
        """
        t = type(name)
        for i in range(len(list)):
            if t == type(list[i]) and list[i] == name:
                return i
        end = len(list)
        list.append(name)
        return end

    # Maps opcode name -> converter method; filled in below.
    _converters = {}

    def _convert_LOAD_CONST(self, arg):
        # Nested code appears as a PyFlowGraph; assemble it first.
        if hasattr(arg, 'getCode'):
            arg = arg.getCode()
        return self._lookupName(arg, self.consts)

    def _convert_LOAD_FAST(self, arg):
        self._lookupName(arg, self.names)
        return self._lookupName(arg, self.varnames)
    _convert_STORE_FAST = _convert_LOAD_FAST
    _convert_DELETE_FAST = _convert_LOAD_FAST

    def _convert_LOAD_NAME(self, arg):
        if self.klass is None:
            self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.names)

    def _convert_NAME(self, arg):
        if self.klass is None:
            self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.names)
    _convert_STORE_NAME = _convert_NAME
    _convert_DELETE_NAME = _convert_NAME
    _convert_IMPORT_NAME = _convert_NAME
    _convert_IMPORT_FROM = _convert_NAME
    _convert_STORE_ATTR = _convert_NAME
    _convert_LOAD_ATTR = _convert_NAME
    _convert_DELETE_ATTR = _convert_NAME
    _convert_LOAD_GLOBAL = _convert_NAME
    _convert_STORE_GLOBAL = _convert_NAME
    _convert_DELETE_GLOBAL = _convert_NAME

    def _convert_DEREF(self, arg):
        self._lookupName(arg, self.names)
        self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.closure)
    _convert_LOAD_DEREF = _convert_DEREF
    _convert_STORE_DEREF = _convert_DEREF

    def _convert_LOAD_CLOSURE(self, arg):
        self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.closure)

    _cmp = list(dis.cmp_op)
    def _convert_COMPARE_OP(self, arg):
        return self._cmp.index(arg)

    # similarly for other opcodes...
    # Register every _convert_XXX method above in the dispatch table,
    # keyed by the opcode name (the part after "_convert_").
    for name, obj in list(locals().items()):
        if name[:9] == "_convert_":
            opname = name[9:]
            _converters[opname] = obj
    del name, obj, opname

    def makeByteCode(self):
        """Emit the final byte stream, adding EXTENDED_ARG prefixes."""
        assert self.stage == CONV
        self.lnotab = lnotab = LineAddrTable()
        for t in self.insts:
            opname = t[0]
            if len(t) == 1:
                lnotab.addCode(self.opnum[opname])
            else:
                oparg = t[1]
                if opname == "SET_LINENO":
                    # SET_LINENO only feeds the line-number table.
                    lnotab.nextLine(oparg)
                    continue
                hi, lo = twobyte(oparg)

                extended, hi = twobyte(hi)
                if extended:
                    ehi, elo = twobyte(extended)
                    lnotab.addCode(self.opnum['EXTENDED_ARG'], elo, ehi)

                try:
                    lnotab.addCode(self.opnum[opname], lo, hi)
                except ValueError:
                    print(opname, oparg)
                    print(self.opnum[opname], lo, hi)
                    raise
        self.stage = DONE

    # Maps opcode name -> numeric opcode value.
    opnum = {}
    for num in range(len(dis.opname)):
        opnum[dis.opname[num]] = num
    del num

    def newCodeObject(self):
        """Build the code object from the fully assembled pieces."""
        assert self.stage == DONE
        if (self.flags & CO_NEWLOCALS) == 0:
            nlocals = 0
        else:
            nlocals = len(self.varnames)
        argcount = self.argcount
        kwonlyargcount = len(self.kwonlyargs)
        return new.code(argcount, kwonlyargcount,
                        nlocals, self.stacksize, self.flags,
                        self.lnotab.getCode(), self.getConsts(),
                        tuple(self.names), tuple(self.varnames),
                        self.filename, self.name, self.lnotab.firstline,
                        self.lnotab.getTable(), tuple(self.freevars),
                        tuple(self.cellvars))

    def getConsts(self):
        """Return a tuple for the const slot of the code object

        Must convert references to code (MAKE_FUNCTION) to code
        objects recursively.
        """
        l = []
        for elt in self.consts:
            if isinstance(elt, PyFlowGraph):
                elt = elt.getCode()
            l.append(elt)
        return tuple(l)
def isJump(opname):
    """Return True if *opname* is any JUMP-family opcode.

    The original returned 1 for jumps and fell through to an implicit
    None otherwise; an explicit boolean is truthiness-compatible for
    all callers and removes the silent fall-through.
    """
    return opname[:4] == 'JUMP'
class TupleArg:
    """Placeholder for a nested-tuple parameter in a function's arg list.

    *count* is the positional slot number; *names* the nested name tree.
    """

    def __init__(self, count, names):
        self.count = count
        self.names = names

    def __repr__(self):
        return "TupleArg(%s, %s)" % (self.count, self.names)

    def getName(self):
        # CPython names unpacked tuple arguments ".<n>" internally.
        return ".%d" % (self.count,)
def getArgCount(args):
    """Number of argument slots, with nested tuple args collapsed.

    Each TupleArg stands in for the names it unpacks, so the count of
    its flattened names is subtracted from the raw length.
    """
    count = len(args)
    for arg in args:
        if isinstance(arg, TupleArg):
            count -= len(misc.flatten(arg.names))
    return count
def twobyte(val):
    """Convert an int argument into high and low bytes"""
    assert isinstance(val, int)
    high, low = divmod(val, 256)
    return high, low
class LineAddrTable:
    """lnotab

    This class builds the lnotab, which is documented in compile.c.
    Here's a brief recap:

    For each SET_LINENO instruction after the first one, two bytes are
    added to lnotab.  (In some cases, multiple two-byte entries are
    added.)  The first byte is the distance in bytes between the
    instruction for the last SET_LINENO and the current SET_LINENO.
    The second byte is offset in line numbers.  If either offset is
    greater than 255, multiple two-byte entries are added -- see
    compile.c for the delicate details.
    """

    def __init__(self):
        self.code = []        # bytecode bytes, stored as 1-char strings
        self.codeOffset = 0   # current length of the bytecode
        self.firstline = 0    # line of the first SET_LINENO (co_firstlineno)
        self.lastline = 0     # line at the previous SET_LINENO
        self.lastoff = 0      # code offset at the previous SET_LINENO
        self.lnotab = []      # flat list of (addr-delta, line-delta) bytes

    def addCode(self, *args):
        # Each arg is a byte value; stored as chr() for the final join.
        for arg in args:
            self.code.append(chr(arg))
        self.codeOffset = self.codeOffset + len(args)

    def nextLine(self, lineno):
        """Record that *lineno* begins at the current bytecode offset."""
        if self.firstline == 0:
            self.firstline = lineno
            self.lastline = lineno
        else:
            # compute deltas
            addr = self.codeOffset - self.lastoff
            line = lineno - self.lastline
            # Python assumes that lineno always increases with
            # increasing bytecode address (lnotab is unsigned char).
            # Depending on when SET_LINENO instructions are emitted
            # this is not always true.  Consider the code:
            #     a = (1,
            #          b)
            # In the bytecode stream, the assignment to "a" occurs
            # after the loading of "b".  This works with the C Python
            # compiler because it only generates a SET_LINENO instruction
            # for the assignment.
            if line >= 0:
                push = self.lnotab.append
                # Deltas over 255 are split across multiple entries.
                while addr > 255:
                    push(255); push(0)
                    addr -= 255
                while line > 255:
                    push(addr); push(255)
                    line -= 255
                    addr = 0
                if addr > 0 or line > 0:
                    push(addr); push(line)
                self.lastline = lineno
                self.lastoff = self.codeOffset

    def getCode(self):
        """Return the accumulated bytecode as a single string."""
        return ''.join(self.code)

    def getTable(self):
        """Return the lnotab as a string of delta bytes."""
        return ''.join(map(chr, self.lnotab))
class StackDepthTracker:
    # Estimates the maximum stack depth of an instruction sequence by
    # summing per-opcode stack effects.  Known limitations:
    # XXX 1. need to keep track of stack depth on jumps
    # XXX 2. at least partly as a result, this code is broken

    def findDepth(self, insts, debug=0):
        """Return the (approximate) max stack depth for *insts*.

        The delta for each opcode comes from, in order: the fixed
        `effect` table, the prefix `patterns`, or a same-named method
        that computes the delta from the oparg.
        """
        depth = 0
        maxDepth = 0
        for i in insts:
            opname = i[0]
            if debug:
                print(i, end=' ')
            delta = self.effect.get(opname, None)
            if delta is not None:
                depth = depth + delta
            else:
                # now check patterns
                for pat, pat_delta in self.patterns:
                    if opname[:len(pat)] == pat:
                        delta = pat_delta
                        depth = depth + delta
                        break
                # if we still haven't found a match
                if delta is None:
                    meth = getattr(self, opname, None)
                    if meth is not None:
                        depth = depth + meth(i[1])
            if depth > maxDepth:
                maxDepth = depth
            if debug:
                print(depth, maxDepth)
        return maxDepth

    # Fixed stack effect per opcode.
    effect = {
        'POP_TOP': -1,
        'DUP_TOP': 1,
        'LIST_APPEND': -2,
        'SLICE+1': -1,
        'SLICE+2': -1,
        'SLICE+3': -2,
        'STORE_SLICE+0': -1,
        'STORE_SLICE+1': -2,
        'STORE_SLICE+2': -2,
        'STORE_SLICE+3': -3,
        'DELETE_SLICE+0': -1,
        'DELETE_SLICE+1': -2,
        'DELETE_SLICE+2': -2,
        'DELETE_SLICE+3': -3,
        'STORE_SUBSCR': -3,
        'DELETE_SUBSCR': -2,
        'PRINT_EXPR': -1,
        'RETURN_VALUE': -1,
        'YIELD_VALUE': -1,
        'STORE_NAME': -1,
        'STORE_ATTR': -2,
        'DELETE_ATTR': -1,
        'STORE_GLOBAL': -1,
        'BUILD_MAP': 1,
        'MAKE_BYTES': 0,
        'COMPARE_OP': -1,
        'STORE_FAST': -1,
        'IMPORT_STAR': -1,
        'IMPORT_NAME': -1,
        'IMPORT_FROM': 1,
        'LOAD_ATTR': 0, # unlike other loads
        # close enough...
        'SETUP_EXCEPT': 3,
        'SETUP_FINALLY': 3,
        'FOR_ITER': 1,
        'WITH_CLEANUP': -1,
        'LOAD_BUILD_CLASS': 1,
        'STORE_LOCALS': -1,
        }
    # use pattern match
    patterns = [
        ('BINARY_', -1),
        ('LOAD_', 1),
        ]

    # The remaining opcodes' effects depend on their argument; each is
    # handled by a method named after the opcode.

    def UNPACK_SEQUENCE(self, count):
        return count-1
    def BUILD_TUPLE(self, count):
        return -count+1
    def BUILD_LIST(self, count):
        return -count+1
    def BUILD_SET(self, count):
        return -count+1
    def CALL_FUNCTION(self, argc):
        # argc encodes positional count in the low byte and keyword
        # count in the high byte; each keyword pair uses two slots.
        hi, lo = divmod(argc, 256)
        return -(lo + hi * 2)
    def CALL_FUNCTION_VAR(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_KW(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_VAR_KW(self, argc):
        return self.CALL_FUNCTION(argc)-2
    def MAKE_FUNCTION(self, argc):
        hi, lo = divmod(argc, 256)
        ehi, hi = divmod(hi, 256)
        return -(lo + hi * 2 + ehi)
    def MAKE_CLOSURE(self, argc):
        # XXX need to account for free variables too!
        return -argc
    def BUILD_SLICE(self, argc):
        if argc == 2:
            return -1
        elif argc == 3:
            return -2
    def DUP_TOPX(self, argc):
        return argc

# Module-level convenience: shared tracker instance's findDepth.
findDepth = StackDepthTracker().findDepth

File diff suppressed because it is too large Load Diff

View File

@ -1,470 +0,0 @@
"""Module symbol-table generator"""
from compiler import ast
from compiler.consts import SC_LOCAL, SC_GLOBAL, SC_FREE, SC_CELL, SC_UNKNOWN
from compiler.misc import mangle
import types
import sys
MANGLE_LEN = 256
class Scope:
    """Name-binding information for one namespace.

    Records definitions, uses, ``global`` declarations, parameters,
    free variables and cell variables for a module/function/class body,
    plus nested child scopes, so each name can later be classified via
    the SC_* constants.
    """
    # XXX how much information do I need about each name?

    def __init__(self, name, module, klass=None):
        self.name = name
        self.module = module   # the enclosing ModuleScope
        self.defs = {}         # names bound here (dicts used as sets)
        self.uses = {}         # names referenced here
        self.globals = {}      # names declared ``global``
        self.params = {}       # formal parameters (subset of defs)
        self.frees = {}        # free variables from nested scopes
        self.cells = {}        # names that must live in cells
        self.children = []     # nested Scope objects
        # nested is true if the class could contain free variables,
        # i.e. if it is nested within another function.
        self.nested = None
        self.generator = None  # set when a yield is seen in this scope
        self.klass = None
        if klass is not None:
            # Strip leading underscores so mangling matches CPython's rule.
            for i in range(len(klass)):
                if klass[i] != '_':
                    self.klass = klass[i:]
                    break

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.name)

    def mangle(self, name):
        """Apply private-name mangling when inside a class body."""
        if self.klass is None:
            return name
        return mangle(name, self.klass)

    def add_def(self, name):
        self.defs[self.mangle(name)] = 1

    def add_use(self, name):
        self.uses[self.mangle(name)] = 1

    def add_global(self, name):
        """Record a ``global`` declaration for *name*.

        Raises SyntaxError if the name is also a parameter.
        """
        name = self.mangle(name)
        if name in self.uses or name in self.defs:
            pass # XXX warn about global following def/use
        if name in self.params:
            # Bug fix: this used Python 2 raise syntax
            # (``raise SyntaxError, "..."``), which is itself a syntax
            # error on the py3k branch this file lives on.
            raise SyntaxError("%s in %s is global and parameter" %
                              (name, self.name))
        self.globals[name] = 1
        self.module.add_def(name)

    def add_param(self, name):
        name = self.mangle(name)
        self.defs[name] = 1
        self.params[name] = 1

    def get_names(self):
        """All names defined, used, or declared global in this scope."""
        d = {}
        d.update(self.defs)
        d.update(self.uses)
        d.update(self.globals)
        return d.keys()

    def add_child(self, child):
        self.children.append(child)

    def get_children(self):
        return self.children

    def DEBUG(self):
        """Dump this scope's name tables to stderr (debug helper)."""
        print(self.name, self.nested and "nested" or "", file=sys.stderr)
        print("\tglobals: ", self.globals, file=sys.stderr)
        print("\tcells: ", self.cells, file=sys.stderr)
        print("\tdefs: ", self.defs, file=sys.stderr)
        print("\tuses: ", self.uses, file=sys.stderr)
        print("\tfrees:", self.frees, file=sys.stderr)

    def check_name(self, name):
        """Return scope of name.

        The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.
        """
        if name in self.globals:
            return SC_GLOBAL
        if name in self.cells:
            return SC_CELL
        if name in self.defs:
            return SC_LOCAL
        if self.nested and (name in self.frees or
                            name in self.uses):
            return SC_FREE
        if self.nested:
            return SC_UNKNOWN
        else:
            return SC_GLOBAL

    def get_free_vars(self):
        """Names used here that must be supplied by an enclosing scope."""
        if not self.nested:
            return ()
        free = {}
        free.update(self.frees)
        for name in self.uses.keys():
            if not (name in self.defs or
                    name in self.globals):
                free[name] = 1
        return free.keys()

    def handle_children(self):
        # Resolve each child's free variables against this scope.
        for child in self.children:
            frees = child.get_free_vars()
            globals = self.add_frees(frees)
            for name in globals:
                child.force_global(name)

    def force_global(self, name):
        """Force name to be global in scope.

        Some child of the current node had a free reference to name.
        When the child was processed, it was labelled a free
        variable.  Now that all its enclosing scope have been
        processed, the name is known to be a global or builtin.  So
        walk back down the child chain and set the name to be global
        rather than free.

        Be careful to stop if a child does not think the name is
        free.
        """
        self.globals[name] = 1
        if name in self.frees:
            del self.frees[name]
        for child in self.children:
            if child.check_name(name) == SC_FREE:
                child.force_global(name)

    def add_frees(self, names):
        """Process list of free vars from nested scope.

        Returns a list of names that are either 1) declared global in the
        parent or 2) undefined in a top-level parent.  In either case,
        the nested scope should treat them as globals.
        """
        child_globals = []
        for name in names:
            sc = self.check_name(name)
            if self.nested:
                if sc == SC_UNKNOWN or sc == SC_FREE \
                   or isinstance(self, ClassScope):
                    self.frees[name] = 1
                elif sc == SC_GLOBAL:
                    child_globals.append(name)
                elif isinstance(self, FunctionScope) and sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
            else:
                if sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
        return child_globals

    def get_cell_vars(self):
        return self.cells.keys()
class ModuleScope(Scope):
    """The top-level (module/global) scope."""

    def __init__(self):
        # A module is the root of the scope tree, so it serves as its
        # own enclosing module scope.
        Scope.__init__(self, "global", self)
class FunctionScope(Scope):
    """Scope for a function body; all behavior is inherited from Scope."""
    pass
class GenExprScope(Scope):
    """Scope for a generator expression.

    Gets an implicit '.0' parameter: the outermost iterable is passed
    in rather than evaluated inside the genexp.
    """

    __super_init = Scope.__init__

    # Class-level counter meant to number genexp scopes.
    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        # NOTE(review): this augmented assignment rebinds __counter as an
        # *instance* attribute, so the class-level counter never advances
        # and every genexp scope appears to get the same number -- looks
        # like a latent bug; confirm before relying on unique names.
        self.__counter += 1
        self.__super_init("generator expression<%d>"%i, module, klass)
        self.add_param('.0')

    def get_names(self):
        keys = Scope.get_names(self)
        return keys
class LambdaScope(FunctionScope):
    """Scope for a lambda expression, named "lambda.<N>"."""

    __super_init = Scope.__init__

    # Class-level counter meant to number lambda scopes.
    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        # NOTE(review): same instance-attribute rebinding quirk as
        # GenExprScope -- the class counter never advances; confirm.
        self.__counter += 1
        self.__super_init("lambda.%d" % i, module, klass)
class ClassScope(Scope):
    """Scope for a class body; the class's own name drives mangling."""

    __super_init = Scope.__init__

    def __init__(self, name, module):
        # The class name is passed as the klass argument so that
        # private names defined in the body are mangled against it.
        self.__super_init(name, module, name)
class SymbolVisitor:
    """AST walker that builds the tree of Scope objects for a module."""

    def __init__(self):
        self.scopes = {}    # maps AST node -> Scope
        self.klass = None   # name of the enclosing class, for mangling

    # node that define new scopes

    def visitModule(self, node):
        scope = self.module = self.scopes[node] = ModuleScope()
        self.visit(node.node, scope)
    visitExpression = visitModule

    def visitFunction(self, node, parent):
        if node.decorators:
            self.visit(node.decorators, parent)
        parent.add_def(node.name)
        # Default expressions are evaluated in the *enclosing* scope.
        for n in node.defaults:
            self.visit(n, parent)
        scope = FunctionScope(node.name, self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        args = node.arguments
        for kwonly in node.kwonlyargs:
            args.append(kwonly.arg)
        self._do_arguments(scope, args)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def visitGenExpr(self, node, parent):
        scope = GenExprScope(self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope) \
           or isinstance(parent, GenExprScope):
            scope.nested = 1

        self.scopes[node] = scope
        self.visit(node.code, scope)

        self.handle_free_vars(scope, parent)

    def visitGenExprInner(self, node, scope):
        for genfor in node.quals:
            self.visit(genfor, scope)

        self.visit(node.expr, scope)

    def visitGenExprFor(self, node, scope):
        # The target binds names; the iterable and conditions only use them.
        self.visit(node.assign, scope, 1)
        self.visit(node.iter, scope)
        for if_ in node.ifs:
            self.visit(if_, scope)

    def visitGenExprIf(self, node, scope):
        self.visit(node.test, scope)

    def visitLambda(self, node, parent, assign=0):
        # Lambda is an expression, so it could appear in an expression
        # context where assign is passed.  The transformer should catch
        # any code that has a lambda on the left-hand side.
        assert not assign

        for n in node.defaults:
            self.visit(n, parent)
        scope = LambdaScope(self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_arguments(scope, node.arguments)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def _do_arguments(self, scope, arguments):
        """Declare each formal argument, recursing into tuple-unpack args."""
        for node in arguments:
            if isinstance(node, ast.SimpleArg):
                scope.add_param(node.name)
                if node.annotation:
                    self.visit(node.annotation, scope)
            else:
                self._do_arguments(scope, node.args)

    def handle_free_vars(self, scope, parent):
        parent.add_child(scope)
        scope.handle_children()

    def visitClass(self, node, parent):
        parent.add_def(node.name)
        for n in node.args:
            self.visit(n, parent)
        scope = ClassScope(node.name, self.module)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        if node.doc is not None:
            scope.add_def('__doc__')
        scope.add_def('__module__')
        self.scopes[node] = scope
        prev = self.klass
        self.klass = node.name
        self.visit(node.code, scope)
        self.klass = prev
        self.handle_free_vars(scope, parent)

    # name can be a def or a use
    # XXX a few calls and nodes expect a third "assign" arg that is
    # true if the name is being used as an assignment.  only
    # expressions contained within statements may have the assign arg.

    def visitName(self, node, scope, assign=0):
        if assign:
            scope.add_def(node.name)
        else:
            scope.add_use(node.name)

    # operations that bind new names

    def visitFor(self, node, scope):
        self.visit(node.assign, scope, 1)
        self.visit(node.list, scope)
        self.visit(node.body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    def visitFrom(self, node, scope):
        for name, asname in node.names:
            if name == "*":
                continue
            scope.add_def(asname or name)

    def visitImport(self, node, scope):
        for name, asname in node.names:
            i = name.find(".")
            if i > -1:
                # "import a.b" binds only the top-level package name "a".
                name = name[:i]
            scope.add_def(asname or name)

    def visitGlobal(self, node, scope):
        for name in node.names:
            scope.add_global(name)

    def visitAssign(self, node, scope):
        """Propagate assignment flag down to child nodes.

        The Assign node doesn't itself contains the variables being
        assigned to.  Instead, the children in node.nodes are visited
        with the assign flag set to true.  When the names occur in
        those nodes, they are marked as defs.

        Some names that occur in an assignment target are not bound by
        the assignment, e.g. a name occurring inside a slice.  The
        visitor handles these nodes specially; they do not propagate
        the assign flag to their children.
        """
        for n in node.nodes:
            self.visit(n, scope, 1)
        self.visit(node.expr, scope)

    def visitAssName(self, node, scope, assign=1):
        scope.add_def(node.name)

    def visitAssAttr(self, node, scope, assign=0):
        # Attribute assignment binds no name; the base expr is a use.
        self.visit(node.expr, scope, 0)

    def visitSubscript(self, node, scope, assign=0):
        self.visit(node.expr, scope, 0)
        for n in node.subs:
            self.visit(n, scope, 0)

    def visitSlice(self, node, scope, assign=0):
        self.visit(node.expr, scope, 0)
        if node.lower:
            self.visit(node.lower, scope, 0)
        if node.upper:
            self.visit(node.upper, scope, 0)

    def visitAugAssign(self, node, scope):
        # If the LHS is a name, then this counts as assignment.
        # Otherwise, it's just use.
        self.visit(node.node, scope)
        if isinstance(node.node, ast.Name):
            self.visit(node.node, scope, 1) # XXX worry about this
        self.visit(node.expr, scope)

    # prune if statements if tests are false
    # Bug fix: the original used types.StringType/IntType/FloatType,
    # which are Python 2 only and do not exist on the py3k branch this
    # file lives on; the builtin types are the direct equivalents.
    _const_types = (str, int, float)

    def visitIf(self, node, scope):
        for test, body in node.tests:
            if isinstance(test, ast.Const):
                if type(test.value) in self._const_types:
                    if not test.value:
                        # Statically-false constant test: skip the branch.
                        continue
            self.visit(test, scope)
            self.visit(body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    # a yield statement signals a generator

    def visitYield(self, node, scope):
        scope.generator = 1
        self.visit(node.value, scope)
def list_eq(l1, l2):
    """True when the two sequences contain the same items, order ignored."""
    left, right = sorted(l1), sorted(l2)
    return left == right
if __name__ == "__main__":
    # Self-test: for each file named on the command line, compare this
    # module's scope analysis against the stdlib symtable module's
    # results; exit non-zero on the first mismatch.
    import sys
    from compiler import parseFile, walk
    import symtable

    def get_names(syms):
        # Names in a symtable namespace, filtering out generated names
        # like '.0' and '_[1]' that this module never produces.
        return [s for s in [s.get_name() for s in syms.get_symbols()]
                if not (s.startswith('_[') or s.startswith('.'))]

    for file in sys.argv[1:]:
        print(file)
        f = open(file)
        buf = f.read()
        f.close()
        syms = symtable.symtable(buf, file, "exec")
        mod_names = get_names(syms)
        # Run our own symbol pass over the same source.
        tree = parseFile(file)
        s = SymbolVisitor()
        walk(tree, s)

        # compare module-level symbols
        names2 = s.scopes[tree].get_names()

        if not list_eq(mod_names, names2):
            print()
            print("oops", file)
            print(sorted(mod_names))
            print(sorted(names2))
            sys.exit(-1)

        d = {}
        d.update(s.scopes)
        del d[tree]
        scopes = d.values()
        del d

        # NOTE(review): 's' is rebound below, shadowing the SymbolVisitor;
        # harmless since the visitor is not used again in this iteration.
        for s in syms.get_symbols():
            if s.is_namespace():
                l = [sc for sc in scopes
                     if sc.name == s.get_name()]
                if len(l) > 1:
                    print("skipping", s.get_name())
                else:
                    if not list_eq(get_names(s.get_namespace()),
                                   l[0].get_names()):
                        print(s.get_name())
                        print(sorted(get_names(s.get_namespace())))
                        print(sorted(l[0].get_names()))
                        sys.exit(-1)

View File

@ -1,46 +0,0 @@
"""Check for errs in the AST.
The Python parser does not catch all syntax errors. Others, like
assignments with invalid targets, are caught in the code generation
phase.
The compiler package catches some errors in the transformer module.
But it seems clearer to write checkers that use the AST to detect
errors.
"""
from compiler import ast, walk
def check(tree, multi=None):
    """Walk *tree* with a SyntaxErrorChecker and return its error count."""
    checker = SyntaxErrorChecker(multi)
    walk(tree, checker)
    return checker.errors
class SyntaxErrorChecker:
    """A visitor to find syntax errors in the AST."""

    def __init__(self, multi=None):
        """Create new visitor object.

        If optional argument multi is not None, then print messages
        for each error rather than raising a SyntaxError for the
        first.
        """
        self.multi = multi
        self.errors = 0

    def error(self, node, msg):
        """Record one error: print it (multi mode) or raise SyntaxError."""
        self.errors = self.errors + 1
        if self.multi is not None:
            print("%s:%s: %s" % (node.filename, node.lineno, msg))
        else:
            # Bug fix: this used Python 2 raise syntax
            # (``raise SyntaxError, "..."``), which is itself a syntax
            # error on the py3k branch this file lives on.
            raise SyntaxError("%s (%s:%s)" % (msg, node.filename, node.lineno))

    def visitAssign(self, node):
        # the transformer module handles many of these; nothing to do
        # here (dead commented-out list-comprehension check removed).
        pass

File diff suppressed because it is too large Load Diff

View File

@ -1,113 +0,0 @@
from compiler import ast
# XXX should probably rename ASTVisitor to ASTWalker
# XXX can it be made even more generic?
class ASTVisitor:
    """Performs a depth-first walk of the AST

    The ASTVisitor will walk the AST, performing either a preorder or
    postorder traversal depending on which method is called.

    methods:
    preorder(tree, visitor)
    postorder(tree, visitor)
        tree: an instance of ast.Node
        visitor: an instance with visitXXX methods

    The ASTVisitor is responsible for walking over the tree in the
    correct order.  For each node, it checks the visitor argument for
    a method named 'visitNodeType' where NodeType is the name of the
    node's class, e.g. Class.  If the method exists, it is called
    with the node as its sole argument.

    The visitor method for a particular node type can control how
    child nodes are visited during a preorder walk.  (It can't control
    the order during a postorder walk, because it is called _after_
    the walk has occurred.)  The ASTVisitor modifies the visitor
    argument by adding a visit method to the visitor; this method can
    be used to visit a child node of arbitrary type.
    """

    VERBOSE = 0

    def __init__(self):
        self.node = None
        self._cache = {}   # node class -> bound handler method

    def default(self, node, *args):
        """Fallback handler: simply visit every child node."""
        for child in node.getChildNodes():
            self.dispatch(child, *args)

    def dispatch(self, node, *args):
        """Route *node* to the visitor's visit<ClassName> method.

        Handler lookups are cached per node class; a missing handler
        falls back to self.default.
        """
        self.node = node
        klass = node.__class__
        try:
            meth = self._cache[klass]
        except KeyError:
            meth = getattr(self.visitor, 'visit' + klass.__name__,
                           self.default)
            self._cache[klass] = meth
        return meth(node, *args)

    def preorder(self, tree, visitor, *args):
        """Do preorder walk of tree using visitor"""
        self.visitor = visitor
        # Give the visitor a way to recurse into arbitrary children.
        visitor.visit = self.dispatch
        self.dispatch(tree, *args) # XXX *args make sense?
class ExampleASTVisitor(ASTVisitor):
    """Prints examples of the nodes that aren't visited

    This visitor-driver is only useful for development, when it's
    helpful to develop a visitor incrementally, and get feedback on what
    you still have to do.
    """
    # Maps node class -> class; records which kinds were already dumped
    # so each unhandled node type is printed only once.
    examples = {}

    def dispatch(self, node, *args):
        """Like ASTVisitor.dispatch, but dumps unhandled node types."""
        self.node = node
        meth = self._cache.get(node.__class__, None)
        className = node.__class__.__name__
        if meth is None:
            # 0 (not self.default) marks "no handler" so the elif below
            # can report the node instead of silently recursing.
            meth = getattr(self.visitor, 'visit' + className, 0)
            self._cache[node.__class__] = meth
        if self.VERBOSE > 1:
            print("dispatch", className, (meth and meth.__name__ or ''))
        if meth:
            meth(node, *args)
        elif self.VERBOSE > 0:
            klass = node.__class__
            if klass not in self.examples:
                self.examples[klass] = klass
                # Dump one example of this node type with its attributes.
                print()
                print(self.visitor)
                print(klass)
                for attr in dir(node):
                    if attr[0] != '_':
                        print("\t", "%-12.12s" % attr, getattr(node, attr))
                print()
            return self.default(node, *args)
# XXX this is an API change
# Default walker class instantiated by walk() when none is supplied.
_walker = ASTVisitor
def walk(tree, visitor, walker=None, verbose=None):
    """Preorder-walk *tree* with *visitor*; return the visitor.

    A custom *walker* instance may be supplied; otherwise a fresh
    default walker is created.  *verbose*, when given, overrides the
    walker's VERBOSE setting.
    """
    active = _walker() if walker is None else walker
    if verbose is not None:
        active.VERBOSE = verbose
    active.preorder(tree, visitor)
    return active.visitor
def dumpNode(node):
    """Print a node's class and its non-private attributes (debug helper)."""
    print(node.__class__)
    for attr in dir(node):
        if not attr.startswith('_'):
            print("\t", "%-10.10s" % attr, getattr(node, attr))

View File

@ -8,7 +8,7 @@ module searching and loading algorithm, and it is possible to replace
the built-in function __import__ in order to change the semantics of
the import statement, until now it has been difficult to combine the
effect of different __import__ hacks, like loading modules from URLs
by rimport.py, or restricted execution by rexec.py.
by rimport.py.
This module defines three new concepts:

View File

@ -674,7 +674,6 @@ def _test_revamp():
# push MAL's mapper into sys.path[0] as a cache (hard-coded for apps)
#
# from Guido:
# need to change sys.* references for rexec environs
# need hook for MAL's walk-me-up import strategy, or Tim's absolute strategy
# watch out for sys.modules[...] is None
# flag to force absolute imports? (speeds _determine_import_context and
@ -714,7 +713,7 @@ def _test_revamp():
# > However, we still have a tension occurring here:
# >
# > 1) implementing policy in ImportManager assists in single-point policy
# > changes for app/rexec situations
# > changes for app situations
# > 2) implementing policy in Importer assists in package-private policy
# > changes for normal, operating conditions
# >

View File

@ -161,7 +161,6 @@ def isframe(object):
f_lasti index of last attempted instruction in bytecode
f_lineno current line number in Python source code
f_locals local namespace seen by this frame
f_restricted 0 or 1 if frame is in restricted execution mode
f_trace tracing function for this frame, or None"""
return isinstance(object, types.FrameType)
@ -818,10 +817,10 @@ def formatannotation(annotation, base_module=None):
return repr(annotation)
def formatannotationrelativeto(object):
module = getattr(object, '__module__', None)
def _formatannotation(annotation):
return formatannotation(annotation, module)
return _formatannotation
module = getattr(object, '__module__', None)
def _formatannotation(annotation):
return formatannotation(annotation, module)
return _formatannotation
def formatargspec(args, varargs=None, varkw=None, defaults=None,
kwonlyargs=(), kwonlydefaults={}, annotations={},

View File

@ -1,10 +0,0 @@
# $Id$
#
# Copyright (C) 2005 Gregory P. Smith (greg@electricrain.com)
# Licensed to PSF under a Contributor Agreement.
# Backward-compatibility shim: emulate the removed 'md5' module's public
# interface on top of hashlib.
from hashlib import md5
# The old module exposed a 'new' constructor function.
new = md5
blocksize = 1 # legacy value (wrong in any useful sense)
digest_size = 16  # an MD5 digest is always 16 bytes

View File

@ -8,9 +8,4 @@ from types import ClassType as classobj
from types import FunctionType as function
from types import MethodType as instancemethod
from types import ModuleType as module
# CodeType is not accessible in restricted execution mode
try:
from types import CodeType as code
except ImportError:
pass
from types import CodeType as code

View File

@ -1009,14 +1009,9 @@ class Unpickler:
if (not args and
type(klass) is ClassType and
not hasattr(klass, "__getinitargs__")):
try:
value = _EmptyClass()
value.__class__ = klass
instantiated = 1
except RuntimeError:
# In restricted execution, assignment to inst.__class__ is
# prohibited
pass
value = _EmptyClass()
value.__class__ = klass
instantiated = 1
if not instantiated:
try:
value = klass(*args)
@ -1184,20 +1179,7 @@ class Unpickler:
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if state:
try:
inst.__dict__.update(state)
except RuntimeError:
# XXX In restricted execution, the instance's __dict__
# is not accessible. Use the old way of unpickling
# the instance variables. This is a semantic
# difference when unpickling in restricted
# vs. unrestricted modes.
# Note, however, that cPickle has never tried to do the
# .update() business, and always uses
# PyObject_SetItem(inst.__dict__, key, value) in a
# loop over state.items().
for k, v in state.items():
setattr(inst, k, v)
inst.__dict__.update(state)
if slotstate:
for k, v in slotstate.items():
setattr(inst, k, v)

View File

@ -1562,13 +1562,6 @@ opcodes = [
the object is updated via
anyobject.__dict__.update(argument)
This may raise RuntimeError in restricted execution mode (which
disallows access to __dict__ directly); in that case, the object
is updated instead via
for k, v in argument.items():
anyobject[k] = v
"""),
I(name='INST',
@ -1604,11 +1597,7 @@ opcodes = [
calling __init__() is current wisdom). In this case, an instance of
an old-style dummy class is created, and then we try to rebind its
__class__ attribute to the desired class object. If this succeeds,
the new instance object is pushed on the stack, and we're done. In
restricted execution mode it can fail (assignment to __class__ is
disallowed), and I'm not really sure what happens then -- it looks
like the code ends up calling the class object's __init__ anyway,
via falling into the next case.
the new instance object is pushed on the stack, and we're done.
Else (the argtuple is not empty, it's not an old-style class object,
or the class object does have a __getinitargs__ attribute), the code

View File

@ -1,111 +0,0 @@
# Implement 'jpeg' interface using SGI's compression library
# XXX Options 'smooth' and 'optimize' are ignored.
# XXX It appears that compressing grayscale images doesn't work right;
# XXX the resulting file causes weirdness.
class error(Exception):
    """Module-specific exception for jpeg compression/decompression errors."""
    pass
# Compression options honoured by compress()/decompress().
# 'optimize' and 'smooth' are accepted but currently ignored (see module
# header); 'forcegray' forces grayscale output when nonzero.
options = {'quality': 75, 'optimize': 0, 'smooth': 0, 'forcegray': 0}

# Lazily-created SGI 'cl' compressor/decompressor handles, reused across
# calls (see compress()/decompress()).
comp = None
decomp = None
def compress(imgdata, width, height, bytesperpixel):
    """Compress raw image data to JPEG using SGI's 'cl' library.

    imgdata is the raw pixel buffer, width/height its dimensions in
    pixels, and bytesperpixel selects the input format: 1 for grayscale,
    4 for RGBX.  Returns the JPEG-encoded data.
    """
    global comp
    import cl
    # Create the compressor once and cache it in the module global.
    if comp is None: comp = cl.OpenCompressor(cl.JPEG)
    if bytesperpixel == 1:
        format = cl.GRAYSCALE
    elif bytesperpixel == 4:
        format = cl.RGBX
    # NOTE(review): any other bytesperpixel leaves 'format' unbound and
    # raises NameError below -- presumably only 1 and 4 ever occur.
    if options['forcegray']:
        iformat = cl.GRAYSCALE
    else:
        iformat = cl.YUV
    # XXX How to support 'optimize'?
    params = [cl.IMAGE_WIDTH, width, cl.IMAGE_HEIGHT, height,
              cl.ORIGINAL_FORMAT, format,
              cl.ORIENTATION, cl.BOTTOM_UP,
              cl.QUALITY_FACTOR, options['quality'],
              cl.INTERNAL_FORMAT, iformat,
              ]
    comp.SetParams(params)
    jpegdata = comp.Compress(1, imgdata)
    return jpegdata
def decompress(jpegdata):
    """Decompress a JPEG byte string via SGI's 'cl' library.

    Returns a tuple (imgdata, width, height, bytesperpixel) where
    bytesperpixel is 1 for grayscale and 4 for RGBX output.
    """
    global decomp
    import cl
    # Create the decompressor once and cache it in the module global.
    if decomp is None: decomp = cl.OpenDecompressor(cl.JPEG)
    # ReadHeader parses the JPEG header; its return value is unused here.
    headersize = decomp.ReadHeader(jpegdata)
    # Query image geometry and pixel format from the parsed header.
    params = [cl.IMAGE_WIDTH, 0, cl.IMAGE_HEIGHT, 0, cl.INTERNAL_FORMAT, 0]
    decomp.GetParams(params)
    width, height, format = params[1], params[3], params[5]
    if format == cl.GRAYSCALE or options['forcegray']:
        format = cl.GRAYSCALE
        bytesperpixel = 1
    else:
        format = cl.RGBX
        bytesperpixel = 4
    # XXX How to support 'smooth'?
    params = [cl.ORIGINAL_FORMAT, format,
              cl.ORIENTATION, cl.BOTTOM_UP,
              cl.FRAME_BUFFER_SIZE, width*height*bytesperpixel]
    decomp.SetParams(params)
    imgdata = decomp.Decompress(1, jpegdata)
    return imgdata, width, height, bytesperpixel
def setoption(name, value):
    """Set a global jpeg option.

    *name* must be one of the keys of the module-level ``options`` dict
    ('quality', 'optimize', 'smooth', 'forcegray'); the British spelling
    'forcegrey' is accepted as an alias.  *value* must be an integer.

    Raises TypeError for non-integer values and KeyError for unknown
    option names.
    """
    # Fixed Python 2-only syntax: 'raise E, msg' -> 'raise E(msg)' and
    # dict.has_key() -> 'in'.  isinstance() replaces the old
    # 'type(value) is not type(0)' check.
    if not isinstance(value, int):
        raise TypeError('jpeg.setoption: numeric options only')
    if name == 'forcegrey':
        name = 'forcegray'
    if name not in options:
        raise KeyError('jpeg.setoption: unknown option name')
    options[name] = int(value)
def test():
    """Command-line smoke test: display each JPEG file named on sys.argv.

    A leading '-g' flag forces grayscale decoding; with no file
    arguments a hard-coded sample image path is used.
    """
    import sys
    # Consume an optional leading '-g' (grayscale) flag.
    if sys.argv[1:2] == ['-g']:
        del sys.argv[1]
        setoption('forcegray', 1)
    if not sys.argv[1:]:
        sys.argv.append('/usr/local/images/data/jpg/asterix.jpg')
    for file in sys.argv[1:]:
        show(file)
def show(file):
    """Decode JPEG *file*, display it in a GL window, then re-compress it.

    Blocks until the user closes the window (ESC / WINSHUT / WINQUIT).
    As a round-trip test the decoded image is re-compressed and written
    to the fixed path /tmp/j.jpg.
    """
    import gl, GL, DEVICE
    jpegdata = open(file, 'r').read()
    imgdata, width, height, bytesperpixel = decompress(jpegdata)
    gl.foreground()
    gl.prefsize(width, height)
    win = gl.winopen(file)
    if bytesperpixel == 1:
        # Grayscale: 8-bit colormap mode with an identity gray ramp.
        gl.cmode()
        gl.pixmode(GL.PM_SIZE, 8)
        gl.gconfig()
        for i in range(256):
            gl.mapcolor(i, i, i, i)
    else:
        # Color: 32-bit RGB mode.
        gl.RGBmode()
        gl.pixmode(GL.PM_SIZE, 32)
        gl.gconfig()
    # Ask for redraw and close/quit events.
    gl.qdevice(DEVICE.REDRAW)
    gl.qdevice(DEVICE.ESCKEY)
    gl.qdevice(DEVICE.WINQUIT)
    gl.qdevice(DEVICE.WINSHUT)
    gl.lrectwrite(0, 0, width-1, height-1, imgdata)
    while 1:
        dev, val = gl.qread()
        if dev in (DEVICE.ESCKEY, DEVICE.WINSHUT, DEVICE.WINQUIT):
            break
        if dev == DEVICE.REDRAW:
            gl.lrectwrite(0, 0, width-1, height-1, imgdata)
    gl.winclose(win)
    # Now test the compression and write the result to a fixed filename
    newjpegdata = compress(imgdata, width, height, bytesperpixel)
    open('/tmp/j.jpg', 'w').write(newjpegdata)

View File

@ -1,281 +0,0 @@
# Module 'panel'
#
# Support for the Panel library.
# Uses built-in module 'pnl'.
# Applications should use 'panel.function' instead of 'pnl.function';
# most 'pnl' functions are transparently exported by 'panel',
# but dopanel() is overridden and you have to use this version
# if you want to use callbacks.
import pnl
debug = 0
# Test if an object is a list.
#
def is_list(x):
    """Return True if *x* is a list."""
    # isinstance() is the idiomatic form of the old 'type(x) == type([])'
    # comparison; it additionally accepts list subclasses, which is
    # harmless for the S-expression data handled in this module.
    return isinstance(x, list)
# Reverse a list.
#
def reverse(list):
    """Return a new list with the items of *list* in reverse order.

    The argument (any iterable) is not modified.
    """
    # Copy then reverse in place: O(n) instead of the original O(n**2)
    # repeated insert(0, item) loop; result is identical.
    res = [item for item in list]
    res.reverse()
    return res
# Get an attribute of a list, which may itself be another list.
# Don't use 'prop' for name.
#
def getattrlist(list, name):
    """Return the tail of the first sub-list of *list* whose head is *name*.

    Attribute entries are non-empty sub-lists; the first one whose first
    element equals *name* yields its remaining elements.  Returns [] when
    no matching attribute entry exists.
    """
    for entry in list:
        if not entry or not is_list(entry):
            continue
        if entry[0] == name:
            return entry[1:]
    return []
# Get a property of a list, which may itself be another list.
#
def getproplist(list, name):
    """Return the value part of property *name* inside *list*.

    Properties are sub-lists of the form ['prop', name, value...]; the
    first match yields its value elements.  Returns [] when absent.
    """
    for entry in list:
        if not entry or not is_list(entry) or entry[0] != 'prop':
            continue
        if len(entry) > 1 and entry[1] == name:
            return entry[2:]
    return []
# Test if an actuator description contains the property 'end-of-group'
#
def is_endgroup(list):
    """Return a true value if actuator description *list* carries the
    'end-of-group' property set to '#t'."""
    props = getproplist(list, 'end-of-group')
    if not props:
        # Preserve the original falsy [] result when the property is absent.
        return props
    return props[0] == '#t'
# Neatly display an actuator definition given as S-expression
# the prefix string is printed before each line.
#
def show_actuator(prefix, a):
    """Neatly print actuator definition *a* (an S-expression list).

    *prefix* starts every output line; sub-actuator ('al') lists are
    printed recursively with a longer prefix.
    """
    for item in a:
        if not is_list(item):
            print(prefix, item)
        elif item and item[0] == 'al':
            print(prefix, 'Subactuator list:')
            for a in item[1:]:
                # NOTE: rebinds the loop variable 'a' of the outer loop;
                # harmless here because nothing after this uses it.
                show_actuator(prefix + ' ', a)
        elif len(item) == 2:
            # [name, value] attribute pair.
            print(prefix, item[0], '=>', item[1])
        elif len(item) == 3 and item[0] == 'prop':
            print(prefix, 'Prop', item[1], '=>', end=' ')
            print(item[2])
        else:
            print(prefix, '?', item)
# Neatly display a panel.
#
def show_panel(prefix, p):
    """Neatly print panel definition *p* (an S-expression list).

    Same display logic as show_actuator(), except the nested 'al' list
    is labelled as an actuator list.
    """
    for item in p:
        if not is_list(item):
            print(prefix, item)
        elif item and item[0] == 'al':
            print(prefix, 'Actuator list:')
            for a in item[1:]:
                show_actuator(prefix + ' ', a)
        elif len(item) == 2:
            # [name, value] attribute pair.
            print(prefix, item[0], '=>', item[1])
        elif len(item) == 3 and item[0] == 'prop':
            print(prefix, 'Prop', item[1], '=>', end=' ')
            print(item[2])
        else:
            print(prefix, '?', item)
# Exception raised by build_actuator or build_panel.
# NOTE: a string "exception" (Python 1.x style); 'raise panel_error, msg'
# is Python 2 syntax and string exceptions are gone in Python 3.
panel_error = 'panel error'
# Dummy callback used to initialize the callbacks.
#
def dummy_callback(arg):
    """No-op placeholder installed as the initial actuator callback;
    the argument is ignored."""
    return None
# Assign attributes to members of the target.
# Attribute names in exclist are ignored.
# The member name is the attribute name prefixed with the prefix.
#
def assign_members(target, attrlist, exclist, prefix):
    """Assign attributes of *target* from S-expression attribute list.

    *attrlist* is a list of [name, value] pairs; names in *exclist* are
    skipped.  Each surviving pair is stored as target.<prefix><name>.
    Textual values are decoded: numeric literals via eval(), '"quoted"'
    strings by stripping the quotes; unrecognized values are reported
    and skipped.
    """
    for item in attrlist:
        if is_list(item) and len(item) == 2 and item[0] not in exclist:
            name, value = item[0], item[1]
            ok = 1
            if value[0] in '-0123456789':
                # Numeric literal.  NOTE(review): eval() on panel-file
                # data -- only safe for trusted Panel Editor output.
                value = eval(value)
            elif value[0] == '"':
                value = value[1:-1]
            elif value == 'move-then-resize':
                # Strange default set by Panel Editor...
                ok = 0
            else:
                print('unknown value', value, 'for', name)
                ok = 0
            if ok:
                # Build and exec an assignment statement because the
                # attribute name is only known as a string.
                lhs = 'target.' + prefix + name
                stmt = lhs + '=' + repr(value)
                if debug: print('exec', stmt)
                try:
                    exec(stmt + '\n')
                except KeyboardInterrupt: # Don't catch this!
                    raise KeyboardInterrupt
                except:
                    # Best-effort: report and continue with the next pair.
                    print('assign failed:', stmt)
# Build a real actuator from an actuator description.
# Return a pair (actuator, name).
#
def build_actuator(descr):
    """Build a real pnl actuator from S-expression description *descr*.

    Returns a pair (actuator, name); name is '' when the description
    has no 'name' attribute.
    """
    namelist = getattrlist(descr, 'name')
    if namelist:
        # Assume it is a string
        actuatorname = namelist[0][1:-1]
    else:
        actuatorname = ''
    # 'type' shadows the builtin; kept for byte-compatibility.
    type = descr[0]
    if type[:4] == 'pnl_': type = type[4:]
    act = pnl.mkact(type)
    # Install no-op callbacks so dopanel() can always dispatch.
    act.downfunc = act.activefunc = act.upfunc = dummy_callback
    #
    assign_members(act, descr[1:], ['al', 'data', 'name'], '')
    #
    # Treat actuator-specific data
    #
    datalist = getattrlist(descr, 'data')
    prefix = ''
    if type[-4:] == 'puck':
        prefix = 'puck_'
    elif type == 'mouse':
        prefix = 'mouse_'
    assign_members(act, datalist, [], prefix)
    #
    return act, actuatorname
# Build all sub-actuators and add them to the super-actuator.
# The super-actuator must already have been added to the panel.
# Sub-actuators with defined names are added as members to the panel
# so they can be referenced as p.name.
#
# Note: I have no idea how panel.endgroup() works when applied
# to a sub-actuator.
#
def build_subactuators(panel, super_act, al):
    """Build all sub-actuators in *al* and add them to *super_act*.

    The super-actuator must already have been added to the panel.
    Sub-actuators with defined names are bound as attributes of the
    panel object so they can be referenced as p.name.
    """
    #
    # This is nearly the same loop as below in build_panel(),
    # except a call is made to addsubact() instead of addact().
    #
    for a in al:
        act, name = build_actuator(a)
        act.addsubact(super_act)
        if name:
            # Bind the actuator as panel.<name> via exec because the
            # attribute name is only known as a string.
            stmt = 'panel.' + name + ' = act'
            if debug: print('exec', stmt)
            exec(stmt + '\n')
        if is_endgroup(a):
            panel.endgroup()
        # Recurse into nested actuator lists.
        sub_al = getattrlist(a, 'al')
        if sub_al:
            build_subactuators(panel, act, sub_al)
    #
    # Fix the actuator to which we just added subactuators.
    # This can't hurt (I hope) and is needed for the scroll actuator.
    #
    super_act.fixact()
# Build a real panel from a panel definition.
# Return a panel object p, where for each named actuator a, p.name is a
# reference to a.
#
def build_panel(descr):
    """Build a real panel from S-expression panel definition *descr*.

    Returns a panel object p where, for each named actuator a, p.name
    is a reference to a.  Raises panel_error (a string exception --
    Python 2 only) when *descr* is not a panel definition.
    """
    #
    # Sanity check
    #
    if (not descr) or descr[0] != 'panel':
        raise panel_error, 'panel description must start with "panel"'
    #
    if debug: show_panel('', descr)
    #
    # Create an empty panel
    #
    panel = pnl.mkpanel()
    #
    # Assign panel attributes
    #
    assign_members(panel, descr[1:], ['al'], '')
    #
    # Look for actuator list
    #
    al = getattrlist(descr, 'al')
    #
    # The order in which actuators are created is important
    # because of the endgroup() operator.
    # Unfortunately the Panel Editor outputs the actuator list
    # in reverse order, so we reverse it here.
    #
    al = reverse(al)
    #
    for a in al:
        act, name = build_actuator(a)
        act.addact(panel)
        if name:
            # Bind the actuator as panel.<name> (see build_subactuators).
            stmt = 'panel.' + name + ' = act'
            exec(stmt + '\n')
        if is_endgroup(a):
            panel.endgroup()
        sub_al = getattrlist(a, 'al')
        if sub_al:
            build_subactuators(panel, act, sub_al)
    #
    return panel
# Wrapper around pnl.dopanel() which calls call-back functions.
#
def my_dopanel():
    """Wrapper around pnl.dopanel() that dispatches callback functions.

    Invokes the down/active/up callback of whichever actuators fired,
    then returns the first element of pnl.dopanel()'s result.
    """
    # Extract only the first 4 elements to allow for future expansion
    a, down, active, up = pnl.dopanel()[:4]
    if down:
        down.downfunc(down)
    if active:
        active.activefunc(active)
    if up:
        up.upfunc(up)
    return a
# Create one or more panels from a description file (S-expressions)
# generated by the Panel Editor.
#
def defpanellist(file):
    """Create a list of panels from Panel Editor output file *file*.

    The file contains S-expressions, one per panel, parsed with the
    panelparser module.
    """
    import panelparser
    # NOTE(review): the file object is never explicitly closed.
    descrlist = panelparser.parse_file(open(file, 'r'))
    panellist = []
    for descr in descrlist:
        panellist.append(build_panel(descr))
    return panellist
# Import everything from built-in method pnl, so the user can always
# use panel.foo() instead of pnl.foo().
# This gives *no* performance penalty once this module is imported.
#
from pnl import * # for export
dopanel = my_dopanel # override pnl.dopanel

View File

@ -1,128 +0,0 @@
# Module 'parser'
#
# Parse S-expressions output by the Panel Editor
# (which is written in Scheme so it can't help writing S-expressions).
#
# See notes at end of file.
# Character classes driving the tokenizer below.
whitespace = ' \t\n'
operators = '()\''
# A token ends at any operator, whitespace, comment start (;) or quote (").
separators = operators + whitespace + ';' + '"'
# Tokenize a string.
# Return a list of tokens (strings).
#
def tokenize_string(s):
    """Tokenize S-expression source string *s*.

    Returns a list of tokens (strings).  Recognizes: insignificant
    whitespace; comments from ';' to end of input; double-quoted
    strings with backslash escapes (quotes kept in the token); the
    single-character operators ( ) '; and atoms delimited by any
    separator character.
    """
    tokens = []
    while s:
        ch = s[:1]
        if ch in whitespace:
            # Skip insignificant whitespace.
            s = s[1:]
            continue
        if ch == ';':
            # Comment: discard the remainder of the input.
            break
        if ch == '"':
            # Quoted string: scan to the closing quote, honouring
            # backslash escapes; the quotes stay in the token.
            limit = len(s)
            pos = 1
            while pos < limit:
                ch = s[pos]
                pos = pos + 1
                if ch == '"':
                    break
                if ch == '\\':
                    pos = pos + 1
            tokens.append(s[:pos])
            s = s[pos:]
        elif ch in operators:
            # Single-character operator token.
            tokens.append(ch)
            s = s[1:]
        else:
            # Atom: everything up to the next separator.
            limit = len(s)
            pos = 1
            while pos < limit:
                if s[pos] in separators:
                    break
                pos = pos + 1
            tokens.append(s[:pos])
            s = s[pos:]
    return tokens
# Tokenize a whole file (given as file object, not as file name).
# Return a list of tokens (strings).
#
def tokenize_file(fp):
    """Tokenize a whole file (given as file object, not as file name).

    Returns a list of tokens (strings), in source order.
    """
    tokens = []
    while 1:
        line = fp.readline()
        if not line: break
        # extend() replaces the original quadratic 'tokens = tokens + ...'
        # list re-concatenation; the resulting list is identical.
        tokens.extend(tokenize_string(line))
    return tokens
# Exception raised by parse_expr.
# NOTE: a string "exception" (Python 1.x style); 'raise syntax_error, msg'
# is Python 2 syntax and string exceptions are gone in Python 3.
syntax_error = 'syntax error'
# Parse an S-expression.
# Input is a list of tokens as returned by tokenize_*().
# Return a pair (expr, tokens)
# where expr is a list representing the s-expression,
# and tokens contains the remaining tokens.
# May raise syntax_error.
#
def parse_expr(tokens):
    """Parse one S-expression from the front of token list *tokens*.

    Input is a list of tokens as returned by tokenize_*().  Returns a
    pair (expr, rest) where expr is the nested-list representation of
    the expression and rest holds the unconsumed tokens.  Raises
    syntax_error (a string exception -- Python 2 only) on a missing
    opening or closing parenthesis.
    """
    if (not tokens) or tokens[0] != '(':
        raise syntax_error, 'expected "("'
    tokens = tokens[1:]
    expr = []
    while 1:
        if not tokens:
            raise syntax_error, 'missing ")"'
        if tokens[0] == ')':
            return expr, tokens[1:]
        elif tokens[0] == '(':
            # Nested list: recurse and splice in the sub-expression.
            subexpr, tokens = parse_expr(tokens)
            expr.append(subexpr)
        else:
            # Atom or string token.
            expr.append(tokens[0])
            tokens = tokens[1:]
# Parse a file (given as file object, not as file name).
# Return a list of parsed S-expressions found at the top level.
#
def parse_file(fp):
    """Parse every top-level S-expression in file object *fp*.

    Returns a list of parsed expressions, each in the nested-list form
    produced by parse_expr().
    """
    remaining = tokenize_file(fp)
    parsed = []
    while remaining:
        expr, remaining = parse_expr(remaining)
        parsed.append(expr)
    return parsed
# EXAMPLE:
#
# The input
# '(hip (hop hur-ray))'
#
# passed to tokenize_string() returns the token list
# ['(', 'hip', '(', 'hop', 'hur-ray', ')', ')']
#
# When this is passed to parse_expr() it returns the expression
# ['hip', ['hop', 'hur-ray']]
# plus an empty token list (because there are no tokens left).
#
# When a file containing the example is passed to parse_file() it returns
# a list whose only element is the output of parse_expr() above:
# [['hip', ['hop', 'hur-ray']]]
# TOKENIZING:
#
# Comments start with semicolon (;) and continue till the end of the line.
#
# Tokens are separated by whitespace, except the following characters
# always form a separate token (outside strings):
# ( ) '
# Strings are enclosed in double quotes (") and backslash (\) is used
# as escape character in strings.

View File

@ -1,585 +0,0 @@
"""Restricted execution facilities.
The class RExec exports methods r_exec(), r_eval(), r_execfile(), and
r_import(), which correspond roughly to the built-in operations
exec, eval(), execfile() and import, but executing the code in an
environment that only exposes those built-in operations that are
deemed safe. To this end, a modest collection of 'fake' modules is
created which mimics the standard modules by the same names. It is a
policy decision which built-in modules and operations are made
available; this module provides a reasonable default, but derived
classes can change the policies e.g. by overriding or extending class
variables like ok_builtin_modules or methods like make_sys().
XXX To do:
- r_open should allow writing tmp dir
- r_exec etc. with explicit globals/locals? (Use rexec("exec ... in ...")?)
"""
import sys
import __builtin__
import os
import ihooks
import imp
__all__ = ["RExec"]
class FileBase:
    """Base class holding the whitelist of file-object methods that the
    restricted wrappers are allowed to expose."""

    ok_file_methods = ('fileno', 'flush', 'isatty', 'read', 'readline',
                       'readlines', 'seek', 'tell', 'write', 'writelines',
                       '__iter__')
class FileWrapper(FileBase):
    """Wrapper exposing only whitelisted methods of a real file object."""

    # XXX This is just like a Bastion -- should use that!

    def __init__(self, f):
        # Copy each whitelisted bound method of the real file *f* onto
        # this instance, so restricted code never holds *f* itself.
        for m in self.ok_file_methods:
            if not hasattr(self, m) and hasattr(f, m):
                setattr(self, m, getattr(f, m))

    def close(self):
        # Closing the wrapper only flushes; the underlying file stays open.
        self.flush()
TEMPLATE = """
def %s(self, *args):
return getattr(self.mod, self.name).%s(*args)
"""
class FileDelegate(FileBase):
    """File proxy that forwards every call to getattr(mod, name).

    Used to hand the *unrestricted* environment delegates for the
    restricted std streams (e.g. FileDelegate(sys_module, 'stdout')).
    """

    def __init__(self, mod, name):
        # Where the real file lives: the module object and attribute name.
        self.mod = mod
        self.name = name

    # Generate a forwarding method for every whitelisted method plus
    # close(); executed at class-definition time, so each exec() adds a
    # function to the class namespace.
    for m in FileBase.ok_file_methods + ('close',):
        exec(TEMPLATE % (m, m))
class RHooks(ihooks.Hooks):
    """ihooks.Hooks subclass that delegates all import decisions to an
    RExec instance (set via the constructor or set_rexec()).

    NOTE: uses Python 2-only 'raise E, msg' syntax throughout.
    """

    def __init__(self, *args):
        # Hacks to support both old and new interfaces:
        # old interface was RHooks(rexec[, verbose])
        # new interface is RHooks([verbose])
        verbose = 0
        rexec = None
        if args and type(args[-1]) == type(0):
            verbose = args[-1]
            args = args[:-1]
        if args and hasattr(args[0], '__class__'):
            rexec = args[0]
            args = args[1:]
        if args:
            raise TypeError, "too many arguments"
        ihooks.Hooks.__init__(self, verbose)
        self.rexec = rexec

    def set_rexec(self, rexec):
        # Called by RExec instance to complete initialization
        self.rexec = rexec

    def get_suffixes(self):
        return self.rexec.get_suffixes()

    def is_builtin(self, name):
        return self.rexec.is_builtin(name)

    def init_builtin(self, name):
        # Import for real, then hand back a sanitized copy.
        m = __import__(name)
        return self.rexec.copy_except(m, ())

    # Loading frozen/source/compiled/package modules directly would
    # bypass the restricted machinery, so these hooks are forbidden.
    def init_frozen(self, name): raise SystemError, "don't use this"
    def load_source(self, *args): raise SystemError, "don't use this"
    def load_compiled(self, *args): raise SystemError, "don't use this"
    def load_package(self, *args): raise SystemError, "don't use this"

    def load_dynamic(self, name, filename, file):
        return self.rexec.load_dynamic(name, filename, file)

    def add_module(self, name):
        return self.rexec.add_module(name)

    def modules_dict(self):
        return self.rexec.modules

    def default_path(self):
        return self.rexec.modules['sys'].path
# XXX Backwards compatibility: old public names for the ihooks classes
# that RExec uses as its loader/importer.
RModuleLoader = ihooks.FancyModuleLoader
RModuleImporter = ihooks.ModuleImporter
class RExec(ihooks._Verbose):
    """Basic restricted execution framework.

    Code executed in this restricted environment will only have access to
    modules and functions that are deemed safe; you can subclass RExec to
    add or remove capabilities as desired.

    The RExec class can prevent code from performing unsafe operations like
    reading or writing disk files, or using TCP/IP sockets.  However, it does
    not protect against code using extremely large amounts of memory or
    processor time.

    NOTE: the class is deliberately disabled -- __init__ raises
    RuntimeError unconditionally (see below) because the mechanism is
    not secure on Python 2.2+.  Py2-only idioms (raise E, msg; filter/
    map returning lists; dict.keys() + list) are preserved as-is.
    """

    # Policy class attributes; subclasses override these to tune what the
    # sandbox may see.
    ok_path = tuple(sys.path)           # That's a policy decision

    ok_builtin_modules = ('audioop', 'array', 'binascii',
                          'cmath', 'errno', 'imageop',
                          'marshal', 'math', 'md5', 'operator',
                          'parser', 'select',
                          'sha', '_sre', 'strop', 'struct', 'time',
                          '_weakref')

    ok_posix_names = ('error', 'fstat', 'listdir', 'lstat', 'readlink',
                      'stat', 'times', 'uname', 'getpid', 'getppid',
                      'getcwd', 'getuid', 'getgid', 'geteuid', 'getegid')

    ok_sys_names = ('byteorder', 'copyright', 'exit', 'getdefaultencoding',
                    'getrefcount', 'hexversion', 'maxint', 'maxunicode',
                    'platform', 'ps1', 'ps2', 'version', 'version_info')

    nok_builtin_names = ('open', 'file', 'reload', '__import__')

    ok_file_types = (imp.C_EXTENSION, imp.PY_SOURCE)

    def __init__(self, hooks = None, verbose = 0):
        """Returns an instance of the RExec class.

        The hooks parameter is an instance of the RHooks class or a subclass
        of it.  If it is omitted or None, the default RHooks class is
        instantiated.

        Whenever the RExec module searches for a module (even a built-in one)
        or reads a module's code, it doesn't actually go out to the file
        system itself.  Rather, it calls methods of an RHooks instance that
        was passed to or created by its constructor.  (Actually, the RExec
        object doesn't make these calls --- they are made by a module loader
        object that's part of the RExec object.  This allows another level of
        flexibility, which can be useful when changing the mechanics of
        import within the restricted environment.)

        By providing an alternate RHooks object, we can control the file
        system accesses made to import a module, without changing the
        actual algorithm that controls the order in which those accesses are
        made.  For instance, we could substitute an RHooks object that
        passes all filesystem requests to a file server elsewhere, via some
        RPC mechanism such as ILU.  Grail's applet loader uses this to support
        importing applets from a URL for a directory.

        If the verbose parameter is true, additional debugging output may be
        sent to standard output.
        """

        # Deliberate hard failure: the rest of this method is unreachable
        # and kept only for historical reference.
        raise RuntimeError, "This code is not secure in Python 2.2 and later"

        ihooks._Verbose.__init__(self, verbose)
        # XXX There's a circular reference here:
        self.hooks = hooks or RHooks(verbose)
        self.hooks.set_rexec(self)
        self.modules = {}
        self.ok_dynamic_modules = self.ok_builtin_modules
        list = []
        for mname in self.ok_builtin_modules:
            if mname in sys.builtin_module_names:
                list.append(mname)
        self.ok_builtin_modules = tuple(list)
        self.set_trusted_path()
        self.make_builtin()
        self.make_initial_modules()
        # make_sys must be last because it adds the already created
        # modules to its builtin_module_names
        self.make_sys()
        self.loader = RModuleLoader(self.hooks, verbose)
        self.importer = RModuleImporter(self.loader, verbose)

    def set_trusted_path(self):
        # Set the path from which dynamic modules may be loaded.
        # Those dynamic modules must also occur in ok_builtin_modules
        self.trusted_path = filter(os.path.isabs, sys.path)

    def load_dynamic(self, name, filename, file):
        if name not in self.ok_dynamic_modules:
            raise ImportError, "untrusted dynamic module: %s" % name
        if name in sys.modules:
            src = sys.modules[name]
        else:
            src = imp.load_dynamic(name, filename, file)
        # Hand restricted code a sanitized copy, never the real module.
        dst = self.copy_except(src, [])
        return dst

    def make_initial_modules(self):
        self.make_main()
        self.make_osname()

    # Helpers for RHooks

    def get_suffixes(self):
        return [item   # (suff, mode, type)
                for item in imp.get_suffixes()
                if item[2] in self.ok_file_types]

    def is_builtin(self, mname):
        return mname in self.ok_builtin_modules

    # The make_* methods create specific built-in modules

    def make_builtin(self):
        # Restricted __builtin__: copy everything except the blacklisted
        # names, then substitute the r_* policy hooks.
        m = self.copy_except(__builtin__, self.nok_builtin_names)
        m.__import__ = self.r_import
        m.reload = self.r_reload
        m.open = m.file = self.r_open

    def make_main(self):
        m = self.add_module('__main__')

    def make_osname(self):
        # Restricted 'posix' (or platform equivalent): whitelisted names
        # only, plus a snapshot copy of os.environ.
        osname = os.name
        src = __import__(osname)
        dst = self.copy_only(src, self.ok_posix_names)
        dst.environ = e = {}
        for key, value in os.environ.items():
            e[key] = value

    def make_sys(self):
        m = self.copy_only(sys, self.ok_sys_names)
        m.modules = self.modules
        m.argv = ['RESTRICTED']
        m.path = map(None, self.ok_path)
        m.exc_info = self.r_exc_info
        m = self.modules['sys']
        l = self.modules.keys() + list(self.ok_builtin_modules)
        l.sort()
        m.builtin_module_names = tuple(l)

    # The copy_* methods copy existing modules with some changes

    def copy_except(self, src, exceptions):
        dst = self.copy_none(src)
        for name in dir(src):
            setattr(dst, name, getattr(src, name))
        for name in exceptions:
            try:
                delattr(dst, name)
            except AttributeError:
                pass
        return dst

    def copy_only(self, src, names):
        dst = self.copy_none(src)
        for name in names:
            try:
                value = getattr(src, name)
            except AttributeError:
                continue
            setattr(dst, name, value)
        return dst

    def copy_none(self, src):
        # Fresh empty module registered in the restricted sys.modules.
        m = self.add_module(src.__name__)
        m.__doc__ = src.__doc__
        return m

    # Add a module -- return an existing module or create one

    def add_module(self, mname):
        m = self.modules.get(mname)
        if m is None:
            self.modules[mname] = m = self.hooks.new_module(mname)
        m.__builtins__ = self.modules['__builtin__']
        return m

    # The r* methods are public interfaces

    def r_exec(self, code):
        """Execute code within a restricted environment.

        The code parameter must either be a string containing one or more
        lines of Python code, or a compiled code object, which will be
        executed in the restricted environment's __main__ module.
        """
        m = self.add_module('__main__')
        exec(code, m.__dict__)

    def r_eval(self, code):
        """Evaluate code within a restricted environment.

        The code parameter must either be a string containing a Python
        expression, or a compiled code object, which will be evaluated in
        the restricted environment's __main__ module.  The value of the
        expression or code object will be returned.
        """
        m = self.add_module('__main__')
        return eval(code, m.__dict__)

    def r_execfile(self, file):
        """Execute the Python code in the file in the restricted
        environment's __main__ module.
        """
        m = self.add_module('__main__')
        execfile(file, m.__dict__)

    def r_import(self, mname, globals={}, locals={}, fromlist=[]):
        """Import a module, raising an ImportError exception if the module
        is considered unsafe.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.
        """
        return self.importer.import_module(mname, globals, locals, fromlist)

    def r_reload(self, m):
        """Reload the module object, re-parsing and re-initializing it.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.
        """
        return self.importer.reload(m)

    def r_unload(self, m):
        """Unload the module.

        Removes it from the restricted environment's sys.modules dictionary.
        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.
        """
        return self.importer.unload(m)

    # The s_* methods are similar but also swap std{in,out,err}

    def make_delegate_files(self):
        # Delegates track the restricted sys module's current streams;
        # wrappers expose only whitelisted methods of the real streams.
        s = self.modules['sys']
        self.delegate_stdin = FileDelegate(s, 'stdin')
        self.delegate_stdout = FileDelegate(s, 'stdout')
        self.delegate_stderr = FileDelegate(s, 'stderr')
        self.restricted_stdin = FileWrapper(sys.stdin)
        self.restricted_stdout = FileWrapper(sys.stdout)
        self.restricted_stderr = FileWrapper(sys.stderr)

    def set_files(self):
        if not hasattr(self, 'save_stdin'):
            self.save_files()
        if not hasattr(self, 'delegate_stdin'):
            self.make_delegate_files()
        s = self.modules['sys']
        s.stdin = self.restricted_stdin
        s.stdout = self.restricted_stdout
        s.stderr = self.restricted_stderr
        sys.stdin = self.delegate_stdin
        sys.stdout = self.delegate_stdout
        sys.stderr = self.delegate_stderr

    def reset_files(self):
        # Re-adopt whatever streams restricted code installed.
        self.restore_files()
        s = self.modules['sys']
        self.restricted_stdin = s.stdin
        self.restricted_stdout = s.stdout
        self.restricted_stderr = s.stderr

    def save_files(self):
        self.save_stdin = sys.stdin
        self.save_stdout = sys.stdout
        self.save_stderr = sys.stderr

    def restore_files(self):
        sys.stdin = self.save_stdin
        sys.stdout = self.save_stdout
        sys.stderr = self.save_stderr

    def s_apply(self, func, args=(), kw={}):
        # Run *func* with the std streams swapped to their restricted
        # versions, restoring them afterwards even on error.
        self.save_files()
        try:
            self.set_files()
            r = func(*args, **kw)
        finally:
            self.restore_files()
        return r

    def s_exec(self, *args):
        """Execute code within a restricted environment.

        Similar to the r_exec() method, but the code will be granted access
        to restricted versions of the standard I/O streams sys.stdin,
        sys.stderr, and sys.stdout.

        The code parameter must either be a string containing one or more
        lines of Python code, or a compiled code object, which will be
        executed in the restricted environment's __main__ module.
        """
        return self.s_apply(self.r_exec, args)

    def s_eval(self, *args):
        """Evaluate code within a restricted environment.

        Similar to the r_eval() method, but the code will be granted access
        to restricted versions of the standard I/O streams sys.stdin,
        sys.stderr, and sys.stdout.

        The code parameter must either be a string containing a Python
        expression, or a compiled code object, which will be evaluated in
        the restricted environment's __main__ module.  The value of the
        expression or code object will be returned.
        """
        return self.s_apply(self.r_eval, args)

    def s_execfile(self, *args):
        """Execute the Python code in the file in the restricted
        environment's __main__ module.

        Similar to the r_execfile() method, but the code will be granted
        access to restricted versions of the standard I/O streams sys.stdin,
        sys.stderr, and sys.stdout.
        """
        return self.s_apply(self.r_execfile, args)

    def s_import(self, *args):
        """Import a module, raising an ImportError exception if the module
        is considered unsafe.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        Similar to the r_import() method, but has access to restricted
        versions of the standard I/O streams sys.stdin, sys.stderr, and
        sys.stdout.
        """
        return self.s_apply(self.r_import, args)

    def s_reload(self, *args):
        """Reload the module object, re-parsing and re-initializing it.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        Similar to the r_reload() method, but has access to restricted
        versions of the standard I/O streams sys.stdin, sys.stderr, and
        sys.stdout.
        """
        return self.s_apply(self.r_reload, args)

    def s_unload(self, *args):
        """Unload the module.

        Removes it from the restricted environment's sys.modules dictionary.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        Similar to the r_unload() method, but has access to restricted
        versions of the standard I/O streams sys.stdin, sys.stderr, and
        sys.stdout.
        """
        return self.s_apply(self.r_unload, args)

    # Restricted open(...)

    def r_open(self, file, mode='r', buf=-1):
        """Method called when open() is called in the restricted environment.

        The arguments are identical to those of the open() function, and a
        file object (or a class instance compatible with file objects)
        should be returned.  RExec's default behaviour is allow opening
        any file for reading, but forbidding any attempt to write a file.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.
        """
        mode = str(mode)
        if mode not in ('r', 'rb'):
            raise IOError, "can't open files for writing in restricted mode"
        return open(file, mode, buf)

    # Restricted version of sys.exc_info()

    def r_exc_info(self):
        # Strip the traceback so restricted code can't reach frame objects.
        ty, va, tr = sys.exc_info()
        tr = None
        return ty, va, tr
def test():
    """Interactive demo: run a script (or a REPL) inside an RExec sandbox.

    Usage: rexec.py [-v] [-t module]... [script [args]]
    -v increases verbosity; each -t adds a trusted builtin module.
    Returns a process exit status suitable for sys.exit().
    """
    import getopt, traceback
    opts, args = getopt.getopt(sys.argv[1:], 'vt:')
    verbose = 0
    trusted = []
    for o, a in opts:
        if o == '-v':
            verbose = verbose+1
        if o == '-t':
            trusted.append(a)
    r = RExec(verbose=verbose)
    if trusted:
        r.ok_builtin_modules = r.ok_builtin_modules + tuple(trusted)
    if args:
        # Give the sandbox the script's argv and its directory on the path.
        r.modules['sys'].argv = args
        r.modules['sys'].path.insert(0, os.path.dirname(args[0]))
    else:
        r.modules['sys'].path.insert(0, "")
    fp = sys.stdin
    if args and args[0] != '-':
        try:
            fp = open(args[0])
        except IOError as msg:
            # NOTE(review): 'msg' is caught but not included in the output.
            print("%s: can't open file %r" % (sys.argv[0], args[0]))
            return 1
    if fp.isatty():
        # Interactive: run a REPL whose code executes restricted.
        try:
            import readline
        except ImportError:
            pass
        import code
        class RestrictedConsole(code.InteractiveConsole):
            def runcode(self, co):
                self.locals['__builtins__'] = r.modules['__builtin__']
                r.s_apply(code.InteractiveConsole.runcode, (self, co))
        try:
            RestrictedConsole(r.modules['__main__'].__dict__).interact()
        except SystemExit as n:
            return n
    else:
        # Non-interactive: compile the whole input and run it restricted.
        text = fp.read()
        fp.close()
        c = compile(text, fp.name, 'exec')
        try:
            r.s_exec(c)
        except SystemExit as n:
            return n
        except:
            traceback.print_exc()
            return 1
# Run the interactive demo when executed as a script.
if __name__ == '__main__':
    sys.exit(test())

View File

@ -35,7 +35,6 @@ class AllTest(unittest.TestCase):
import _socket
self.check_all("BaseHTTPServer")
self.check_all("Bastion")
self.check_all("CGIHTTPServer")
self.check_all("ConfigParser")
self.check_all("Cookie")
@ -124,7 +123,6 @@ class AllTest(unittest.TestCase):
self.check_all("random")
self.check_all("re")
self.check_all("repr")
self.check_all("rexec")
self.check_all("rfc822")
self.check_all("rlcompleter")
self.check_all("robotparser")

View File

@ -1,3 +0,0 @@
##import Bastion
##
##Bastion._test()

View File

@ -1,265 +0,0 @@
import compiler
from compiler.ast import flatten
import os, sys, time, unittest
import test.test_support
from random import random
# How much time in seconds can pass before we print a 'Still working' message.
_PRINT_WORKING_MSG_INTERVAL = 5 * 60
class TrivialContext(object):
def __enter__(self):
return self
def __exit__(self, *exc_info):
pass
class CompilerTest(unittest.TestCase):
def testCompileLibrary(self):
# A simple but large test. Compile all the code in the
# standard library and its test suite. This doesn't verify
# that any of the code is correct, merely the compiler is able
# to generate some kind of code for it.
next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
libdir = os.path.dirname(unittest.__file__)
testdir = os.path.dirname(test.test_support.__file__)
for dir in [libdir, testdir]:
for basename in os.listdir(dir):
# Print still working message since this test can be really slow
if next_time <= time.time():
next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
print(' testCompileLibrary still working, be patient...', file=sys.__stdout__)
sys.__stdout__.flush()
if not basename.endswith(".py"):
continue
if not TEST_ALL and random() < 0.98:
continue
path = os.path.join(dir, basename)
if test.test_support.verbose:
print("compiling", path)
f = open(path, "U")
buf = f.read()
f.close()
if "badsyntax" in basename or "bad_coding" in basename:
self.assertRaises(SyntaxError, compiler.compile,
buf, basename, "exec")
else:
try:
compiler.compile(buf, basename, "exec")
except Exception as e:
args = list(e.args) or [""]
args[0] = "%s [in file %s]" % (args[0], basename)
e.args = tuple(args)
raise
def testNewClassSyntax(self):
compiler.compile("class foo():pass\n\n","<string>","exec")
def testYieldExpr(self):
compiler.compile("def g(): yield\n\n", "<string>", "exec")
def testTryExceptFinally(self):
# Test that except and finally clauses in one try stmt are recognized
c = compiler.compile("try:\n 1/0\nexcept:\n e = 1\nfinally:\n f = 1",
"<string>", "exec")
dct = {}
exec(c, dct)
self.assertEquals(dct.get('e'), 1)
self.assertEquals(dct.get('f'), 1)
def testDefaultArgs(self):
self.assertRaises(SyntaxError, compiler.parse, "def foo(a=1, b): pass")
def testDocstrings(self):
c = compiler.compile('"doc"', '<string>', 'exec')
self.assert_('__doc__' in c.co_names)
c = compiler.compile('def f():\n "doc"', '<string>', 'exec')
g = {}
exec(c, g)
self.assertEquals(g['f'].__doc__, "doc")
def testLineNo(self):
# Test that all nodes except Module have a correct lineno attribute.
filename = __file__
if filename.endswith((".pyc", ".pyo")):
filename = filename[:-1]
tree = compiler.parseFile(filename)
self.check_lineno(tree)
def check_lineno(self, node):
try:
self._check_lineno(node)
except AssertionError:
print(node.__class__, node.lineno)
raise
def _check_lineno(self, node):
if not node.__class__ in NOLINENO:
self.assert_(isinstance(node.lineno, int),
"lineno=%s on %s" % (node.lineno, node.__class__))
self.assert_(node.lineno > 0,
"lineno=%s on %s" % (node.lineno, node.__class__))
for child in node.getChildNodes():
self.check_lineno(child)
def testFlatten(self):
self.assertEquals(flatten([1, [2]]), [1, 2])
self.assertEquals(flatten((1, (2,))), [1, 2])
def testNestedScope(self):
c = compiler.compile('def g():\n'
' a = 1\n'
' def f(): return a + 2\n'
' return f()\n'
'result = g()',
'<string>',
'exec')
dct = {}
exec(c, dct)
self.assertEquals(dct.get('result'), 3)
c = compiler.compile('def g(a):\n'
' def f(): return a + 2\n'
' return f()\n'
'result = g(1)',
'<string>',
'exec')
dct = {}
exec(c, dct)
self.assertEquals(dct.get('result'), 3)
c = compiler.compile('def g((a, b)):\n'
' def f(): return a + b\n'
' return f()\n'
'result = g((1, 2))',
'<string>',
'exec')
dct = {}
exec(c, dct)
self.assertEquals(dct.get('result'), 3)
def testGenExp(self):
c = compiler.compile('list((i,j) for i in range(3) if i < 3'
' for j in range(4) if j > 2)',
'<string>',
'eval')
self.assertEquals(eval(c), [(0, 3), (1, 3), (2, 3)])
def testFuncAnnotations(self):
testdata = [
('def f(a: 1): pass', {'a': 1}),
('''def f(a, (b:1, c:2, d), e:3=4, f=5,
*g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass
''', {'b': 1, 'c': 2, 'e': 3, 'g': 6, 'h': 7, 'j': 9,
'k': 11, 'return': 12}),
]
for sourcecode, expected in testdata:
# avoid IndentationError: unexpected indent from trailing lines
sourcecode = sourcecode.rstrip()+'\n'
c = compiler.compile(sourcecode, '<string>', 'exec')
dct = {}
exec(c, dct)
self.assertEquals(dct['f'].__annotations__, expected)
def testWith(self):
# SF bug 1638243
c = compiler.compile('from __future__ import with_statement\n'
'def f():\n'
' with TrivialContext():\n'
' return 1\n'
'result = f()',
'<string>',
'exec' )
dct = {'TrivialContext': TrivialContext}
exec(c, dct)
self.assertEquals(dct.get('result'), 1)
def testWithAss(self):
c = compiler.compile('from __future__ import with_statement\n'
'def f():\n'
' with TrivialContext() as tc:\n'
' return 1\n'
'result = f()',
'<string>',
'exec' )
dct = {'TrivialContext': TrivialContext}
exec(c, dct)
self.assertEquals(dct.get('result'), 1)
def testBytesLiteral(self):
c = compiler.compile("b'foo'", '<string>', 'eval')
b = eval(c)
c = compiler.compile('def f(b=b"foo"):\n'
' b[0] += 1\n'
' return b\n'
'f(); f(); result = f()\n',
'<string>',
'exec')
dct = {}
exec(c, dct)
self.assertEquals(dct.get('result'), b"ioo")
c = compiler.compile('def f():\n'
' b = b"foo"\n'
' b[0] += 1\n'
' return b\n'
'f(); f(); result = f()\n',
'<string>',
'exec')
dct = {}
exec(c, dct)
self.assertEquals(dct.get('result'), b"goo")
NOLINENO = (compiler.ast.Module, compiler.ast.Stmt, compiler.ast.Discard)
###############################################################################
# code below is just used to trigger some possible errors, for the benefit of
# testLineNo
###############################################################################
class Toto:
"""docstring"""
pass
a, b = 2, 3
[c, d] = 5, 6
l = [(x, y) for x, y in zip(range(5), range(5,10))]
l[0]
l[3:4]
d = {'a': 2}
d = {}
t = ()
t = (1, 2)
l = []
l = [1, 2]
if l:
pass
else:
a, b = b, a
try:
print(yo)
except:
yo = 3
else:
yo += 3
try:
a += b
finally:
b = 0
from math import *
###############################################################################
def test_main(all=False):
global TEST_ALL
TEST_ALL = all or test.test_support.is_resource_enabled("compiler")
test.test_support.run_unittest(CompilerTest)
if __name__ == "__main__":
import sys
test_main('all' in sys.argv)

View File

@ -2452,49 +2452,6 @@ def keywords():
raise TestFailed("expected TypeError from bogus keyword "
"argument to %r" % constructor)
def restricted():
# XXX This test is disabled because rexec is not deemed safe
return
import rexec
if verbose:
print("Testing interaction with restricted execution ...")
sandbox = rexec.RExec()
code1 = """f = open(%r, 'w')""" % TESTFN
code2 = """f = open(%r, 'w')""" % TESTFN
code3 = """\
f = open(%r)
t = type(f) # a sneaky way to get the file() constructor
f.close()
f = t(%r, 'w') # rexec can't catch this by itself
""" % (TESTFN, TESTFN)
f = open(TESTFN, 'w') # Create the file so code3 can find it.
f.close()
try:
for code in code1, code2, code3:
try:
sandbox.r_exec(code)
except IOError as msg:
if str(msg).find("restricted") >= 0:
outcome = "OK"
else:
outcome = "got an exception, but not an expected one"
else:
outcome = "expected a restricted-execution exception"
if outcome != "OK":
raise TestFailed("%s, in %r" % (outcome, code))
finally:
try:
import os
os.unlink(TESTFN)
except:
pass
def str_subclass_as_dict_key():
if verbose:
print("Testing a str subclass used as dict key ..")
@ -4173,7 +4130,6 @@ def test_main():
supers()
inherits()
keywords()
restricted()
str_subclass_as_dict_key()
classic_comparisons()
rich_comparisons()

View File

@ -251,7 +251,7 @@ class ImportHooksTestCase(ImportHooksBaseTestCase):
i = ImpWrapper()
sys.meta_path.append(i)
sys.path_hooks.append(ImpWrapper)
mnames = ("colorsys", "urlparse", "distutils.core", "compiler.misc")
mnames = ("colorsys", "urlparse", "distutils.core")
for mname in mnames:
parent = mname.split(".")[0]
for n in list(sys.modules.keys()):

View File

@ -1,58 +0,0 @@
# Testing md5 module
import unittest
from md5 import md5
from test import test_support
def hexstr(s):
import string
h = string.hexdigits
r = ''
for c in s:
i = ord(c)
r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
return r
class MD5_Test(unittest.TestCase):
def md5test(self, s, expected):
self.assertEqual(hexstr(md5(s).digest()), expected)
self.assertEqual(md5(s).hexdigest(), expected)
def test_basics(self):
eq = self.md5test
eq('', 'd41d8cd98f00b204e9800998ecf8427e')
eq('a', '0cc175b9c0f1b6a831c399e269772661')
eq('abc', '900150983cd24fb0d6963f7d28e17f72')
eq('message digest', 'f96b697d7cb7938d525a2f31aaf161d0')
eq('abcdefghijklmnopqrstuvwxyz', 'c3fcd3d76192e4007dfb496cca67e13b')
eq('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
'd174ab98d277d9f5a5611c2c9f419d9f')
eq('12345678901234567890123456789012345678901234567890123456789012345678901234567890',
'57edf4a22be3c955ac49da2e2107b67a')
def test_hexdigest(self):
# hexdigest is new with Python 2.0
m = md5('testing the hexdigest method')
h = m.hexdigest()
self.assertEqual(hexstr(m.digest()), h)
def test_large_update(self):
aas = 'a' * 64
bees = 'b' * 64
cees = 'c' * 64
m1 = md5()
m1.update(aas)
m1.update(bees)
m1.update(cees)
m2 = md5()
m2.update(aas + bees + cees)
self.assertEqual(m1.digest(), m2.digest())
def test_main():
test_support.run_unittest(MD5_Test)
if __name__ == '__main__':
test_main()

View File

@ -3,7 +3,7 @@
# hashing algorithms.
#
import md5, sha, hmac
import hmac
def check_hash_module(module, key=None):
assert hasattr(module, 'digest_size'), "Must have digest_size"
@ -45,6 +45,4 @@ def check_hash_module(module, key=None):
if __name__ == '__main__':
check_hash_module(md5)
check_hash_module(sha)
check_hash_module(hmac, key='abc')

View File

@ -11,10 +11,6 @@ from unittest import TestCase
StaticMethodType = type(staticmethod(lambda: None))
ClassMethodType = type(classmethod(lambda c: None))
# This next line triggers an error on old versions of pyclbr.
from commands import getstatus
# Here we test the python class browser code.
#
# The main function in this suite, 'testModule', compares the output

View File

@ -1,52 +0,0 @@
# Testing sha module (NIST's Secure Hash Algorithm)
# use the three examples from Federal Information Processing Standards
# Publication 180-1, Secure Hash Standard, 1995 April 17
# http://www.itl.nist.gov/div897/pubs/fip180-1.htm
import sha
import unittest
from test import test_support
class SHATestCase(unittest.TestCase):
def check(self, data, digest):
# Check digest matches the expected value
obj = sha.new(data)
computed = obj.hexdigest()
self.assert_(computed == digest)
# Verify that the value doesn't change between two consecutive
# digest operations.
computed_again = obj.hexdigest()
self.assert_(computed == computed_again)
# Check hexdigest() output matches digest()'s output
digest = obj.digest()
hexd = ""
for c in digest:
hexd += '%02x' % ord(c)
self.assert_(computed == hexd)
def test_case_1(self):
self.check("abc",
"a9993e364706816aba3e25717850c26c9cd0d89d")
def test_case_2(self):
self.check("abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"84983e441c3bd26ebaae4aa1f95129e5e54670f1")
def test_case_3(self):
self.check("a" * 1000000,
"34aa973cd4c4daa4f61eeb2bdbad27316534016f")
def test_case_4(self):
self.check(chr(0xAA) * 80,
'4ca0ef38f1794b28a8f8ee110ee79d48ce13be25')
def test_main():
test_support.run_unittest(SHATestCase)
if __name__ == "__main__":
test_main()

View File

@ -49,7 +49,6 @@ import posixfile
import pstats
import py_compile
import pydoc
import rexec
import rlcompleter
import sched
import smtplib

View File

@ -5,7 +5,7 @@ import os
import shutil
import tempfile
import StringIO
import md5
from hashlib import md5
import errno
import unittest
@ -25,7 +25,7 @@ except ImportError:
bz2 = None
def md5sum(data):
return md5.new(data).hexdigest()
return md5(data).hexdigest()
def path(path):
return test_support.findfile(path)

View File

@ -1,35 +0,0 @@
import unittest
from test import test_support
from compiler import transformer, ast
from compiler import compile
class Tests(unittest.TestCase):
def testMultipleLHS(self):
""" Test multiple targets on the left hand side. """
snippets = ['a, b = 1, 2',
'(a, b) = 1, 2',
'((a, b), c) = (1, 2), 3']
for s in snippets:
a = transformer.parse(s)
assert isinstance(a, ast.Module)
child1 = a.getChildNodes()[0]
assert isinstance(child1, ast.Stmt)
child2 = child1.getChildNodes()[0]
assert isinstance(child2, ast.Assign)
# This actually tests the compiler, but it's a way to assure the ast
# is correct
c = compile(s, '<string>', 'single')
vals = {}
exec(c, vals)
assert vals['a'] == 1
assert vals['b'] == 2
def test_main():
test_support.run_unittest(Tests)
if __name__ == "__main__":
test_main()

View File

@ -31,11 +31,7 @@ DictType = DictionaryType = dict
def _f(): pass
FunctionType = type(_f)
LambdaType = type(lambda: None) # Same as FunctionType
try:
CodeType = type(_f.__code__)
except RuntimeError:
# Execution in restricted environment
pass
CodeType = type(_f.__code__)
def _g():
yield 1
@ -55,14 +51,9 @@ ModuleType = type(sys)
try:
raise TypeError
except TypeError:
try:
tb = sys.exc_info()[2]
TracebackType = type(tb)
FrameType = type(tb.tb_frame)
except AttributeError:
# In the restricted environment, exc_info returns (None, None,
# None) Then, tb.tb_frame gives an attribute error
pass
tb = sys.exc_info()[2]
TracebackType = type(tb)
FrameType = type(tb.tb_frame)
tb = None; del tb
SliceType = slice

View File

@ -535,8 +535,8 @@ def uuid1(node=None, clock_seq=None):
def uuid3(namespace, name):
"""Generate a UUID from the MD5 hash of a namespace UUID and a name."""
import md5
hash = md5.md5(namespace.bytes + name).digest()
import hashlib
hash = hashlib.md5(namespace.bytes + name).digest()
return UUID(bytes=hash[:16], version=3)
def uuid4():

View File

@ -26,6 +26,9 @@ TO DO
Core and Builtins
-----------------
- Remove the f_restricted attribute from frames. This naturally leads to the
removal of PyEval_GetRestricted() and PyFrame_IsRestricted().
- PEP 3132 was accepted. That means that you can do ``a, *b = range(5)``
to assign 0 to a and [1, 2, 3, 4] to b.
@ -175,11 +178,19 @@ Extension Modules
Library
-------
- Remove the compiler package. Use of the _ast module and (an eventual) AST ->
bytecode mechanism.
- Remove md5 and sha. Both have been deprecated since Python 2.5.
- Remove Bastion and rexec as they have been disabled since Python 2.3 (this
also leads to the C API support for restricted execution).
- Remove obsolete IRIX modules: al, cd, cl, fl, fm, gl, imgfile, sgi, sv.
- Remove bsddb185 module it was obsolete.
- Remove bsddb185 module; it was obsolete.
- Remove commands.getstatus() it was obsolete.
- Remove commands.getstatus(); it was obsolete.
- Remove functions in string and strop modules that are also string methods.

View File

@ -2701,21 +2701,8 @@ newPicklerobject(PyObject *file, int proto)
}
}
if (PyEval_GetRestricted()) {
/* Restricted execution, get private tables */
PyObject *m = PyImport_Import(copy_reg_str);
if (m == NULL)
goto err;
self->dispatch_table = PyObject_GetAttr(m, dispatch_table_str);
Py_DECREF(m);
if (self->dispatch_table == NULL)
goto err;
}
else {
self->dispatch_table = dispatch_table;
Py_INCREF(dispatch_table);
}
self->dispatch_table = dispatch_table;
Py_INCREF(dispatch_table);
PyObject_GC_Track(self);
return self;

View File

@ -207,15 +207,6 @@ open_the_file(PyFileObject *f, char *name, char *mode)
goto cleanup;
}
/* rexec.py can't stop a user from getting the file() constructor --
all they have to do is get *any* file object f, and then do
type(f). Here we prevent them from doing damage with it. */
if (PyEval_GetRestricted()) {
PyErr_SetString(PyExc_IOError,
"file() constructor not accessible in restricted mode");
f = NULL;
goto cleanup;
}
errno = 0;
#ifdef MS_WINDOWS

View File

@ -340,18 +340,12 @@ frame_settrace(PyFrameObject *f, PyObject* v, void *closure)
return 0;
}
static PyObject *
frame_getrestricted(PyFrameObject *f, void *closure)
{
return PyBool_FromLong(PyFrame_IsRestricted(f));
}
static PyGetSetDef frame_getsetlist[] = {
{"f_locals", (getter)frame_getlocals, NULL, NULL},
{"f_lineno", (getter)frame_getlineno,
(setter)frame_setlineno, NULL},
{"f_trace", (getter)frame_gettrace, (setter)frame_settrace, NULL},
{"f_restricted",(getter)frame_getrestricted,NULL, NULL},
{0}
};

View File

@ -234,21 +234,9 @@ static PyMemberDef func_memberlist[] = {
{NULL} /* Sentinel */
};
static int
restricted(void)
{
if (!PyEval_GetRestricted())
return 0;
PyErr_SetString(PyExc_RuntimeError,
"function attributes not accessible in restricted mode");
return 1;
}
static PyObject *
func_get_dict(PyFunctionObject *op)
{
if (restricted())
return NULL;
if (op->func_dict == NULL) {
op->func_dict = PyDict_New();
if (op->func_dict == NULL)
@ -263,8 +251,6 @@ func_set_dict(PyFunctionObject *op, PyObject *value)
{
PyObject *tmp;
if (restricted())
return -1;
/* It is illegal to del f.func_dict */
if (value == NULL) {
PyErr_SetString(PyExc_TypeError,
@ -287,8 +273,6 @@ func_set_dict(PyFunctionObject *op, PyObject *value)
static PyObject *
func_get_code(PyFunctionObject *op)
{
if (restricted())
return NULL;
Py_INCREF(op->func_code);
return op->func_code;
}
@ -299,8 +283,6 @@ func_set_code(PyFunctionObject *op, PyObject *value)
PyObject *tmp;
Py_ssize_t nfree, nclosure;
if (restricted())
return -1;
/* Not legal to del f.func_code or to set it to anything
* other than a code object. */
if (value == NULL || !PyCode_Check(value)) {
@ -338,8 +320,6 @@ func_set_name(PyFunctionObject *op, PyObject *value)
{
PyObject *tmp;
if (restricted())
return -1;
/* Not legal to del f.func_name or to set it to anything
* other than a string object. */
if (value == NULL || !PyString_Check(value)) {
@ -357,8 +337,6 @@ func_set_name(PyFunctionObject *op, PyObject *value)
static PyObject *
func_get_defaults(PyFunctionObject *op)
{
if (restricted())
return NULL;
if (op->func_defaults == NULL) {
Py_INCREF(Py_None);
return Py_None;
@ -372,8 +350,6 @@ func_set_defaults(PyFunctionObject *op, PyObject *value)
{
PyObject *tmp;
if (restricted())
return -1;
/* Legal to del f.func_defaults.
* Can only set func_defaults to NULL or a tuple. */
if (value == Py_None)
@ -393,8 +369,6 @@ func_set_defaults(PyFunctionObject *op, PyObject *value)
static PyObject *
func_get_kwdefaults(PyFunctionObject *op)
{
if (restricted())
return NULL;
if (op->func_kwdefaults == NULL) {
Py_INCREF(Py_None);
return Py_None;
@ -408,9 +382,6 @@ func_set_kwdefaults(PyFunctionObject *op, PyObject *value)
{
PyObject *tmp;
if (restricted())
return -1;
if (value == Py_None)
value = NULL;
/* Legal to del f.func_kwdefaults.

View File

@ -158,11 +158,7 @@ static PyObject *
meth_get__self__(PyCFunctionObject *m, void *closure)
{
PyObject *self;
if (PyEval_GetRestricted()) {
PyErr_SetString(PyExc_RuntimeError,
"method.__self__ not accessible in restricted mode");
return NULL;
}
self = m->m_self;
if (self == NULL)
self = Py_None;

View File

@ -3366,13 +3366,6 @@ PyEval_GetFrame(void)
return _PyThreadState_GetFrame(tstate);
}
int
PyEval_GetRestricted(void)
{
PyFrameObject *current_frame = PyEval_GetFrame();
return current_frame == NULL ? 0 : PyFrame_IsRestricted(current_frame);
}
int
PyEval_MergeCompilerFlags(PyCompilerFlags *cf)
{

View File

@ -798,13 +798,7 @@ r_object(RFILE *p)
return v3;
case TYPE_CODE:
if (PyEval_GetRestricted()) {
PyErr_SetString(PyExc_RuntimeError,
"cannot unmarshal code objects in "
"restricted execution mode");
return NULL;
}
else {
{
int argcount;
int kwonlyargcount;
int nlocals;
@ -823,7 +817,7 @@ r_object(RFILE *p)
v = NULL;
/* XXX ignore long->int overflows for now */
/* XXX ignore long->int overflows for now */
argcount = (int)r_long(p);
kwonlyargcount = (int)r_long(p);
nlocals = (int)r_long(p);
@ -876,8 +870,8 @@ r_object(RFILE *p)
Py_XDECREF(name);
Py_XDECREF(lnotab);
return v;
}
return v;
default:
/* Bogus data got written, which isn't ideal.

View File

@ -54,11 +54,7 @@ PyObject *
PyMember_GetOne(const char *addr, PyMemberDef *l)
{
PyObject *v;
if ((l->flags & READ_RESTRICTED) &&
PyEval_GetRestricted()) {
PyErr_SetString(PyExc_RuntimeError, "restricted attribute");
return NULL;
}
addr += l->offset;
switch (l->type) {
case T_BYTE:
@ -167,10 +163,6 @@ PyMember_SetOne(char *addr, PyMemberDef *l, PyObject *v)
PyErr_SetString(PyExc_AttributeError, "readonly attribute");
return -1;
}
if ((l->flags & WRITE_RESTRICTED) && PyEval_GetRestricted()) {
PyErr_SetString(PyExc_RuntimeError, "restricted attribute");
return -1;
}
if (v == NULL && l->type != T_OBJECT_EX && l->type != T_OBJECT) {
PyErr_SetString(PyExc_TypeError,
"can't delete numeric/char attribute");

View File

@ -1,8 +0,0 @@
Fred L. Drake, Jr.
Mark Hammond
Shane Hathaway
Neil Schemenauer
Evan Simpson
Greg Stein
Bill Tutt
Moshe Zadka

View File

@ -1,18 +0,0 @@
This directory contains support tools for the Python compiler package,
which is now part of the standard library.
compile.py Demo that compiles a Python module into a .pyc file
using the pure-Python compiler code.
demo.py Prints the names of all the methods defined in a module,
as a demonstration of walking through the abstract syntax
tree produced by the parser.
dumppyc.py Dumps the contents of a .pyc file, printing
the attributes of the code object followed by a
code disassembly.
regrtest.py Runs the Python test suite using bytecode generated
by the pure-Python compiler code instead of the
builtin compiler.

View File

@ -1,105 +0,0 @@
# This file describes the nodes of the AST in ast.py. The module is
# generated by astgen.py.
# The descriptions use the following special notation to describe
# properties of the children:
# * this child is not a node
# ! this child is a sequence that contains nodes in it
# & this child may be set to None
# = ... a default value for the node constructor (optional args)
#
# If you add node types here, please be sure to update the list of
# Node types in Doc/lib/asttable.tex.
Module: doc*, node
Stmt: nodes!
Decorators: nodes!
Function: decorators&, name*, arguments!, defaults!, kwonlyargs!, returns&, flags*, doc*, code
Lambda: arguments!, defaults!, kwonlyargs!, flags*, code
SimpleArg: name*, annotation&
NestedArgs: args!
Kwarg: arg, expr
Class: name*, bases!, doc*, code
Pass:
Break:
Continue:
For: assign, list, body, else_&
While: test, body, else_&
With: expr, vars&, body
If: tests!, else_&
IfExp: test, then, else_
From: modname*, names*, level*
Import: names*
Raise: expr1&, expr2&, expr3&
TryFinally: body, final
TryExcept: body, handlers!, else_&
Return: value
Yield: value
Const: value*
Discard: expr
AugAssign: node, op*, expr
Assign: nodes!, expr
AssTuple: nodes!
AssList: nodes!
AssName: name*, flags*
AssAttr: expr, attrname*, flags*
ListComp: expr, quals!
ListCompFor: assign, list, ifs!
ListCompIf: test
GenExpr: code
GenExprInner: expr, quals!
GenExprFor: assign, iter, ifs!
GenExprIf: test
List: nodes!
Dict: items!
Set: items!
Not: expr
Compare: expr, ops!
Name: name*
Global: names*
Getattr: expr, attrname*
CallFunc: node, args!, star_args& = None, dstar_args& = None
Keyword: name*, expr
Subscript: expr, flags*, subs!
Sliceobj: nodes!
Slice: expr, flags*, lower&, upper&
Assert: test, fail&
Tuple: nodes!
Or: nodes!
And: nodes!
Bitor: nodes!
Bitxor: nodes!
Bitand: nodes!
LeftShift: (left, right)
RightShift: (left, right)
Add: (left, right)
Sub: (left, right)
Mul: (left, right)
Div: (left, right)
Mod: (left, right)
Power: (left, right)
FloorDiv: (left, right)
UnaryAdd: expr
UnarySub: expr
Invert: expr
init(Function):
self.varargs = self.kwargs = None
if flags & CO_VARARGS:
self.varargs = 1
if flags & CO_VARKEYWORDS:
self.kwargs = 1
init(Lambda):
self.varargs = self.kwargs = None
if flags & CO_VARARGS:
self.varargs = 1
if flags & CO_VARKEYWORDS:
self.kwargs = 1
self.returns = None
init(GenExpr):
self.arguments = [SimpleArg('.0', None)]
self.varargs = self.kwargs = None
self.kwonlyargs = ()
init(GenExprFor):
self.is_outmost = False

View File

@ -1,292 +0,0 @@
"""Generate ast module from specification
This script generates the ast module from a simple specification,
which makes it easy to accommodate changes in the grammar.  This
approach would be quite reasonable if the grammar changed often.
Instead, it is rather complex to generate the appropriate code. And
the Node interface has changed more often than the grammar.
"""
import fileinput
import getopt
import re
import sys
from StringIO import StringIO
SPEC = "ast.txt"
COMMA = ", "
def load_boilerplate(file):
f = open(file)
buf = f.read()
f.close()
i = buf.find('### ''PROLOGUE')
j = buf.find('### ''EPILOGUE')
pro = buf[i+12:j].strip()
epi = buf[j+12:].strip()
return pro, epi
def strip_default(arg):
"""Return the argname from an 'arg = default' string"""
i = arg.find('=')
if i == -1:
return arg
t = arg[:i].strip()
return t
P_NODE = 1
P_OTHER = 2
P_NESTED = 3
P_NONE = 4
class NodeInfo:
"""Each instance describes a specific AST node"""
def __init__(self, name, args):
self.name = name
self.args = args.strip()
self.argnames = self.get_argnames()
self.argprops = self.get_argprops()
self.nargs = len(self.argnames)
self.init = []
def get_argnames(self):
if '(' in self.args:
i = self.args.find('(')
j = self.args.rfind(')')
args = self.args[i+1:j]
else:
args = self.args
return [strip_default(arg.strip())
for arg in args.split(',') if arg]
def get_argprops(self):
"""Each argument can have a property like '*' or '!'
XXX This method modifies the argnames in place!
"""
d = {}
hardest_arg = P_NODE
for i in range(len(self.argnames)):
arg = self.argnames[i]
if arg.endswith('*'):
arg = self.argnames[i] = arg[:-1]
d[arg] = P_OTHER
hardest_arg = max(hardest_arg, P_OTHER)
elif arg.endswith('!'):
arg = self.argnames[i] = arg[:-1]
d[arg] = P_NESTED
hardest_arg = max(hardest_arg, P_NESTED)
elif arg.endswith('&'):
arg = self.argnames[i] = arg[:-1]
d[arg] = P_NONE
hardest_arg = max(hardest_arg, P_NONE)
else:
d[arg] = P_NODE
self.hardest_arg = hardest_arg
if hardest_arg > P_NODE:
self.args = self.args.replace('*', '')
self.args = self.args.replace('!', '')
self.args = self.args.replace('&', '')
return d
def gen_source(self):
buf = StringIO()
print("class %s(Node):" % self.name, file=buf)
self._gen_init(buf)
print(file=buf)
self._gen_getChildren(buf)
print(file=buf)
self._gen_getChildNodes(buf)
print(file=buf)
self._gen_repr(buf)
buf.seek(0, 0)
return buf.read()
def _gen_init(self, buf):
if self.args:
print(" def __init__(self, %s, lineno=None):" % self.args, file=buf)
else:
print(" def __init__(self, lineno=None):", file=buf)
if self.argnames:
for name in self.argnames:
print(" self.%s = %s" % (name, name), file=buf)
print(" self.lineno = lineno", file=buf)
# Copy the lines in self.init, indented four spaces. The rstrip()
# business is to get rid of the four spaces if line happens to be
# empty, so that reindent.py is happy with the output.
for line in self.init:
print((" " + line).rstrip(), file=buf)
def _gen_getChildren(self, buf):
print(" def getChildren(self):", file=buf)
if len(self.argnames) == 0:
print(" return ()", file=buf)
else:
if self.hardest_arg < P_NESTED:
clist = COMMA.join(["self.%s" % c
for c in self.argnames])
if self.nargs == 1:
print(" return %s," % clist, file=buf)
else:
print(" return %s" % clist, file=buf)
else:
if len(self.argnames) == 1:
print(" return tuple(flatten(self.%s))" % self.argnames[0], file=buf)
else:
print(" children = []", file=buf)
template = " children.%s(%sself.%s%s)"
for name in self.argnames:
if self.argprops[name] == P_NESTED:
print(template % ("extend", "flatten(",
name, ")"), file=buf)
else:
print(template % ("append", "", name, ""), file=buf)
print(" return tuple(children)", file=buf)
def _gen_getChildNodes(self, buf):
print(" def getChildNodes(self):", file=buf)
if len(self.argnames) == 0:
print(" return ()", file=buf)
else:
if self.hardest_arg < P_NESTED:
clist = ["self.%s" % c
for c in self.argnames
if self.argprops[c] == P_NODE]
if len(clist) == 0:
print(" return ()", file=buf)
elif len(clist) == 1:
print(" return %s," % clist[0], file=buf)
else:
print(" return %s" % COMMA.join(clist), file=buf)
else:
print(" nodelist = []", file=buf)
template = " nodelist.%s(%sself.%s%s)"
for name in self.argnames:
if self.argprops[name] == P_NONE:
tmp = (" if self.%s is not None:\n"
" nodelist.append(self.%s)")
print(tmp % (name, name), file=buf)
elif self.argprops[name] == P_NESTED:
print(template % ("extend", "flatten_nodes(",
name, ")"), file=buf)
elif self.argprops[name] == P_NODE:
print(template % ("append", "", name, ""), file=buf)
print(" return tuple(nodelist)", file=buf)
def _gen_repr(self, buf):
print(" def __repr__(self):", file=buf)
if self.argnames:
fmt = COMMA.join(["%s"] * self.nargs)
if '(' in self.args:
fmt = '(%s)' % fmt
vals = ["repr(self.%s)" % name for name in self.argnames]
vals = COMMA.join(vals)
if self.nargs == 1:
vals = vals + ","
print(' return "%s(%s)" %% (%s)' % \
(self.name, fmt, vals), file=buf)
else:
print(' return "%s()"' % self.name, file=buf)
rx_init = re.compile('init\((.*)\):')
def parse_spec(file):
classes = {}
cur = None
for line in fileinput.input(file):
if line.strip().startswith('#'):
continue
mo = rx_init.search(line)
if mo is None:
if cur is None:
# a normal entry
try:
name, args = line.split(':')
except ValueError:
continue
classes[name] = NodeInfo(name, args)
cur = None
else:
# some code for the __init__ method
cur.init.append(line)
else:
# some extra code for a Node's __init__ method
name = mo.group(1)
cur = classes[name]
return sorted(classes.values(), key=lambda n: n.name)
def main():
prologue, epilogue = load_boilerplate(sys.argv[-1])
print(prologue)
print()
classes = parse_spec(SPEC)
for info in classes:
print(info.gen_source())
print(epilogue)
if __name__ == "__main__":
main()
sys.exit(0)
### PROLOGUE
"""Python abstract syntax node definitions
This file is automatically generated by Tools/compiler/astgen.py
"""
from compiler.consts import CO_VARARGS, CO_VARKEYWORDS
def flatten(seq):
l = []
for elt in seq:
t = type(elt)
if t is tuple or t is list:
for elt2 in flatten(elt):
l.append(elt2)
else:
l.append(elt)
return l
def flatten_nodes(seq):
return [n for n in flatten(seq) if isinstance(n, Node)]
nodes = {}
class Node:
"""Abstract base class for ast nodes."""
def getChildren(self):
pass # implemented by subclasses
def __iter__(self):
for n in self.getChildren():
yield n
def asList(self): # for backwards compatibility
return self.getChildren()
def getChildNodes(self):
pass # implemented by subclasses
class EmptyNode(Node):
def getChildNodes(self):
return ()
def getChildren(self):
return ()
class Expression(Node):
# Expression is an artificial node class to support "eval"
nodes["expression"] = "Expression"
def __init__(self, node):
self.node = node
def getChildren(self):
return self.node,
def getChildNodes(self):
return self.node,
def __repr__(self):
return "Expression(%s)" % (repr(self.node))
### EPILOGUE
for name, obj in globals().items():
if isinstance(obj, type) and issubclass(obj, Node):
nodes[name.lower()] = obj

View File

@ -1,51 +0,0 @@
import sys
import getopt
from compiler import compileFile, visitor
import profile
def main():
VERBOSE = 0
DISPLAY = 0
PROFILE = 0
CONTINUE = 0
opts, args = getopt.getopt(sys.argv[1:], 'vqdcp')
for k, v in opts:
if k == '-v':
VERBOSE = 1
visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1
if k == '-q':
if sys.platform[:3]=="win":
f = open('nul', 'wb') # /dev/null fails on Windows...
else:
f = open('/dev/null', 'wb')
sys.stdout = f
if k == '-d':
DISPLAY = 1
if k == '-c':
CONTINUE = 1
if k == '-p':
PROFILE = 1
if not args:
print "no files to compile"
else:
for filename in args:
if VERBOSE:
print filename
try:
if PROFILE:
profile.run('compileFile(%r, %r)' % (filename, DISPLAY),
filename + ".prof")
else:
compileFile(filename, DISPLAY)
except SyntaxError as err:
print err
if err.lineno is not None:
print err.lineno
if not CONTINUE:
sys.exit(-1)
if __name__ == "__main__":
main()

View File

@ -1,38 +0,0 @@
#! /usr/bin/env python
"""Print names of all methods defined in module
This script demonstrates use of the visitor interface of the compiler
package.  (Legacy Python 2 example, removed with that package.)
"""
import compiler
class MethodFinder:
    """Print the names of all the methods
    Each visit method takes two arguments, the node and its current
    scope. The scope is the name of the current class or None.
    """
    def visitClass(self, node, scope=None):
        # Descend into the class body using the class name as the scope.
        self.visit(node.code, node.name)
    def visitFunction(self, node, scope=None):
        if scope is not None:
            # A function visited inside a class scope is a method.
            print "%s.%s" % (scope, node.name)
        # Nested functions are not methods, so reset the scope.
        self.visit(node.code, None)
def main(files):
    # Parse each file and walk its AST with a single MethodFinder.
    mf = MethodFinder()
    for file in files:
        f = open(file)
        buf = f.read()
        f.close()
        ast = compiler.parse(buf)
        compiler.walk(ast, mf)
if __name__ == "__main__":
    import sys
    main(sys.argv[1:])

View File

@ -1,47 +0,0 @@
#! /usr/bin/env python
# Legacy Python 2 tool: dump and disassemble code objects, either from a
# compiled .pyc file or from a freshly compiled .py source.
import marshal
import os
import dis
import types
def dump(obj):
    # Print the object followed by all of its co_* code attributes.
    print obj
    for attr in dir(obj):
        if attr.startswith('co_'):
            val = getattr(obj, attr)
            print "\t", attr, repr(val)
def loadCode(path):
    # NOTE(review): opens in text mode; presumably safe only on Unix —
    # marshal data read from a .pyc should use binary mode. Verify.
    f = open(path)
    # Skip the 8-byte .pyc header (magic number + timestamp in this era).
    f.read(8)
    co = marshal.load(f)
    f.close()
    return co
def walk(co, match=None):
    """Dump *co* (and disassemble it) if it matches *match* or no match
    was requested, then recurse into nested code objects in co_consts."""
    if match is None or co.co_name == match:
        dump(co)
        print
        dis.dis(co)
    for obj in co.co_consts:
        if type(obj) == types.CodeType:
            walk(obj, match)
def load(filename, codename=None):
    # Load a .pyc file and walk its code objects.
    co = loadCode(filename)
    walk(co, codename)
if __name__ == "__main__":
    import sys
    # Usage: dumppyc.py FILE [CODENAME]
    if len(sys.argv) == 3:
        filename, codename = sys.argv[1:]
    else:
        filename = sys.argv[1]
        codename = None
    if filename.endswith('.py'):
        # Compile raw source in-process instead of reading a .pyc.
        buf = open(filename).read()
        co = compile(buf, filename, "exec")
        walk(co)
    else:
        load(filename, codename)

View File

@ -1,83 +0,0 @@
"""Run the Python regression test using the compiler
This test runs the standard Python test suite using bytecode generated
by this compiler instead of by the builtin compiler.
The regression test is run with the interpreter in verbose mode so
that import problems can be observed easily.
"""
from compiler import compileFile
import os
import sys
import test
import tempfile
def copy_test_suite():
dest = tempfile.mkdtemp()
os.system("cp -r %s/* %s" % (test.__path__[0], dest))
print "Creating copy of test suite in", dest
return dest
def copy_library():
dest = tempfile.mkdtemp()
libdir = os.path.split(test.__path__[0])[0]
print "Found standard library in", libdir
print "Creating copy of standard library in", dest
os.system("cp -r %s/* %s" % (libdir, dest))
return dest
def compile_files(dir):
print "Compiling", dir, "\n\t",
line_len = 10
for file in os.listdir(dir):
base, ext = os.path.splitext(file)
if ext == '.py':
source = os.path.join(dir, file)
line_len = line_len + len(file) + 1
if line_len > 75:
print "\n\t",
line_len = len(source) + 9
print file,
try:
compileFile(source)
except SyntaxError as err:
print err
continue
# make sure the .pyc file is not over-written
os.chmod(source + "c", 444)
elif file == 'CVS':
pass
else:
path = os.path.join(dir, file)
if os.path.isdir(path):
print
print
compile_files(path)
print "\t",
line_len = 10
print
def run_regrtest(lib_dir):
test_dir = os.path.join(lib_dir, "test")
os.chdir(test_dir)
os.system("PYTHONPATH=%s %s -v regrtest.py" % (lib_dir, sys.executable))
def cleanup(dir):
os.system("rm -rf %s" % dir)
def raw_input(prompt):
sys.stdout.write(prompt)
sys.stdout.flush()
return sys.stdin.readline()
def main():
lib_dir = copy_library()
compile_files(lib_dir)
run_regrtest(lib_dir)
raw_input("Cleanup?")
cleanup(lib_dir)
if __name__ == "__main__":
main()

View File

@ -1,43 +0,0 @@
# Legacy Python 2 tool: compare stack sizes of code objects produced by
# the builtin compile() against those from the compiler package.
import compiler
import dis
import types
def extract_code_objects(co):
    # Return *co* plus its immediate nested code objects (one level deep).
    l = [co]
    for const in co.co_consts:
        if type(const) == types.CodeType:
            l.append(const)
    return l
def compare(a, b):
    # Code objects should carry the same name, except the top-level
    # module code (named "?" in this era) and lambdas.
    if not (a.co_name == "?" or a.co_name.startswith('<lambda')):
        assert a.co_name == b.co_name, (a, b)
    if a.co_stacksize != b.co_stacksize:
        print "stack mismatch %s: %d vs. %d" % (a.co_name,
                                                a.co_stacksize,
                                                b.co_stacksize)
        if a.co_stacksize > b.co_stacksize:
            # Only fail when the compiler package under-allocates stack,
            # which could cause stack overflow at runtime.
            print "good code"
            dis.dis(a)
            print "bad code"
            dis.dis(b)
            assert 0
def main(files):
    # Compile each file with both compilers and compare code objects
    # pairwise; files that don't compile with the builtin are skipped.
    for file in files:
        print file
        buf = open(file).read()
        try:
            co1 = compile(buf, file, "exec")
        except SyntaxError:
            print "skipped"
            continue
        co2 = compiler.compile(buf, file, "exec")
        co1l = extract_code_objects(co1)
        co2l = extract_code_objects(co2)
        for a, b in zip(co1l, co2l):
            compare(a, b)
if __name__ == "__main__":
    import sys
    main(sys.argv[1:])