remove old metaclass demos

This commit is contained in:
Benjamin Peterson 2009-05-07 19:14:14 +00:00
parent 993527485e
commit 34c044ff5b
8 changed files with 0 additions and 1704 deletions

View File

@ -1,113 +0,0 @@
"""Support Eiffel-style preconditions and postconditions.
For example,
class C:
def m1(self, arg):
require arg > 0
return whatever
ensure Result > arg
can be written (clumsily, I agree) as:
class C(Eiffel):
def m1(self, arg):
return whatever
def m1_pre(self, arg):
assert arg > 0
def m1_post(self, Result, arg):
assert Result > arg
Pre- and post-conditions for a method, being implemented as methods
themselves, are inherited independently from the method. This gives
much of the same effect of Eiffel, where pre- and post-conditions are
inherited when a method is overridden by a derived class. However,
when a derived class in Python needs to extend a pre- or
post-condition, it must manually merge the base class' pre- or
post-condition with that defined in the derived class', for example:
class D(C):
def m1(self, arg):
return arg**2
def m1_post(self, Result, arg):
C.m1_post(self, Result, arg)
assert Result < 100
This gives derived classes more freedom but also more responsibility
than in Eiffel, where the compiler automatically takes care of this.
In Eiffel, pre-conditions combine using contravariance, meaning a
derived class can only make a pre-condition weaker; in Python, this is
up to the derived class. For example, a derived class that takes away
the requirement that arg > 0 could write:
def m1_pre(self, arg):
pass
but one could equally write a derived class that makes a stronger
requirement:
def m1_pre(self, arg):
require arg > 50
It would be easy to modify the classes shown here so that pre- and
post-conditions can be disabled (separately, on a per-class basis).
A different design would have the pre- or post-condition testing
functions return true for success and false for failure. This would
make it possible to implement automatic combination of inherited
and new pre-/post-conditions. All this is left as an exercise to the
reader.
"""
from Meta import MetaClass, MetaHelper, MetaMethodWrapper


class EiffelMethodWrapper(MetaMethodWrapper):

    def __init__(self, func, inst):
        MetaMethodWrapper.__init__(self, func, inst)
        # Note that the following causes recursive wrappers around
        # the pre-/post-condition testing methods.  These are harmless
        # but inefficient; to avoid them, the lookup must be done
        # using the class.
        try:
            self.pre = getattr(inst, self.__name__ + "_pre")
        except AttributeError:
            self.pre = None
        try:
            self.post = getattr(inst, self.__name__ + "_post")
        except AttributeError:
            self.post = None

    def __call__(self, *args, **kw):
        if self.pre:
            self.pre(*args, **kw)
        Result = self.func(self.inst, *args, **kw)
        if self.post:
            self.post(Result, *args, **kw)
        return Result


class EiffelHelper(MetaHelper):
    __methodwrapper__ = EiffelMethodWrapper


class EiffelMetaClass(MetaClass):
    __helper__ = EiffelHelper


Eiffel = EiffelMetaClass('Eiffel', (), {})


def _test():
    class C(Eiffel):
        def m1(self, arg):
            return arg+1
        def m1_pre(self, arg):
            assert arg > 0, "precondition for m1 failed"
        def m1_post(self, Result, arg):
            assert Result > arg
    x = C()
    x.m1(12)
##    x.m1(-1)

if __name__ == '__main__':
    _test()
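
# ----------------------------------------------------------------------
# A hypothetical sketch of the same contract idea using a present-day,
# type-derived metaclass (ContractMeta and Account are invented names,
# not part of the demo above).  Contracts defined in the same class body
# are applied automatically; inherited contracts would still need the
# manual merging discussed in the module docstring.

import functools

class ContractMeta(type):
    def __new__(mcls, name, bases, ns):
        for attr, func in list(ns.items()):
            if (not callable(func) or attr.startswith('_')
                    or attr.endswith('_pre') or attr.endswith('_post')):
                continue
            pre = ns.get(attr + '_pre')
            post = ns.get(attr + '_post')
            def make_wrapper(func=func, pre=pre, post=post):
                @functools.wraps(func)
                def wrapper(self, *args, **kw):
                    if pre:
                        pre(self, *args, **kw)
                    Result = func(self, *args, **kw)
                    if post:
                        post(self, Result, *args, **kw)
                    return Result
                return wrapper
            ns[attr] = make_wrapper()
        return super().__new__(mcls, name, bases, ns)

class Account(metaclass=ContractMeta):
    def __init__(self, balance=0):
        self.balance = balance
    def deposit(self, amount):
        self.balance += amount
        return self.balance
    def deposit_pre(self, amount):
        assert amount > 0
    def deposit_post(self, Result, amount):
        assert Result >= amount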

View File

@ -1,168 +0,0 @@
"""Enumeration metaclass.
XXX This is very much a work in progress.
"""
import string
class EnumMetaClass:
"""Metaclass for enumeration.
To define your own enumeration, do something like
class Color(Enum):
red = 1
green = 2
blue = 3
Now, Color.red, Color.green and Color.blue behave totally
different: they are enumerated values, not integers.
Enumerations cannot be instantiated; however they can be
subclassed.
"""
def __init__(self, name, bases, dict):
"""Constructor -- create an enumeration.
Called at the end of the class statement. The arguments are
the name of the new class, a tuple containing the base
classes, and a dictionary containing everything that was
entered in the class' namespace during execution of the class
statement. In the above example, it would be {'red': 1,
'green': 2, 'blue': 3}.
"""
for base in bases:
if base.__class__ is not EnumMetaClass:
raise TypeError("Enumeration base class must be enumeration")
bases = [x for x in bases if x is not Enum]
self.__name__ = name
self.__bases__ = bases
self.__dict = {}
for key, value in dict.items():
self.__dict[key] = EnumInstance(name, key, value)
def __getattr__(self, name):
"""Return an enumeration value.
For example, Color.red returns the value corresponding to red.
XXX Perhaps the values should be created in the constructor?
This looks in the class dictionary and if it is not found
there asks the base classes.
The special attribute __members__ returns the list of names
defined in this class (it does not merge in the names defined
in base classes).
"""
if name == '__members__':
return list(self.__dict.keys())
try:
return self.__dict[name]
except KeyError:
for base in self.__bases__:
try:
return getattr(base, name)
except AttributeError:
continue
raise AttributeError(name)
def __repr__(self):
s = self.__name__
if self.__bases__:
s = s + '(' + string.join([x.__name__ for x in self.__bases__], ", ") + ')'
if self.__dict:
list = []
for key, value in self.__dict.items():
list.append("%s: %s" % (key, int(value)))
s = "%s: {%s}" % (s, string.join(list, ", "))
return s
class EnumInstance:
"""Class to represent an enumeration value.
EnumInstance('Color', 'red', 12) prints as 'Color.red' and behaves
like the integer 12 when compared, but doesn't support arithmetic.
XXX Should it record the actual enumeration rather than just its
name?
"""
def __init__(self, classname, enumname, value):
self.__classname = classname
self.__enumname = enumname
self.__value = value
def __int__(self):
return self.__value
def __repr__(self):
return "EnumInstance(%r, %r, %r)" % (self.__classname,
self.__enumname,
self.__value)
def __str__(self):
return "%s.%s" % (self.__classname, self.__enumname)
def __cmp__(self, other):
return cmp(self.__value, int(other))
# Create the base class for enumerations.
# It is an empty enumeration.
Enum = EnumMetaClass("Enum", (), {})
def _test():
class Color(Enum):
red = 1
green = 2
blue = 3
print(Color.red)
print(dir(Color))
print(Color.red == Color.red)
print(Color.red == Color.blue)
print(Color.red == 1)
print(Color.red == 2)
class ExtendedColor(Color):
white = 0
orange = 4
yellow = 5
purple = 6
black = 7
print(ExtendedColor.orange)
print(ExtendedColor.red)
print(Color.red == ExtendedColor.red)
class OtherColor(Enum):
white = 4
blue = 5
class MergedColor(Color, OtherColor):
pass
print(MergedColor.red)
print(MergedColor.white)
print(Color)
print(ExtendedColor)
print(OtherColor)
print(MergedColor)
if __name__ == '__main__':
_test()
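
# ----------------------------------------------------------------------
# For comparison only (not part of the demo above): since Python 3.4 the
# standard library ships this idea as the enum module, built on a real
# type-derived metaclass.  A minimal usage sketch:

import enum

class StdColor(enum.Enum):
    red = 1
    green = 2
    blue = 3

# print(StdColor.red)          -> StdColor.red
# print(StdColor.red == 1)     -> False (members are not plain integers)
# print(StdColor.red.value)    -> 1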

View File

@ -1,118 +0,0 @@
"""Generic metaclass.
XXX This is very much a work in progress.
"""
import types
class MetaMethodWrapper:
def __init__(self, func, inst):
self.func = func
self.inst = inst
self.__name__ = self.func.__name__
def __call__(self, *args, **kw):
return self.func(self.inst, *args, **kw)
class MetaHelper:
__methodwrapper__ = MetaMethodWrapper # For derived helpers to override
def __helperinit__(self, formalclass):
self.__formalclass__ = formalclass
def __getattr__(self, name):
# Invoked for any attr not in the instance's __dict__
try:
raw = self.__formalclass__.__getattr__(name)
except AttributeError:
try:
ga = self.__formalclass__.__getattr__('__usergetattr__')
except (KeyError, AttributeError):
raise AttributeError(name)
return ga(self, name)
if type(raw) != types.FunctionType:
return raw
return self.__methodwrapper__(raw, self)
class MetaClass:
"""A generic metaclass.
This can be subclassed to implement various kinds of meta-behavior.
"""
__helper__ = MetaHelper # For derived metaclasses to override
__inited = 0
def __init__(self, name, bases, dict):
try:
ga = dict['__getattr__']
except KeyError:
pass
else:
dict['__usergetattr__'] = ga
del dict['__getattr__']
self.__name__ = name
self.__bases__ = bases
self.__realdict__ = dict
self.__inited = 1
def __getattr__(self, name):
try:
return self.__realdict__[name]
except KeyError:
for base in self.__bases__:
try:
return base.__getattr__(name)
except AttributeError:
pass
raise AttributeError(name)
def __setattr__(self, name, value):
if not self.__inited:
self.__dict__[name] = value
else:
self.__realdict__[name] = value
def __call__(self, *args, **kw):
inst = self.__helper__()
inst.__helperinit__(self)
try:
init = inst.__getattr__('__init__')
except AttributeError:
init = lambda: None
init(*args, **kw)
return inst
Meta = MetaClass('Meta', (), {})
def _test():
class C(Meta):
def __init__(self, *args):
print("__init__, args =", args)
def m1(self, x):
print("m1(x=%r)" % (x,))
print(C)
x = C()
print(x)
x.m1(12)
class D(C):
def __getattr__(self, name):
if name[:2] == '__': raise AttributeError(name)
return "getattr:%s" % name
x = D()
print(x.foo)
print(x._foo)
## print x.__foo
## print x.__foo__
if __name__ == '__main__':
_test()
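
# ----------------------------------------------------------------------
# A hypothetical modern counterpart (WrappingMeta, log_call and Example
# are invented names, not part of the demo above): with type-derived
# metaclasses, the helper/wrapper machinery above collapses into
# rewriting the class namespace once, at class-creation time.

import functools

def log_call(func):
    @functools.wraps(func)
    def wrapper(*args, **kw):
        print("calling", func.__name__)
        return func(*args, **kw)
    return wrapper

class WrappingMeta(type):
    def __new__(mcls, name, bases, ns):
        for attr, value in list(ns.items()):
            if isinstance(value, types.FunctionType) and not attr.startswith('__'):
                ns[attr] = log_call(value)
        return super().__new__(mcls, name, bases, ns)

class Example(metaclass=WrappingMeta):
    def m1(self, x):
        return x + 1

# Example().m1(12) prints "calling m1" and returns 13.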

View File

@ -1,45 +0,0 @@
import types


class Tracing:
    def __init__(self, name, bases, namespace):
        """Create a new class."""
        self.__name__ = name
        self.__bases__ = bases
        self.__namespace__ = namespace
    def __call__(self):
        """Create a new instance."""
        return Instance(self)


class Instance:
    def __init__(self, klass):
        self.__klass__ = klass
    def __getattr__(self, name):
        try:
            value = self.__klass__.__namespace__[name]
        except KeyError:
            raise AttributeError(name)
        if type(value) is not types.FunctionType:
            return value
        return BoundMethod(value, self)


class BoundMethod:
    def __init__(self, function, instance):
        self.function = function
        self.instance = instance
    def __call__(self, *args):
        print("calling", self.function, "for", self.instance, "with", args)
        return self.function(self.instance, *args)


Trace = Tracing('Trace', (), {})


class MyTracedClass(Trace):
    def method1(self, a):
        self.a = a
    def method2(self):
        return self.a


aninstance = MyTracedClass()

aninstance.method1(10)

print(aninstance.method2())

View File

@ -1,255 +0,0 @@
"""Synchronization metaclass.
This metaclass makes it possible to declare synchronized methods.
"""
import _thread as thread
# First we need to define a reentrant lock.
# This is generally useful and should probably be in a standard Python
# library module. For now, we in-line it.
class Lock:
"""Reentrant lock.
This is a mutex-like object which can be acquired by the same
thread more than once. It keeps a reference count of the number
of times it has been acquired by the same thread. Each acquire()
call must be matched by a release() call and only the last
release() call actually releases the lock for acquisition by
another thread.
The implementation uses two locks internally:
__mutex is a short term lock used to protect the instance variables
__wait is the lock for which other threads wait
A thread intending to acquire both locks should acquire __wait
first.
The implementation uses two other instance variables, protected by
locking __mutex:
__tid is the thread ID of the thread that currently has the lock
__count is the number of times the current thread has acquired it
When the lock is released, __tid is None and __count is zero.
"""
def __init__(self):
"""Constructor. Initialize all instance variables."""
self.__mutex = thread.allocate_lock()
self.__wait = thread.allocate_lock()
self.__tid = None
self.__count = 0
def acquire(self, flag=1):
"""Acquire the lock.
If the optional flag argument is false, returns immediately
when it cannot acquire the __wait lock without blocking (it
may still block for a little while in order to acquire the
__mutex lock).
The return value is only relevant when the flag argument is
false; it is 1 if the lock is acquired, 0 if not.
"""
self.__mutex.acquire()
try:
if self.__tid == thread.get_ident():
self.__count = self.__count + 1
return 1
finally:
self.__mutex.release()
locked = self.__wait.acquire(flag)
if not flag and not locked:
return 0
try:
self.__mutex.acquire()
assert self.__tid == None
assert self.__count == 0
self.__tid = thread.get_ident()
self.__count = 1
return 1
finally:
self.__mutex.release()
def release(self):
"""Release the lock.
If this thread doesn't currently have the lock, an assertion
error is raised.
Only allow another thread to acquire the lock when the count
reaches zero after decrementing it.
"""
self.__mutex.acquire()
try:
assert self.__tid == thread.get_ident()
assert self.__count > 0
self.__count = self.__count - 1
if self.__count == 0:
self.__tid = None
self.__wait.release()
finally:
self.__mutex.release()
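
# Note for modern readers (not part of the original demo): the standard
# library's threading.RLock provides the same reentrant behavior as the
# hand-rolled Lock above.  A minimal sketch, defined but not executed here:

import threading

def _rlock_sketch():
    rlock = threading.RLock()

    def inner():
        with rlock:                     # same thread may re-acquire
            print("nested acquisition works")

    with rlock:                         # first acquisition
        inner()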


def _testLock():

    done = []

    def f2(lock, done=done):
        lock.acquire()
        print("f2 running in thread %d\n" % thread.get_ident(), end=' ')
        lock.release()
        done.append(1)

    def f1(lock, f2=f2, done=done):
        lock.acquire()
        print("f1 running in thread %d\n" % thread.get_ident(), end=' ')
        try:
            f2(lock)
        finally:
            lock.release()
        done.append(1)

    lock = Lock()
    lock.acquire()
    f1(lock)                                 # Adds 2 to done
    lock.release()

    lock.acquire()

    thread.start_new_thread(f1, (lock,))     # Adds 2
    thread.start_new_thread(f1, (lock, f1))  # Adds 3

    thread.start_new_thread(f2, (lock,))     # Adds 1
    thread.start_new_thread(f2, (lock,))     # Adds 1

    lock.release()

    import time
    while len(done) < 9:
        print(len(done))
        time.sleep(0.001)
    print(len(done))


# Now, the Locking metaclass is a piece of cake.
# As an example feature, methods whose name begins with exactly one
# underscore are not synchronized.

from Meta import MetaClass, MetaHelper, MetaMethodWrapper


class LockingMethodWrapper(MetaMethodWrapper):
    def __call__(self, *args, **kw):
        if self.__name__[:1] == '_' and self.__name__[1:] != '_':
            return self.func(self.inst, *args, **kw)
        self.inst.__lock__.acquire()
        try:
            return self.func(self.inst, *args, **kw)
        finally:
            self.inst.__lock__.release()


class LockingHelper(MetaHelper):
    __methodwrapper__ = LockingMethodWrapper
    def __helperinit__(self, formalclass):
        MetaHelper.__helperinit__(self, formalclass)
        self.__lock__ = Lock()


class LockingMetaClass(MetaClass):
    __helper__ = LockingHelper


Locking = LockingMetaClass('Locking', (), {})


def _test():
    # For kicks, take away the Locking base class and see it die
    class Buffer(Locking):
        def __init__(self, initialsize):
            assert initialsize > 0
            self.size = initialsize
            self.buffer = [None]*self.size
            self.first = self.last = 0
        def put(self, item):
            # Do we need to grow the buffer?
            if (self.last+1) % self.size != self.first:
                # Insert the new item
                self.buffer[self.last] = item
                self.last = (self.last+1) % self.size
                return
            # Double the buffer size.
            # First normalize it so that first==0 and last==size-1.
            print("buffer =", self.buffer)
            print("first = %d, last = %d, size = %d" % (
                self.first, self.last, self.size))
            if self.first <= self.last:
                temp = self.buffer[self.first:self.last]
            else:
                temp = self.buffer[self.first:] + self.buffer[:self.last]
            print("temp =", temp)
            self.buffer = temp + [None]*(self.size+1)
            self.first = 0
            self.last = self.size-1
            self.size = self.size*2
            print("Buffer size doubled to", self.size)
            print("new buffer =", self.buffer)
            print("first = %d, last = %d, size = %d" % (
                self.first, self.last, self.size))
            self.put(item)                   # Recursive call to test the locking
        def get(self):
            # Is the buffer empty?
            if self.first == self.last:
                raise EOFError               # Avoid defining a new exception
            item = self.buffer[self.first]
            self.first = (self.first+1) % self.size
            return item

    def producer(buffer, wait, n=1000):
        i = 0
        while i < n:
            print("put", i)
            buffer.put(i)
            i = i+1
        print("Producer: done producing", n, "items")
        wait.release()

    def consumer(buffer, wait, n=1000):
        import time
        i = 0
        tout = 0.001
        while i < n:
            try:
                x = buffer.get()
                if x != i:
                    raise AssertionError("get() returned %s, expected %s" % (x, i))
                print("got", i)
                i = i+1
                tout = 0.001
            except EOFError:
                time.sleep(tout)
                tout = tout*2
        print("Consumer: done consuming", n, "items")
        wait.release()

    pwait = thread.allocate_lock()
    pwait.acquire()
    cwait = thread.allocate_lock()
    cwait.acquire()
    buffer = Buffer(1)
    n = 1000
    thread.start_new_thread(consumer, (buffer, cwait, n))
    thread.start_new_thread(producer, (buffer, pwait, n))
    pwait.acquire()
    print("Producer done")
    cwait.acquire()
    print("All done")
    print("buffer size ==", len(buffer.buffer))


if __name__ == '__main__':
    _testLock()
    _test()
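
# ----------------------------------------------------------------------
# A hypothetical modern sketch of the same "synchronized methods" feature
# using a type-derived metaclass and threading.RLock (SynchMeta and
# Counter are invented names, not part of the demo above).  As in the
# demo, names starting with exactly one underscore are left alone.

import functools
import types

class SynchMeta(type):
    def __new__(mcls, name, bases, ns):
        def synchronized(func):
            @functools.wraps(func)
            def wrapper(self, *args, **kw):
                with self._lock:
                    return func(self, *args, **kw)
            return wrapper
        for attr, value in list(ns.items()):
            if isinstance(value, types.FunctionType) and not attr.startswith('_'):
                ns[attr] = synchronized(value)
        return super().__new__(mcls, name, bases, ns)

class Counter(metaclass=SynchMeta):
    def __init__(self):
        self._lock = threading.RLock()  # reentrant, like the Lock class above
        self.value = 0
    def bump(self):
        self.value += 1
        return self.value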

View File

@ -1,144 +0,0 @@
"""Tracing metaclass.
XXX This is very much a work in progress.
"""
import types, sys
class TraceMetaClass:
"""Metaclass for tracing.
Classes defined using this metaclass have an automatic tracing
feature -- by setting the __trace_output__ instance (or class)
variable to a file object, trace messages about all calls are
written to the file. The trace formatting can be changed by
defining a suitable __trace_call__ method.
"""
__inited = 0
def __init__(self, name, bases, dict):
self.__name__ = name
self.__bases__ = bases
self.__dict = dict
# XXX Can't define __dict__, alas
self.__inited = 1
def __getattr__(self, name):
try:
return self.__dict[name]
except KeyError:
for base in self.__bases__:
try:
return base.__getattr__(name)
except AttributeError:
pass
raise AttributeError(name)
def __setattr__(self, name, value):
if not self.__inited:
self.__dict__[name] = value
else:
self.__dict[name] = value
def __call__(self, *args, **kw):
inst = TracingInstance()
inst.__meta_init__(self)
try:
init = inst.__getattr__('__init__')
except AttributeError:
init = lambda: None
init(*args, **kw)
return inst
__trace_output__ = None
class TracingInstance:
"""Helper class to represent an instance of a tracing class."""
def __trace_call__(self, fp, fmt, *args):
fp.write((fmt+'\n') % args)
def __meta_init__(self, klass):
self.__class = klass
def __getattr__(self, name):
# Invoked for any attr not in the instance's __dict__
try:
raw = self.__class.__getattr__(name)
except AttributeError:
raise AttributeError(name)
if type(raw) != types.FunctionType:
return raw
# It's a function
fullname = self.__class.__name__ + "." + name
if not self.__trace_output__ or name == '__trace_call__':
return NotTracingWrapper(fullname, raw, self)
else:
return TracingWrapper(fullname, raw, self)
class NotTracingWrapper:
def __init__(self, name, func, inst):
self.__name__ = name
self.func = func
self.inst = inst
def __call__(self, *args, **kw):
return self.func(self.inst, *args, **kw)
class TracingWrapper(NotTracingWrapper):
def __call__(self, *args, **kw):
self.inst.__trace_call__(self.inst.__trace_output__,
"calling %s, inst=%s, args=%s, kw=%s",
self.__name__, self.inst, args, kw)
try:
rv = self.func(self.inst, *args, **kw)
except:
t, v, tb = sys.exc_info()
self.inst.__trace_call__(self.inst.__trace_output__,
"returning from %s with exception %s: %s",
self.__name__, t, v)
raise t(v).with_traceback(tb)
else:
self.inst.__trace_call__(self.inst.__trace_output__,
"returning from %s with value %s",
self.__name__, rv)
return rv
Traced = TraceMetaClass('Traced', (), {'__trace_output__': None})
def _test():
global C, D
class C(Traced):
def __init__(self, x=0): self.x = x
def m1(self, x): self.x = x
def m2(self, y): return self.x + y
__trace_output__ = sys.stdout
class D(C):
def m2(self, y): print("D.m2(%r)" % (y,)); return C.m2(self, y)
__trace_output__ = None
x = C(4321)
print(x)
print(x.x)
print(x.m1(100))
print(x.m1(10))
print(x.m2(33))
print(x.m1(5))
print(x.m2(4000))
print(x.x)
print(C.__init__)
print(C.m2)
print(D.__init__)
print(D.m2)
y = D()
print(y)
print(y.m1(10))
print(y.m2(100))
print(y.x)
if __name__ == '__main__':
_test()

View File

@ -1,605 +0,0 @@
<HTML>
<HEAD>
<TITLE>Metaclasses in Python 1.5</TITLE>
</HEAD>
<BODY BGCOLOR="FFFFFF">
<H1>Metaclasses in Python 1.5</H1>
<H2>(A.k.a. The Killer Joke :-)</H2>
<HR>
(<i>Postscript:</i> reading this essay is probably not the best way to
understand the metaclass hook described here. See a <A
HREF="meta-vladimir.txt">message posted by Vladimir Marangozov</A>
which may give a gentler introduction to the matter. You may also
want to search Deja News for messages with "metaclass" in the subject
posted to comp.lang.python in July and August 1998.)
<HR>
<P>In previous Python releases (and still in 1.5), there is something
called the ``Don Beaudry hook'', after its inventor and champion.
This allows C extensions to provide alternate class behavior, thereby
allowing the Python class syntax to be used to define other class-like
entities. Don Beaudry has used this in his infamous <A
HREF="http://maigret.cog.brown.edu/pyutil/">MESS</A> package; Jim
Fulton has used it in his <A
HREF="http://www.digicool.com/releases/ExtensionClass/">Extension
Classes</A> package. (It has also been referred to as the ``Don
Beaudry <i>hack</i>,'' but that's a misnomer. There's nothing hackish
about it -- in fact, it is rather elegant and deep, even though
there's something dark to it.)
<P>(On first reading, you may want to skip directly to the examples in
the section "Writing Metaclasses in Python" below, unless you want
your head to explode.)
<P>
<HR>
<P>Documentation of the Don Beaudry hook has purposefully been kept
minimal, since it is a feature of incredible power, and is easily
abused. Basically, it checks whether the <b>type of the base
class</b> is callable, and if so, it is called to create the new
class.
<P>Note the two indirection levels. Take a simple example:
<PRE>
class B:
    pass

class C(B):
    pass
</PRE>
Take a look at the second class definition, and try to fathom ``the
type of the base class is callable.''
<P>(Types are not classes, by the way. See questions 4.2, 4.19 and in
particular 6.22 in the <A
HREF="http://www.python.org/cgi-bin/faqw.py" >Python FAQ</A>
for more on this topic.)
<P>
<UL>
<LI>The <b>base class</b> is B; this one's easy.<P>
<LI>Since B is a class, its type is ``class''; so the <b>type of the
base class</b> is the type ``class''. This is also known as
types.ClassType, assuming the standard module <code>types</code> has
been imported.<P>
<LI>Now is the type ``class'' <b>callable</b>? No, because types (in
core Python) are never callable. Classes are callable (calling a
class creates a new instance) but types aren't.<P>
</UL>
<P>So our conclusion is that in our example, the type of the base
class (of C) is not callable. So the Don Beaudry hook does not apply,
and the default class creation mechanism is used (which is also used
when there is no base class). In fact, the Don Beaudry hook never
applies when using only core Python, since the type of a core object
is never callable.
<P>So what do Don and Jim do in order to use Don's hook? Write an
extension that defines at least two new Python object types. The
first would be the type for ``class-like'' objects usable as a base
class, to trigger Don's hook. This type must be made callable.
That's why we need a second type. Whether an object is callable
depends on its type. So whether a type object is callable depends on
<i>its</i> type, which is a <i>meta-type</i>. (In core Python there
is only one meta-type, the type ``type'' (types.TypeType), which is
the type of all type objects, even itself.) A new meta-type must
be defined that makes the type of the class-like objects callable.
(Normally, a third type would also be needed, the new ``instance''
type, but this is not an absolute requirement -- the new class type
could return an object of some existing type when invoked to create an
instance.)
<P>Still confused? Here's a simple device due to Don himself to
explain metaclasses. Take a simple class definition; assume B is a
special class that triggers Don's hook:
<PRE>
class C(B):
    a = 1
    b = 2
</PRE>
This can be thought of as equivalent to:
<PRE>
C = type(B)('C', (B,), {'a': 1, 'b': 2})
</PRE>
If that's too dense for you, here's the same thing written out using
temporary variables:
<PRE>
creator = type(B) # The type of the base class
name = 'C' # The name of the new class
bases = (B,) # A tuple containing the base class(es)
namespace = {'a': 1, 'b': 2} # The namespace of the class statement
C = creator(name, bases, namespace)
</PRE>
This is analogous to what happens without the Don Beaudry hook, except
that in that case the creator function is set to the default class
creator.
<P>In either case, the creator is called with three arguments. The
first one, <i>name</i>, is the name of the new class (as given at the
top of the class statement). The <i>bases</i> argument is a tuple of
base classes (a singleton tuple if there's only one base class, like
the example). Finally, <i>namespace</i> is a dictionary containing
the local variables collected during execution of the class statement.
<P>Note that the contents of the namespace dictionary are simply
whatever names were defined in the class statement. A little-known
fact is that when Python executes a class statement, it enters a new
local namespace, and all assignments and function definitions take
place in this namespace. Thus, after executing the following class
statement:
<PRE>
class C:
    a = 1
    def f(s): pass
</PRE>
the class namespace's contents would be {'a': 1, 'f': &lt;function f
...&gt;}.
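<P>A brief aside in present-day Python, where the default class creator
is the builtin type(), so the equivalence can be checked directly (this
sketch is not part of the 1.5 mechanism being described):
<PRE>
def f(s): pass

C1 = type('C', (), {'a': 1, 'f': f})    # explicit three-argument call

class C2:
    a = 1
    def f(s): pass

print(C1.a == C2.a)                     # True: both spellings yield a == 1
</PRE>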
<P>But enough already about writing Python metaclasses in C; read the
documentation of <A
HREF="http://maigret.cog.brown.edu/pyutil/">MESS</A> or <A
HREF="http://www.digicool.com/papers/ExtensionClass.html" >Extension
Classes</A> for more information.
<P>
<HR>
<H2>Writing Metaclasses in Python</H2>
<P>In Python 1.5, the requirement to write a C extension in order to
write metaclasses has been dropped (though you can still do
it, of course). In addition to the check ``is the type of the base
class callable,'' there's a check ``does the base class have a
__class__ attribute.'' If so, it is assumed that the __class__
attribute refers to a class.
<P>Let's repeat our simple example from above:
<PRE>
class C(B):
    a = 1
    b = 2
</PRE>
Assuming B has a __class__ attribute, this translates into:
<PRE>
C = B.__class__('C', (B,), {'a': 1, 'b': 2})
</PRE>
This is exactly the same as before except that instead of type(B),
B.__class__ is invoked. If you have read <A HREF=
"http://www.python.org/cgi-bin/faqw.py?req=show&file=faq06.022.htp"
>FAQ question 6.22</A> you will understand that while there is a big
technical difference between type(B) and B.__class__, they play the
same role at different abstraction levels. And perhaps at some point
in the future they will really be the same thing (at which point you
would be able to derive subclasses from built-in types).
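<P>(Aside: in today's Python the same three-argument protocol is spelled
with the "metaclass" keyword argument rather than a magic base class. A
minimal sketch in current syntax, not part of the 1.5 mechanism
described here:)
<PRE>
class Meta(type):                       # a metaclass is itself a class
    def __call__(cls, *args, **kw):
        print("creating an instance of", cls.__name__)
        return super().__call__(*args, **kw)

class B(metaclass=Meta):
    pass

class C(B):                             # C picks up B's metaclass
    a = 1
    b = 2

print(type(C) is Meta)                  # True: C.__class__ is Meta
</PRE>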
<P>At this point it may be worth mentioning that C.__class__ is the
same object as B.__class__, i.e., C's metaclass is the same as B's
metaclass. In other words, subclassing an existing class creates a
new (meta)instance of the base class's metaclass.
<P>Going back to the example, the class B.__class__ is instantiated,
passing its constructor the same three arguments that are passed to
the default class constructor or to an extension's metaclass:
<i>name</i>, <i>bases</i>, and <i>namespace</i>.
<P>It is easy to be confused by what exactly happens when using a
metaclass, because we lose the absolute distinction between classes
and instances: a class is an instance of a metaclass (a
``metainstance''), but technically (i.e. in the eyes of the Python
runtime system), the metaclass is just a class, and the metainstance
is just an instance. At the end of the class statement, the metaclass
whose metainstance is used as a base class is instantiated, yielding a
second metainstance (of the same metaclass). This metainstance is
then used as a (normal, non-meta) class; instantiation of the class
means calling the metainstance, and this will return a real instance.
And what class is that an instance of? Conceptually, it is of course
an instance of our metainstance; but in most cases the Python runtime
system will see it as an instance of a helper class used by the
metaclass to implement its (non-meta) instances...
<P>Hopefully an example will make things clearer. Let's presume we
have a metaclass MetaClass1.  Its helper class (for non-meta
instances) is called HelperClass1.  We now (manually) instantiate
MetaClass1 once to get an empty special base class:
<PRE>
BaseClass1 = MetaClass1("BaseClass1", (), {})
</PRE>
We can now use BaseClass1 as a base class in a class statement:
<PRE>
class MySpecialClass(BaseClass1):
    i = 1
    def f(s): pass
</PRE>
At this point, MySpecialClass is defined; it is a metainstance of
MetaClass1 just like BaseClass1, and in fact the expression
``BaseClass1.__class__ == MySpecialClass.__class__ == MetaClass1''
yields true.
<P>We are now ready to create instances of MySpecialClass. Let's
assume that no constructor arguments are required:
<PRE>
x = MySpecialClass()
y = MySpecialClass()
print x.__class__, y.__class__
</PRE>
The print statement shows that x and y are instances of HelperClass1.
How did this happen? MySpecialClass is an instance of MetaClass1
(``meta'' is irrelevant here); when an instance is called, its
__call__ method is invoked, and presumably the __call__ method defined
by MetaClass1 returns an instance of HelperClass1.
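<P>A stripped-down sketch of that call chain, using the invented names
from the previous paragraphs (written in current syntax so it can be
pasted into an interpreter):
<PRE>
class HelperClass1:
    pass

class MetaClass1:
    def __init__(self, name, bases, namespace):
        self.__name__ = name
        self.__bases__ = bases
        self.__namespace__ = namespace
    def __call__(self):
        return HelperClass1()           # "instances" are really helpers

BaseClass1 = MetaClass1("BaseClass1", (), {})

class MySpecialClass(BaseClass1):       # MetaClass1 builds this class object
    i = 1

x = MySpecialClass()
y = MySpecialClass()
print(x.__class__, y.__class__)         # both are HelperClass1
</PRE>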
<P>Now let's see how we could use metaclasses -- what can we do
with metaclasses that we can't easily do without them? Here's one
idea: a metaclass could automatically insert trace calls for all
method calls. Let's first develop a simplified example, without
support for inheritance or other ``advanced'' Python features (we'll
add those later).
<PRE>
import types

class Tracing:
    def __init__(self, name, bases, namespace):
        """Create a new class."""
        self.__name__ = name
        self.__bases__ = bases
        self.__namespace__ = namespace
    def __call__(self):
        """Create a new instance."""
        return Instance(self)

class Instance:
    def __init__(self, klass):
        self.__klass__ = klass
    def __getattr__(self, name):
        try:
            value = self.__klass__.__namespace__[name]
        except KeyError:
            raise AttributeError, name
        if type(value) is not types.FunctionType:
            return value
        return BoundMethod(value, self)

class BoundMethod:
    def __init__(self, function, instance):
        self.function = function
        self.instance = instance
    def __call__(self, *args):
        print "calling", self.function, "for", self.instance, "with", args
        return apply(self.function, (self.instance,) + args)

Trace = Tracing('Trace', (), {})

class MyTracedClass(Trace):
    def method1(self, a):
        self.a = a
    def method2(self):
        return self.a

aninstance = MyTracedClass()

aninstance.method1(10)

print "the answer is %d" % aninstance.method2()
</PRE>
Confused already? The intention is to read this from top down. The
Tracing class is the metaclass we're defining. Its structure is
really simple.
<P>
<UL>
<LI>The __init__ method is invoked when a new Tracing instance is
created, e.g. the definition of class MyTracedClass later in the
example. It simply saves the class name, base classes and namespace
as instance variables.<P>
<LI>The __call__ method is invoked when a Tracing instance is called,
e.g. the creation of aninstance later in the example. It returns an
instance of the class Instance, which is defined next.<P>
</UL>
<P>The class Instance is the class used for all instances of classes
built using the Tracing metaclass, e.g. aninstance. It has two
methods:
<P>
<UL>
<LI>The __init__ method is invoked from the Tracing.__call__ method
above to initialize a new instance. It saves the class reference as
an instance variable. It uses a funny name because the user's
instance variables (e.g. self.a later in the example) live in the same
namespace.<P>
<LI>The __getattr__ method is invoked whenever the user code
references an attribute of the instance that is not an instance
variable (nor a class variable; but except for __init__ and
__getattr__ there are no class variables). It will be called, for
example, when aninstance.method1 is referenced in the example, with
self set to aninstance and name set to the string "method1".<P>
</UL>
<P>The __getattr__ method looks the name up in the __namespace__
dictionary. If it isn't found, it raises an AttributeError exception.
(In a more realistic example, it would first have to look through the
base classes as well.) If it is found, there are two possibilities:
it's either a function or it isn't. If it's not a function, it is
assumed to be a class variable, and its value is returned. If it's a
function, we have to ``wrap'' it in an instance of yet another helper
class, BoundMethod.
<P>The BoundMethod class is needed to implement a familiar feature:
when a method is defined, it has an initial argument, self, which is
automatically bound to the relevant instance when it is called. For
example, aninstance.method1(10) is equivalent to method1(aninstance,
10).  In the case of this call, first a temporary BoundMethod
instance is created with the following constructor call: temp =
BoundMethod(method1, aninstance); then this instance is called as
temp(10). After the call, the temporary instance is discarded.
<P>
<UL>
<LI>The __init__ method is invoked for the constructor call
BoundMethod(method1, aninstance). It simply saves away its
arguments.<P>
<LI>The __call__ method is invoked when the bound method instance is
called, as in temp(10). It needs to call method1(aninstance, 10).
However, even though self.function is now method1 and self.instance is
aninstance, it can't call self.function(self.instance, args) directly,
because it should work regardless of the number of arguments passed.
(For simplicity, support for keyword arguments has been omitted.)<P>
</UL>
<P>In order to be able to support arbitrary argument lists, the
__call__ method first constructs a new argument tuple. Conveniently,
because of the notation *args in __call__'s own argument list, the
arguments to __call__ (except for self) are placed in the tuple args.
To construct the desired argument list, we concatenate a singleton
tuple containing the instance with the args tuple: (self.instance,) +
args. (Note the trailing comma used to construct the singleton
tuple.) In our example, the resulting argument tuple is (aninstance,
10).
<P>The intrinsic function apply() takes a function and an argument
tuple and calls the function for it. In our example, we are calling
apply(method1, (aninstance, 10)) which is equivalent to calling
method1(aninstance, 10).
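<P>(In current Python the apply() builtin is gone; the same call is
written with argument unpacking, which is also how BoundMethod.__call__
would be written today:)
<PRE>
return self.function(self.instance, *args)   # equivalent to the apply() call
</PRE>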
<P>From here on, things should come together quite easily. The output
of the example code is something like this:
<PRE>
calling &lt;function method1 at ae8d8&gt; for &lt;Instance instance at 95ab0&gt; with (10,)
calling &lt;function method2 at ae900&gt; for &lt;Instance instance at 95ab0&gt; with ()
the answer is 10
</PRE>
<P>That was about the shortest meaningful example that I could come up
with. A real tracing metaclass (for example, <A
HREF="#Trace">Trace.py</A> discussed below) needs to be more
complicated in two dimensions.
<P>First, it needs to support more advanced Python features such as
class variables, inheritance, __init__ methods, and keyword arguments.
<P>Second, it needs to provide a more flexible way to handle the
actual tracing information; perhaps it should be possible to write
your own tracing function that gets called, perhaps it should be
possible to enable and disable tracing on a per-class or per-instance
basis, and perhaps a filter so that only interesting calls are traced;
it should also be able to trace the return value of the call (or the
exception it raised if an error occurs). Even the Trace.py example
doesn't support all these features yet.
<P>
<HR>
<H1>Real-life Examples</H1>
<P>Have a look at some very preliminary examples that I coded up to
teach myself how to write metaclasses:
<DL>
<DT><A HREF="Enum.py">Enum.py</A>
<DD>This (ab)uses the class syntax as an elegant way to define
enumerated types. The resulting classes are never instantiated --
rather, their class attributes are the enumerated values. For
example:
<PRE>
class Color(Enum):
    red = 1
    green = 2
    blue = 3

print Color.red
</PRE>
will print the string ``Color.red'', while ``Color.red==1'' is true,
and ``Color.red + 1'' raises a TypeError exception.
<P>
<DT><A NAME=Trace></A><A HREF="Trace.py">Trace.py</A>
<DD>The resulting classes work much like standard
classes, but by setting a special class or instance attribute
__trace_output__ to point to a file, all calls to the class's methods
are traced. It was a bit of a struggle to get this right. This
should probably be redone using the generic metaclass below.
<P>
<DT><A HREF="Meta.py">Meta.py</A>
<DD>A generic metaclass. This is an attempt at finding out how much
standard class behavior can be mimicked by a metaclass. The
preliminary answer appears to be that everything's fine as long as the
class (or its clients) don't look at the instance's __class__
attribute, nor at the class's __dict__ attribute. The use of
__getattr__ internally makes the classic implementation of __getattr__
hooks tough; we provide a similar hook _getattr_ instead.
(__setattr__ and __delattr__ are not affected.)
(XXX Hm. Could detect presence of __getattr__ and rename it.)
<P>
<DT><A HREF="Eiffel.py">Eiffel.py</A>
<DD>Uses the above generic metaclass to implement Eiffel style
pre-conditions and post-conditions.
<P>
<DT><A HREF="Synch.py">Synch.py</A>
<DD>Uses the above generic metaclass to implement synchronized
methods.
<P>
<DT><A HREF="Simple.py">Simple.py</A>
<DD>The example module used above.
<P>
</DL>
<P>A pattern seems to be emerging: almost all these uses of
metaclasses (except for Enum, which is probably more cute than useful)
mostly work by placing wrappers around method calls. An obvious
problem with that is that it's not easy to combine the features of
different metaclasses, while this would actually be quite useful: for
example, I wouldn't mind getting a trace from the test run of the
Synch module, and it would be interesting to add preconditions to it
as well. This needs more research. Perhaps a metaclass could be
provided that allows stackable wrappers...
<P>
<HR>
<H2>Things You Could Do With Metaclasses</H2>
<P>There are lots of things you could do with metaclasses. Most of
these can also be done with creative use of __getattr__, but
metaclasses make it easier to modify the attribute lookup behavior of
classes.  Here's a partial list; a sketch of one of these ideas follows the list.
<P>
<UL>
<LI>Enforce different inheritance semantics, e.g. automatically call
base class methods when a derived class overrides<P>
<LI>Implement class methods (e.g. if the first argument is not named
'self')<P>
<LI>Implement that each instance is initialized with <b>copies</b> of
all class variables<P>
<LI>Implement a different way to store instance variables (e.g. in a
list kept outside the instance but indexed by the instance's id())<P>
<LI>Automatically wrap or trap all or certain methods
<UL>
<LI>for tracing
<LI>for precondition and postcondition checking
<LI>for synchronized methods
<LI>for automatic value caching
</UL>
<P>
<LI>When an attribute is a parameterless function, call it on
reference (to mimic it being an instance variable); same on assignment<P>
<LI>Instrumentation: see how many times various attributes are used<P>
<LI>Different semantics for __setattr__ and __getattr__ (e.g. disable
them when they are being used recursively)<P>
<LI>Abuse class syntax for other things<P>
<LI>Experiment with automatic type checking<P>
<LI>Delegation (or acquisition)<P>
<LI>Dynamic inheritance patterns<P>
<LI>Automatic caching of methods<P>
</UL>
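<P>To make the flavor concrete, here is a rough sketch of one idea from
the list above -- giving each instance its own copies of the class
variables -- written with a current-day metaclass (CopyingMeta and
Config are invented names):
<PRE>
import copy

class CopyingMeta(type):
    def __call__(cls, *args, **kw):
        inst = super().__call__(*args, **kw)
        # Give the new instance private copies of plain class variables.
        for name, value in vars(cls).items():
            if not name.startswith('__') and not callable(value):
                setattr(inst, name, copy.deepcopy(value))
        return inst

class Config(metaclass=CopyingMeta):
    options = {'verbose': False}

a = Config()
b = Config()
a.options['verbose'] = True
print(b.options)                        # {'verbose': False}; b has its own copy
</PRE>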
<P>
<HR>
<H4>Credits</H4>
<P>Many thanks to David Ascher and Donald Beaudry for their comments
on an earlier draft of this paper.  Also thanks to Matt Conway and Tommy
Burnette for putting a seed for the idea of metaclasses in my
mind, nearly three years ago, even though at the time my response was
``you can do that with __getattr__ hooks...'' :-)
<P>
<HR>
</BODY>
</HTML>

View File

@ -1,256 +0,0 @@
Subject: Re: The metaclass saga using Python
From: Vladimir Marangozov <Vladimir.Marangozov@imag.fr>
To: tim_one@email.msn.com (Tim Peters)
Cc: python-list@cwi.nl
Date: Wed, 5 Aug 1998 15:59:06 +0200 (DFT)
[Tim]
>
> building-on-examples-tends-to-prevent-abstract-thrashing-ly y'rs - tim
>
OK, I stand corrected. I understand that anybody's interpretation of
the meta-class concept is likely to be difficult to digest by others.
Here's another try, expressing the same thing, but using the Python
programming model, examples and, perhaps, more popular terms.
1. Classes.
This is pure Python of today. Sorry about the tutorial, but it is
meant to illustrate the second part, which is the one we're
interested in and which will follow the same development scenario.
Besides, newbies are likely to understand that the discussion is
affordable even for them :-)
a) Class definition
A class is meant to define the common properties of a set of objects.
A class is a "package" of properties. The assembly of properties
in a class package is sometimes called a class structure (which isn't
always appropriate).
>>> class A:
...     attr1 = "Hello"                 # an attribute of A
...     def method1(self, *args): pass  # method1 of A
...     def method2(self, *args): pass  # method2 of A
...
>>>
So far, we defined the structure of the class A. The class A is
of type <class>. We can check this by asking Python: "what is A?"
>>> A # What is A?
<class __main__.A at 2023e360>
b) Class instantiation
Creating an object with the properties defined in the class A is
called instantiation of the class A. After an instantiation of A, we
obtain a new object, called an instance, which has the properties
packaged in the class A.
>>> a = A() # 'a' is the 1st instance of A
>>> a # What is 'a'?
<__main__.A instance at 2022b9d0>
>>> b = A() # 'b' is another instance of A
>>> b # What is 'b'?
<__main__.A instance at 2022b9c0>
The objects, 'a' and 'b', are of type <instance> and they both have
the same properties. Note that 'a' and 'b' are different objects
(their addresses differ). This is a bit hard to see, so let's ask Python:
>>> a == b # Is 'a' the same object as 'b'?
0 # No.
Instance objects have one more special property, indicating the class
they are an instance of. This property is named __class__.
>>> a.__class__ # What is the class of 'a'?
<class __main__.A at 2023e360> # 'a' is an instance of A
>>> b.__class__ # What is the class of 'b'?
<class __main__.A at 2023e360> # 'b' is an instance of A
>>> a.__class__ == b.__class__ # Is it really the same class A?
1 # Yes.
c) Class inheritance (class composition and specialization)
Classes can be defined in terms of other existing classes (and only
classes! -- don't bug me on this now). Thus, we can compose property
packages and create new ones. We reuse the property set defined
in a class by defining a new class, which "inherits" from the former.
In other words, a class B which inherits from the class A, inherits
the properties defined in A, or, B inherits the structure of A.
At the same time, when defining the new class B, we can enrich the
inherited set of properties by adding new ones and/or modifying some
of the inherited properties.
>>> class B(A):                         # B inherits A's properties
...     attr2 = "World"                 # additional attr2
...     def method2(self, arg1): pass   # method2 is redefined
...     def method3(self, *args): pass  # additional method3
...
>>> B # What is B?
<class __main__.B at 2023e500>
>>> B == A # Is B the same class as A?
0 # No.
Classes define one special property, indicating whether a class
inherits the properties of another class. This property is called
__bases__ and it contains a list (a tuple) of the classes the new
class inherits from. The classes from which a class is inheriting the
properties are called superclasses (in Python, we call them also --
base classes).
>>> A.__bases__ # Does A have any superclasses?
() # No.
>>> B.__bases__ # Does B have any superclasses?
(<class __main__.A at 2023e360>,) # Yes. It has one superclass.
>>> B.__bases__[0] == A # Is it really the class A?
1 # Yes, it is.
--------
Congratulations on getting this far! This was the hard part.
Now, let's continue with the easy one.
--------
2. Meta-classes
You have to admit, that an anonymous group of Python wizards are
not satisfied with the property packaging facilities presented above.
They say, that the Real-World bugs them with problems that cannot be
modelled successfully with classes. Or, that the way classes are
implemented in Python and the way classes and instances behave at
runtime isn't always appropriate for reproducing the Real-World's
behavior in a way that satisfies them.
Hence, what they want is the following:
a) leave objects as they are (instances of classes)
b) leave classes as they are (property packages and object creators)
BUT, at the same time:
c) consider classes as being instances of mysterious objects.
d) label mysterious objects "meta-classes".
Easy, eh?
You may ask: "Why on earth do they want to do that?".
They answer: "Poor soul... Go and see how cruel the Real-World is!".
You - fuzzy: "OK, will do!"
And here we go for another round of what I said in section 1 -- Classes.
However, be warned! The features we're going to talk about aren't fully
implemented yet, because the Real-World doesn't let wizards evaluate
precisely how cruel it is, so the features are still highly-experimental.
a) Meta-class definition
A meta-class is meant to define the common properties of a set of
classes. A meta-class is a "package" of properties. The assembly
of properties in a meta-class package is sometimes called a meta-class
structure (which isn't always appropriate).
In Python, a meta-class definition would have looked like this:
>>> metaclass M:
...     attr1 = "Hello"                 # an attribute of M
...     def method1(self, *args): pass  # method1 of M
...     def method2(self, *args): pass  # method2 of M
...
>>>
So far, we defined the structure of the meta-class M. The meta-class
M is of type <metaclass>. We cannot check this by asking Python, but
if we could, it would have answered:
>>> M # What is M?
<metaclass __main__.M at 2023e4e0>
b) Meta-class instantiation
Creating an object with the properties defined in the meta-class M is
called instantiation of the meta-class M. After an instantiation of M,
we obtain a new object, called an class, but now it is called also
a meta-instance, which has the properties packaged in the meta-class M.
In Python, instantiating a meta-class would have looked like this:
>>> A = M() # 'A' is the 1st instance of M
>>> A # What is 'A'?
<class __main__.A at 2022b9d0>
>>> B = M() # 'B' is another instance of M
>>> B # What is 'B'?
<class __main__.B at 2022b9c0>
The metaclass-instances, A and B, are of type <class> and they both
have the same properties. Note that A and B are different objects
(their addresses differ). This is a bit hard to see, but if it were
possible to ask Python, it would have answered:
>>> A == B # Is A the same class as B?
0 # No.
Class objects have one more special property, indicating the meta-class
they are an instance of. This property is named __metaclass__.
>>> A.__metaclass__ # What is the meta-class of A?
<metaclass __main__.M at 2023e4e0> # A is an instance of M
>>> B.__metaclass__ # What is the meta-class of B?
<metaclass __main__.M at 2023e4e0> # B is an instance of M
>>> A.__metaclass__ == B.__metaclass__ # Is it the same meta-class M?
1 # Yes.
c) Meta-class inheritance (meta-class composition and specialization)
Meta-classes can be defined in terms of other existing meta-classes
(and only meta-classes!). Thus, we can compose property packages and
create new ones. We reuse the property set defined in a meta-class by
defining a new meta-class, which "inherits" from the former.
In other words, a meta-class N which inherits from the meta-class M,
inherits the properties defined in M, or, N inherits the structure of M.
At the same time, when defining the new meta-class N, we can enrich
the inherited set of properties by adding new ones and/or modifying
some of the inherited properties.
>>> metaclass N(M):                     # N inherits M's properties
...     attr2 = "World"                 # additional attr2
...     def method2(self, arg1): pass   # method2 is redefined
...     def method3(self, *args): pass  # additional method3
...
>>> N # What is N?
<metaclass __main__.N at 2023e500>
>>> N == M # Is N the same meta-class as M?
0 # No.
Meta-classes define one special property, indicating whether a
meta-class inherits the properties of another meta-class. This property
is called __metabases__ and it contains a list (a tuple) of the
meta-classes the new meta-class inherits from. The meta-classes from
which a meta-class is inheriting the properties are called
super-meta-classes (in Python, we call them also -- super meta-bases).
>>> M.__metabases__ # Does M have any supermetaclasses?
() # No.
>>> N.__metabases__ # Does N have any supermetaclasses?
(<metaclass __main__.M at 2023e360>,) # Yes. It has a supermetaclass.
>>> N.__metabases__[0] == M # Is it really the meta-class M?
1 # Yes, it is.
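(For comparison only, in today's real Python rather than the hypothetical
syntax above: a "metaclass" is spelled as an ordinary class deriving from
type, and the special attributes are the usual __class__ and __bases__ of
the meta-class itself.  A rough current-syntax rendering of the M/N example:)

>>> class M(type):
...     attr1 = "Hello"                 # an attribute of M
...     def method1(self, *args): pass  # method1 of M
...
>>> class N(M):                         # N inherits M's properties
...     attr2 = "World"                 # additional attr2
...
>>> A = M("A", (), {})                  # A is a class, an instance of M
>>> A.__class__ is M                    # What is the meta-class of A?
True
>>> N.__bases__ == (M,)                 # Does N inherit from M?
True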
--------
Triple congratulations on getting this far!
Now you know everything about meta-classes and the Real-World!
<unless-wizards-want-meta-classes-be-instances-of-mysterious-objects!>
--
Vladimir MARANGOZOV | Vladimir.Marangozov@inrialpes.fr
http://sirac.inrialpes.fr/~marangoz | tel:(+33-4)76615277 fax:76615252