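# Tests for the gc module: cycle collection for the core container types,
# classic and new-style classes, finalizers and gc.garbage, weakref
# interaction, and the collection thresholds and debugging flags.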
from test.test_support import verify, verbose, TestFailed, vereq
import sys
import gc
import weakref

def expect(actual, expected, name):
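    # Helper: fail the named sub-test unless the actual value equals the
    # expected one.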
    if actual != expected:
        raise TestFailed, "test_%s: actual %r, expected %r" % (
            name, actual, expected)

def expect_nonzero(actual, name):
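    # Helper: fail the named sub-test if the value is zero; used where only a
    # nonzero collection count can be relied on.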
    if actual == 0:
        raise TestFailed, "test_%s: unexpected zero" % name

def run_test(name, thunk):
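    # Run one sub-test, announcing it when the test suite runs in verbose mode.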
    if verbose:
        print "testing %s..." % name,
    thunk()
    if verbose:
        print "ok"

def test_list():
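    # A list that appends itself is a one-object cycle, so collect() should
    # report exactly one unreachable object.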
    l = []
    l.append(l)
    gc.collect()
    del l
    expect(gc.collect(), 1, "list")

def test_dict():
    d = {}
    d[1] = d
    gc.collect()
    del d
    expect(gc.collect(), 1, "dict")

def test_tuple():
    # since tuples are immutable we close the loop with a list
    l = []
    t = (l,)
    l.append(t)
    gc.collect()
    del t
    del l
    expect(gc.collect(), 2, "tuple")

def test_class():
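    # A classic class whose dict points back at the class is cyclic trash once
    # the name is deleted; the exact object count isn't pinned down, hence
    # expect_nonzero.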
    class A:
        pass
    A.a = A
    gc.collect()
    del A
    expect_nonzero(gc.collect(), "class")

def test_newstyleclass():
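    # Heap-allocated (new-style) type objects are tracked by the collector, so
    # the unreferenced class and its supporting objects should be reclaimed.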
    class A(object):
        pass
    gc.collect()
    del A
    expect_nonzero(gc.collect(), "staticclass")

def test_instance():
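    # The cycle runs through the instance's __dict__, so several objects are
    # freed; only a nonzero count is checked.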
    class A:
        pass
    a = A()
    a.a = a
    gc.collect()
    del a
    expect_nonzero(gc.collect(), "instance")

def test_newinstance():
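    # Same idea for new-style instances, including a list subclass and a
    # multiple-inheritance mix.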
    class A(object):
        pass
    a = A()
    a.a = a
    gc.collect()
    del a
    expect_nonzero(gc.collect(), "newinstance")
    class B(list):
        pass
    class C(B, A):
        pass
    a = C()
    a.a = a
    gc.collect()
    del a
    expect_nonzero(gc.collect(), "newinstance(2)")
    del B, C
    expect_nonzero(gc.collect(), "newinstance(3)")
    A.a = A()
    del A
    expect_nonzero(gc.collect(), "newinstance(4)")
    expect(gc.collect(), 0, "newinstance(5)")

def test_method():
    # Tricky: self.__init__ is a bound method, it references the instance.
    class A:
        def __init__(self):
            self.init = self.__init__
    a = A()
    gc.collect()
    del a
    expect_nonzero(gc.collect(), "method")

def test_finalizer():
    # A() is uncollectable if it is part of a cycle, make sure it shows up
    # in gc.garbage.
    class A:
        def __del__(self): pass
    class B:
        pass
    a = A()
    a.a = a
    id_a = id(a)
    b = B()
    b.b = b
    gc.collect()
    del a
    del b
    expect_nonzero(gc.collect(), "finalizer")
    for obj in gc.garbage:
        if id(obj) == id_a:
            del obj.a
            break
    else:
        raise TestFailed, "didn't find obj in garbage (finalizer)"
    gc.garbage.remove(obj)

def test_finalizer_newclass():
    # A() is uncollectable if it is part of a cycle, make sure it shows up
    # in gc.garbage.
    class A(object):
        def __del__(self): pass
    class B(object):
        pass
    a = A()
    a.a = a
    id_a = id(a)
    b = B()
    b.b = b
    gc.collect()
    del a
    del b
    expect_nonzero(gc.collect(), "finalizer")
    for obj in gc.garbage:
        if id(obj) == id_a:
            del obj.a
            break
    else:
        raise TestFailed, "didn't find obj in garbage (finalizer)"
    gc.garbage.remove(obj)

def test_function():
    # Tricky: f -> d -> f, code should call d.clear() after the exec to
    # break the cycle.
    d = {}
    exec("def f(): pass\n", d)
    gc.collect()
    del d
    expect(gc.collect(), 2, "function")

def test_frame():
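    # The frame keeps a reference to itself in one of its own locals, forming
    # a one-object cycle.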
    def f():
        frame = sys._getframe()
    gc.collect()
    f()
    expect(gc.collect(), 1, "frame")

def test_saveall():
    # Verify that cyclic garbage like lists show up in gc.garbage if the
    # SAVEALL option is enabled.

    # First make sure we don't save away other stuff that just happens to
    # be waiting for collection.
    gc.collect()
    vereq(gc.garbage, []) # if this fails, someone else created immortal trash

    L = []
    L.append(L)
    id_L = id(L)

    debug = gc.get_debug()
    gc.set_debug(debug | gc.DEBUG_SAVEALL)
    del L
    gc.collect()
    gc.set_debug(debug)

    vereq(len(gc.garbage), 1)
    obj = gc.garbage.pop()
    vereq(id(obj), id_L)

def test_del():
    # __del__ methods can trigger collection, make this happen
    thresholds = gc.get_threshold()
    gc.enable()
    gc.set_threshold(1)

    class A:
        def __del__(self):
            dir(self)
    a = A()
    del a

    gc.disable()
    gc.set_threshold(*thresholds)

def test_del_newclass():
    # __del__ methods can trigger collection, make this happen
    thresholds = gc.get_threshold()
    gc.enable()
    gc.set_threshold(1)

    class A(object):
        def __del__(self):
            dir(self)
    a = A()
    del a

    gc.disable()
    gc.set_threshold(*thresholds)

def test_get_count():
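    # get_count() returns the three gc collection counters; a full collection
    # zeroes them, and allocating one container object bumps the first counter.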
    gc.collect()
    expect(gc.get_count(), (0, 0, 0), "get_count()")
    a = dict()
    expect(gc.get_count(), (1, 0, 0), "get_count()")

def test_collect_generations():
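    # Collecting generation n zeroes the counters up to n and bumps the counter
    # of the next older generation (a full collection just zeroes everything).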
    gc.collect()
    a = dict()
    gc.collect(0)
    expect(gc.get_count(), (0, 1, 0), "collect(0)")
    gc.collect(1)
    expect(gc.get_count(), (0, 0, 1), "collect(1)")
    gc.collect(2)
    expect(gc.get_count(), (0, 0, 0), "collect(2)")

class Ouch:
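    # Ouch.__del__ forces a full collection every 17th deallocation, stressing
    # gc while the trashcan mechanism is tearing down deep structures.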
    n = 0
    def __del__(self):
        Ouch.n = Ouch.n + 1
        if Ouch.n % 17 == 0:
            gc.collect()

def test_trashcan():
    # "trashcan" is a hack to prevent stack overflow when deallocating
    # very deeply nested tuples etc.  It works in part by abusing the
    # type pointer and refcount fields, and that can yield horrible
    # problems when gc tries to traverse the structures.
    # If this test fails (as it does in 2.0, 2.1 and 2.2), it will
    # most likely die via segfault.

    # Note:  In 2.3 the possibility for compiling without cyclic gc was
    # removed, and that in turn allows the trashcan mechanism to work
    # via much simpler means (e.g., it never abuses the type pointer or
    # refcount fields anymore).  Since it's much less likely to cause a
    # problem now, the various constants in this expensive (we force a lot
    # of full collections) test are cut back from the 2.2 version.
    gc.enable()
    N = 150
    for count in range(2):
        t = []
        for i in range(N):
            t = [t, Ouch()]
        u = []
        for i in range(N):
            u = [u, Ouch()]
        v = {}
        for i in range(N):
            v = {1: v, 2: Ouch()}
    gc.disable()

class Boom:
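    # Any attribute miss tears down the a<->b cycle as a side effect and then
    # fails; test_boom relies on this.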
    def __getattr__(self, someattribute):
        del self.attr
        raise AttributeError

def test_boom():
    a = Boom()
    b = Boom()
    a.attr = b
    b.attr = a

    gc.collect()
    garbagelen = len(gc.garbage)
    del a, b
    # a<->b are in a trash cycle now.  Collection will invoke Boom.__getattr__
    # (to see whether a and b have __del__ methods), and __getattr__ deletes
    # the internal "attr" attributes as a side effect.  That causes the
    # trash cycle to get reclaimed via refcounts falling to 0, thus mutating
    # the trash graph as a side effect of merely asking whether __del__
    # exists.  This used to (before 2.3b1) crash Python.  Now __getattr__
    # isn't called.
    expect(gc.collect(), 4, "boom")
    expect(len(gc.garbage), garbagelen, "boom")

class Boom2:
    def __init__(self):
        self.x = 0

    def __getattr__(self, someattribute):
        self.x += 1
        if self.x > 1:
            del self.attr
        raise AttributeError

def test_boom2():
    a = Boom2()
    b = Boom2()
    a.attr = b
    b.attr = a

    gc.collect()
    garbagelen = len(gc.garbage)
    del a, b
    # Much like test_boom(), except that __getattr__ doesn't break the
    # cycle until the second time gc checks for __del__.  As of 2.3b1,
    # there isn't a second time, so this simply cleans up the trash cycle.
    # We expect a, b, a.__dict__ and b.__dict__ (4 objects) to get reclaimed
    # this way.
    expect(gc.collect(), 4, "boom2")
    expect(len(gc.garbage), garbagelen, "boom2")

# boom_new and boom2_new are exactly like boom and boom2, except they use
# new-style classes.

class Boom_New(object):
    def __getattr__(self, someattribute):
        del self.attr
        raise AttributeError

def test_boom_new():
    a = Boom_New()
    b = Boom_New()
    a.attr = b
    b.attr = a

    gc.collect()
    garbagelen = len(gc.garbage)
    del a, b
    expect(gc.collect(), 4, "boom_new")
    expect(len(gc.garbage), garbagelen, "boom_new")

class Boom2_New(object):
    def __init__(self):
        self.x = 0

    def __getattr__(self, someattribute):
        self.x += 1
        if self.x > 1:
            del self.attr
        raise AttributeError

def test_boom2_new():
    a = Boom2_New()
    b = Boom2_New()
    a.attr = b
    b.attr = a

    gc.collect()
    garbagelen = len(gc.garbage)
    del a, b
    expect(gc.collect(), 4, "boom2_new")
    expect(len(gc.garbage), garbagelen, "boom2_new")

def test_get_referents():
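    # gc.get_referents() returns the objects directly reachable from its
    # arguments; the order is unspecified, hence the sort() calls.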
    alist = [1, 3, 5]
    got = gc.get_referents(alist)
    got.sort()
    expect(got, alist, "get_referents")

    atuple = tuple(alist)
    got = gc.get_referents(atuple)
    got.sort()
    expect(got, alist, "get_referents")

    adict = {1: 3, 5: 7}
    expected = [1, 3, 5, 7]
    got = gc.get_referents(adict)
    got.sort()
    expect(got, expected, "get_referents")

    got = gc.get_referents([1, 2], {3: 4}, (0, 0, 0))
    got.sort()
    expect(got, [0, 0] + range(5), "get_referents")

    expect(gc.get_referents(1, 'a', 4j), [], "get_referents")

# Bug 1055820 has several tests of longstanding bugs involving weakrefs and
# cyclic gc.

# An instance of C1055820 has a self-loop, so becomes cyclic trash when
# unreachable.
class C1055820(object):
    def __init__(self, i):
        self.i = i
        self.loop = self

class GC_Detector(object):
    # Create an instance I.  Then gc hasn't happened again so long as
    # I.gc_happened is false.

    def __init__(self):
        self.gc_happened = False

        def it_happened(ignored):
            self.gc_happened = True

        # Create a piece of cyclic trash that triggers it_happened when
        # gc collects it.
        self.wr = weakref.ref(C1055820(666), it_happened)

def test_bug1055820b():
    # Corresponds to temp2b.py in the bug report.

    ouch = []
    def callback(ignored):
        ouch[:] = [wr() for wr in WRs]

    Cs = [C1055820(i) for i in range(2)]
    WRs = [weakref.ref(c, callback) for c in Cs]
    c = None

    gc.collect()
    expect(len(ouch), 0, "bug1055820b")
    # Make the two instances trash, and collect again.  The bug was that
    # the callback materialized a strong reference to an instance, but gc
    # cleared the instance's dict anyway.
    Cs = None
    gc.collect()
    expect(len(ouch), 2, "bug1055820b")  # else the callbacks didn't run
    for x in ouch:
        # If the callback resurrected one of these guys, the instance
        # would be damaged, with an empty __dict__.
        expect(x, None, "bug1055820b")

def test_bug1055820c():
    # Corresponds to temp2c.py in the bug report.  This is pretty elaborate.

    c0 = C1055820(0)
    # Move c0 into generation 2.
    gc.collect()

    c1 = C1055820(1)
    c1.keep_c0_alive = c0
    del c0.loop # now only c1 keeps c0 alive

    c2 = C1055820(2)
    c2wr = weakref.ref(c2) # no callback!

    ouch = []
    def callback(ignored):
        ouch[:] = [c2wr()]

    # The callback gets associated with a wr on an object in generation 2.
    c0wr = weakref.ref(c0, callback)

    c0 = c1 = c2 = None

    # What we've set up:  c0, c1, and c2 are all trash now.  c0 is in
    # generation 2.  The only thing keeping it alive is that c1 points to it.
    # c1 and c2 are in generation 0, and are in self-loops.  There's a global
    # weakref to c2 (c2wr), but that weakref has no callback.  There's also
    # a global weakref to c0 (c0wr), and that does have a callback, and that
    # callback references c2 via c2wr().
    #
    #               c0 has a wr with callback, which references c2wr
    #               ^
    #               |
    #               |     Generation 2 above dots
    #. . . . . . . .|. . . . . . . . . . . . . . . . . . . . . . . .
    #               |     Generation 0 below dots
    #               |
    #               |
    #            ^->c1   ^->c2 has a wr but no callback
    #            |  |    |  |
    #            <--v    <--v
    #
    # So this is the nightmare:  when generation 0 gets collected, we see that
    # c2 has a callback-free weakref, and c1 doesn't even have a weakref.
    # Collecting generation 0 doesn't see c0 at all, and c0 is the only object
    # that has a weakref with a callback.  gc clears c1 and c2.  Clearing c1
    # has the side effect of dropping the refcount on c0 to 0, so c0 goes
    # away (despite that it's in an older generation) and c0's wr callback
    # triggers.  That in turn materializes a reference to c2 via c2wr(), but
    # c2 gets cleared anyway by gc.

    # We want to let gc happen "naturally", to preserve the distinction
    # between generations.
    junk = []
    i = 0
    detector = GC_Detector()
    while not detector.gc_happened:
        i += 1
        if i > 10000:
            raise TestFailed("gc didn't happen after 10000 iterations")
        expect(len(ouch), 0, "bug1055820c")
        junk.append([])  # this will eventually trigger gc

    expect(len(ouch), 1, "bug1055820c")  # else the callback wasn't invoked
    for x in ouch:
        # If the callback resurrected c2, the instance would be damaged,
        # with an empty __dict__.
        expect(x, None, "bug1055820c")

def test_bug1055820d():
    # Corresponds to temp2d.py in the bug report.  This is very much like
    # test_bug1055820c, but uses a __del__ method instead of a weakref
    # callback to sneak in a resurrection of cyclic trash.

    ouch = []
    class D(C1055820):
        def __del__(self):
            ouch[:] = [c2wr()]

    d0 = D(0)
    # Move all the above into generation 2.
    gc.collect()

    c1 = C1055820(1)
    c1.keep_d0_alive = d0
    del d0.loop # now only c1 keeps d0 alive

    c2 = C1055820(2)
    c2wr = weakref.ref(c2) # no callback!

    d0 = c1 = c2 = None

    # What we've set up:  d0, c1, and c2 are all trash now.  d0 is in
    # generation 2.  The only thing keeping it alive is that c1 points to it.
    # c1 and c2 are in generation 0, and are in self-loops.  There's a global
    # weakref to c2 (c2wr), but that weakref has no callback.  There are no
    # other weakrefs.
    #
    #               d0 has a __del__ method that references c2wr
    #               ^
    #               |
    #               |     Generation 2 above dots
    #. . . . . . . .|. . . . . . . . . . . . . . . . . . . . . . . .
    #               |     Generation 0 below dots
    #               |
    #               |
    #            ^->c1   ^->c2 has a wr but no callback
    #            |  |    |  |
    #            <--v    <--v
    #
    # So this is the nightmare:  when generation 0 gets collected, we see that
    # c2 has a callback-free weakref, and c1 doesn't even have a weakref.
    # Collecting generation 0 doesn't see d0 at all.  gc clears c1 and c2.
    # Clearing c1 has the side effect of dropping the refcount on d0 to 0, so
    # d0 goes away (despite that it's in an older generation) and d0's __del__
    # triggers.  That in turn materializes a reference to c2 via c2wr(), but
    # c2 gets cleared anyway by gc.

    # We want to let gc happen "naturally", to preserve the distinction
    # between generations.
    detector = GC_Detector()
    junk = []
    i = 0
    while not detector.gc_happened:
        i += 1
        if i > 10000:
            raise TestFailed("gc didn't happen after 10000 iterations")
        expect(len(ouch), 0, "bug1055820d")
        junk.append([])  # this will eventually trigger gc

    expect(len(ouch), 1, "bug1055820d")  # else __del__ wasn't invoked
    for x in ouch:
        # If __del__ resurrected c2, the instance would be damaged, with an
        # empty __dict__.
        expect(x, None, "bug1055820d")

def test_all():
    gc.collect() # Delete 2nd generation garbage
    run_test("lists", test_list)
    run_test("dicts", test_dict)
    run_test("tuples", test_tuple)
    run_test("classes", test_class)
    run_test("new style classes", test_newstyleclass)
    run_test("instances", test_instance)
    run_test("new instances", test_newinstance)
    run_test("methods", test_method)
    run_test("functions", test_function)
    run_test("frames", test_frame)
    run_test("finalizers", test_finalizer)
    run_test("finalizers (new class)", test_finalizer_newclass)
    run_test("__del__", test_del)
    run_test("__del__ (new class)", test_del_newclass)
    run_test("get_count()", test_get_count)
    run_test("collect(n)", test_collect_generations)
    run_test("saveall", test_saveall)
    run_test("trashcan", test_trashcan)
    run_test("boom", test_boom)
    run_test("boom2", test_boom2)
    run_test("boom_new", test_boom_new)
    run_test("boom2_new", test_boom2_new)
    run_test("get_referents", test_get_referents)
    run_test("bug1055820b", test_bug1055820b)

    gc.enable()
    try:
        run_test("bug1055820c", test_bug1055820c)
    finally:
        gc.disable()

    gc.enable()
    try:
        run_test("bug1055820d", test_bug1055820d)
    finally:
        gc.disable()

def test():
    if verbose:
        print "disabling automatic collection"
    enabled = gc.isenabled()
    gc.disable()
    verify(not gc.isenabled())
    debug = gc.get_debug()
    gc.set_debug(debug & ~gc.DEBUG_LEAK) # this test is supposed to leak

    try:
        test_all()
    finally:
        gc.set_debug(debug)
        # test gc.enable() even if GC is disabled by default
        if verbose:
            print "restoring automatic collection"
        # make sure to always test gc.enable()
        gc.enable()
        verify(gc.isenabled())
        if not enabled:
            gc.disable()

test()