"""
TestCases for DB.associate.
"""

import shutil
import sys, os
import tempfile
import time
from pprint import pprint

try:
    from threading import Thread, currentThread
    have_threads = 1
except ImportError:
    have_threads = 0

import unittest
from bsddb.test.test_all import verbose

try:
    # For Pythons w/distutils pybsddb
    from bsddb3 import db, dbshelve
except ImportError:
    # For Python 2.3
    from bsddb import db, dbshelve


#----------------------------------------------------------------------

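# Sample data shared by all of the test cases: primary key -> (artist,
# title, genre).  The tests rely on record 6 being the only "Blues" entry
# (it is deliberately excluded from the secondary index) and on record 99
# being the only "Unknown" genre.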
musicdata = {
    1 : ("Bad English", "The Price Of Love", "Rock"),
    2 : ("DNA featuring Suzanne Vega", "Tom's Diner", "Rock"),
    3 : ("George Michael", "Praying For Time", "Rock"),
    4 : ("Gloria Estefan", "Here We Are", "Rock"),
    5 : ("Linda Ronstadt", "Don't Know Much", "Rock"),
    6 : ("Michael Bolton", "How Am I Supposed To Live Without You", "Blues"),
    7 : ("Paul Young", "Oh Girl", "Rock"),
    8 : ("Paula Abdul", "Opposites Attract", "Rock"),
    9 : ("Richard Marx", "Should've Known Better", "Rock"),
    10: ("Rod Stewart", "Forever Young", "Rock"),
    11: ("Roxette", "Dangerous", "Rock"),
    12: ("Sheena Easton", "The Lover In Me", "Rock"),
    13: ("Sinead O'Connor", "Nothing Compares 2 U", "Rock"),
    14: ("Stevie B.", "Because I Love You", "Rock"),
    15: ("Taylor Dayne", "Love Will Lead You Back", "Rock"),
    16: ("The Bangles", "Eternal Flame", "Rock"),
    17: ("Wilson Phillips", "Release Me", "Rock"),
    18: ("Billy Joel", "Blonde Over Blue", "Rock"),
    19: ("Billy Joel", "Famous Last Words", "Rock"),
    20: ("Billy Joel", "Lullabye (Goodnight, My Angel)", "Rock"),
    21: ("Billy Joel", "The River Of Dreams", "Rock"),
    22: ("Billy Joel", "Two Thousand Years", "Rock"),
    23: ("Janet Jackson", "Alright", "Rock"),
    24: ("Janet Jackson", "Black Cat", "Rock"),
    25: ("Janet Jackson", "Come Back To Me", "Rock"),
    26: ("Janet Jackson", "Escapade", "Rock"),
    27: ("Janet Jackson", "Love Will Never Do (Without You)", "Rock"),
    28: ("Janet Jackson", "Miss You Much", "Rock"),
    29: ("Janet Jackson", "Rhythm Nation", "Rock"),
    30: ("Janet Jackson", "State Of The World", "Rock"),
    31: ("Janet Jackson", "The Knowledge", "Rock"),
    32: ("Spyro Gyra", "End of Romanticism", "Jazz"),
    33: ("Spyro Gyra", "Heliopolis", "Jazz"),
    34: ("Spyro Gyra", "Jubilee", "Jazz"),
    35: ("Spyro Gyra", "Little Linda", "Jazz"),
    36: ("Spyro Gyra", "Morning Dance", "Jazz"),
    37: ("Spyro Gyra", "Song for Lorraine", "Jazz"),
    38: ("Yes", "Owner Of A Lonely Heart", "Rock"),
    39: ("Yes", "Rhythm Of Love", "Rock"),
    40: ("Cusco", "Dream Catcher", "New Age"),
    41: ("Cusco", "Geronimos Laughter", "New Age"),
    42: ("Cusco", "Ghost Dance", "New Age"),
    43: ("Blue Man Group", "Drumbone", "New Age"),
    44: ("Blue Man Group", "Endless Column", "New Age"),
    45: ("Blue Man Group", "Klein Mandelbrot", "New Age"),
    46: ("Kenny G", "Silhouette", "Jazz"),
    47: ("Sade", "Smooth Operator", "Jazz"),
    48: ("David Arkenstone", "Papillon (On The Wings Of The Butterfly)",
         "New Age"),
    49: ("David Arkenstone", "Stepping Stars", "New Age"),
    50: ("David Arkenstone", "Carnation Lily Lily Rose", "New Age"),
    51: ("David Lanz", "Behind The Waterfall", "New Age"),
    52: ("David Lanz", "Cristofori's Dream", "New Age"),
    53: ("David Lanz", "Heartsounds", "New Age"),
    54: ("David Lanz", "Leaves on the Seine", "New Age"),
    99: ("unknown artist", "Unnamed song", "Unknown"),
}

#----------------------------------------------------------------------


class AssociateErrorTestCase(unittest.TestCase):
    def setUp(self):
        self.filename = self.__class__.__name__ + '.db'
        homeDir = os.path.join(tempfile.gettempdir(), 'db_home%d'%os.getpid())
        self.homeDir = homeDir
        try:
            os.mkdir(homeDir)
        except os.error:
            import glob
            files = glob.glob(os.path.join(self.homeDir, '*'))
            for file in files:
                os.remove(file)
        self.env = db.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)

    def tearDown(self):
        self.env.close()
        self.env = None
        from test import test_support
        test_support.rmtree(self.homeDir)

    def test00_associateDBError(self):
        if verbose:
            print('\n', '-=' * 30)
            print("Running %s.test00_associateDBError..." % \
                  self.__class__.__name__)

        dupDB = db.DB(self.env)
        dupDB.set_flags(db.DB_DUP)
        dupDB.open(self.filename, "primary", db.DB_BTREE, db.DB_CREATE)

        secDB = db.DB(self.env)
        secDB.open(self.filename, "secondary", db.DB_BTREE, db.DB_CREATE)

        # Since dupDB has been configured to allow duplicates, it can't
        # be associated with a secondary.  Berkeley DB will return an error.
        try:
            def f(a,b): return a+b
            dupDB.associate(secDB, f)
        except db.DBError:
            # good
            secDB.close()
            dupDB.close()
        else:
            secDB.close()
            dupDB.close()
            self.fail("DBError exception was expected")


#----------------------------------------------------------------------


class AssociateTestCase(unittest.TestCase):
    keytype = ''
    envFlags = 0
    dbFlags = 0

    def setUp(self):
        self.filename = self.__class__.__name__ + '.db'
        homeDir = os.path.join(tempfile.gettempdir(), 'db_home%d'%os.getpid())
        self.homeDir = homeDir
        try:
            os.mkdir(homeDir)
        except os.error:
            import glob
            files = glob.glob(os.path.join(self.homeDir, '*'))
            for file in files:
                os.remove(file)
        self.env = db.DBEnv()
        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL |
                               db.DB_INIT_LOCK | db.DB_THREAD | self.envFlags)

    def tearDown(self):
        self.closeDB()
        self.env.close()
        self.env = None
        shutil.rmtree(self.homeDir)

    def addDataToDB(self, d, txn=None):
        for key, value in musicdata.items():
            if type(self.keytype) == type(''):
                key = ("%02d" % key).encode("utf-8")
            d.put(key, '|'.join(value).encode("utf-8"), txn=txn)

    def createDB(self, txn=None):
        self.cur = None
        self.secDB = None
        self.primary = db.DB(self.env)
        self.primary.set_get_returns_none(2)
        if db.version() >= (4, 1):
            self.primary.open(self.filename, "primary", self.dbtype,
                        db.DB_CREATE | db.DB_THREAD | self.dbFlags, txn=txn)
        else:
            self.primary.open(self.filename, "primary", self.dbtype,
                        db.DB_CREATE | db.DB_THREAD | self.dbFlags)

    def closeDB(self):
        if self.cur:
            self.cur.close()
            self.cur = None
        if self.secDB:
            self.secDB.close()
            self.secDB = None
        self.primary.close()
        self.primary = None

    def getDB(self):
        return self.primary


    def test01_associateWithDB(self):
        if verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_associateWithDB..." % \
                  self.__class__.__name__)

        self.createDB()

        self.secDB = db.DB(self.env)
        self.secDB.set_flags(db.DB_DUP)
        self.secDB.set_get_returns_none(2)
        self.secDB.open(self.filename, "secondary", db.DB_BTREE,
                   db.DB_CREATE | db.DB_THREAD | self.dbFlags)
        self.getDB().associate(self.secDB, self.getGenre)

        self.addDataToDB(self.getDB())

        self.finish_test(self.secDB)


    def test02_associateAfterDB(self):
        if verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_associateAfterDB..." % \
                  self.__class__.__name__)

        self.createDB()
        self.addDataToDB(self.getDB())

        self.secDB = db.DB(self.env)
        self.secDB.set_flags(db.DB_DUP)
        self.secDB.open(self.filename, "secondary", db.DB_BTREE,
                   db.DB_CREATE | db.DB_THREAD | self.dbFlags)

        # adding the DB_CREATE flag will cause it to index existing records
        self.getDB().associate(self.secDB, self.getGenre, db.DB_CREATE)

        self.finish_test(self.secDB)

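    # Shared checks used by the tests above: the single "Blues" record must
    # be absent from the secondary index, pget() on the secondary must map
    # back to primary key 99, and cursor traversals of both the primary and
    # the secondary must account for every record.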
    def finish_test(self, secDB, txn=None):
        # 'Blues' should not be in the secondary database
        vals = secDB.pget(b'Blues', txn=txn)
        assert vals == None, vals

        vals = secDB.pget(b'Unknown', txn=txn)
        assert vals[0] == 99 or vals[0] == b'99', vals
        vals[1].index(b'Unknown')
        vals[1].index(b'Unnamed')
        vals[1].index(b'unknown')

        if verbose:
            print("Primary key traversal:")
        self.cur = self.getDB().cursor(txn)
        count = 0
        rec = self.cur.first()
        while rec is not None:
            if type(self.keytype) == type(''):
                assert int(rec[0])  # for primary db, key is a number
            else:
                assert rec[0] and type(rec[0]) == type(0)
            count = count + 1
            if verbose:
                print(rec)
            rec = self.cur.next()
        assert count == len(musicdata)  # all items accounted for

        if verbose:
            print("Secondary key traversal:")
        self.cur = secDB.cursor(txn)
        count = 0

        # test cursor pget
        vals = self.cur.pget(b'Unknown', flags=db.DB_LAST)
        assert vals[1] == 99 or vals[1] == b'99', vals
        assert vals[0] == b'Unknown'
        vals[2].index(b'Unknown')
        vals[2].index(b'Unnamed')
        vals[2].index(b'unknown')

        vals = self.cur.pget(b'Unknown', data=b'wrong value', flags=db.DB_GET_BOTH)
        assert vals == None, vals

        rec = self.cur.first()
        assert rec[0] == b"Jazz"
        while rec is not None:
            count = count + 1
            if verbose:
                print(rec)
            rec = self.cur.next()
        # all items accounted for EXCEPT for 1 with "Blues" genre
        assert count == len(musicdata)-1

        self.cur = None

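    # Secondary-key callback passed to DB.associate(): given the primary
    # key and data, return the bytes to index under in the secondary, or
    # db.DB_DONOTINDEX to leave the record out of the secondary index.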
    def getGenre(self, priKey, priData):
        assert type(priData) == type(b"")
        priData = priData.decode("utf-8")
        if verbose:
            print('getGenre key: %r data: %r' % (priKey, priData))
        genre = priData.split('|')[2]
        if genre == 'Blues':
            return db.DB_DONOTINDEX
        else:
            return genre.encode("utf-8")


#----------------------------------------------------------------------


class AssociateHashTestCase(AssociateTestCase):
    dbtype = db.DB_HASH

class AssociateBTreeTestCase(AssociateTestCase):
    dbtype = db.DB_BTREE

class AssociateRecnoTestCase(AssociateTestCase):
    dbtype = db.DB_RECNO
    keytype = 0


#----------------------------------------------------------------------

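# BTree association exercised inside an explicit transaction: the
# environment is opened with DB_INIT_TXN and test13 passes the same
# transaction handle to createDB(), open(), associate() and the puts.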
class AssociateBTreeTxnTestCase(AssociateBTreeTestCase):
    envFlags = db.DB_INIT_TXN
    dbFlags = 0

    def txn_finish_test(self, sDB, txn):
        try:
            self.finish_test(sDB, txn=txn)
        finally:
            if self.cur:
                self.cur.close()
                self.cur = None
            if txn:
                txn.commit()

    def test13_associate_in_transaction(self):
        if verbose:
            print('\n', '-=' * 30)
            print("Running %s.test13_associate_in_transaction..." % \
                  self.__class__.__name__)

        txn = self.env.txn_begin()
        try:
            self.createDB(txn=txn)

            self.secDB = db.DB(self.env)
            self.secDB.set_flags(db.DB_DUP)
            self.secDB.set_get_returns_none(2)
            self.secDB.open(self.filename, "secondary", db.DB_BTREE,
                       db.DB_CREATE | db.DB_THREAD, txn=txn)
            if db.version() >= (4,1):
                self.getDB().associate(self.secDB, self.getGenre, txn=txn)
            else:
                self.getDB().associate(self.secDB, self.getGenre)

            self.addDataToDB(self.getDB(), txn=txn)
        except:
            txn.abort()
            raise

        self.txn_finish_test(self.secDB, txn=txn)


#----------------------------------------------------------------------

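# The same tests run through dbshelve: values are stored as pickled Python
# objects, so getGenre() receives the original (artist, title, genre)
# tuple instead of a byte string.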
class ShelveAssociateTestCase(AssociateTestCase):

    def createDB(self):
        self.primary = dbshelve.open(self.filename,
                                     dbname="primary",
                                     dbenv=self.env,
                                     filetype=self.dbtype)

    def addDataToDB(self, d):
        for key, value in musicdata.items():
            if type(self.keytype) == type(''):
                key = ("%02d" % key).encode("utf-8")
            d.put(key, value)    # save the value as is this time

    def getGenre(self, priKey, priData):
        assert type(priData) == type(())
        if verbose:
            print('getGenre key: %r data: %r' % (priKey, priData))
        genre = priData[2]
        if genre == 'Blues':
            return db.DB_DONOTINDEX
        else:
            return genre.encode("utf-8")


class ShelveAssociateHashTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_HASH

class ShelveAssociateBTreeTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_BTREE

class ShelveAssociateRecnoTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_RECNO
    keytype = 0


#----------------------------------------------------------------------

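# Variant that populates the primary database from two writer threads
# running concurrently.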
class ThreadedAssociateTestCase(AssociateTestCase):

    def addDataToDB(self, d):
        t1 = Thread(target = self.writer1,
                    args = (d, ))
        t2 = Thread(target = self.writer2,
                    args = (d, ))

        t1.start()
        t2.start()
        t1.join()
        t2.join()

    def writer1(self, d):
        for key, value in musicdata.items():
            if type(self.keytype) == type(''):
                key = ("%02d" % key).encode("utf-8")
            d.put(key, '|'.join(value))

    def writer2(self, d):
        for x in range(100, 600):
            key = 'z%2d' % x
            value = [key] * 4
            d.put(key, '|'.join(value))


class ThreadedAssociateHashTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_HASH

class ThreadedAssociateBTreeTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_BTREE

class ThreadedAssociateRecnoTestCase(ShelveAssociateTestCase):
    dbtype = db.DB_RECNO
    keytype = 0


#----------------------------------------------------------------------

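# The suites are only added when the installed Berkeley DB is new enough:
# associate() requires at least 3.3.11, and the transactional variant is
# assumed to require at least 4.1, matching the version checks below.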
def test_suite():
    suite = unittest.TestSuite()

    if db.version() >= (3, 3, 11):
        suite.addTest(unittest.makeSuite(AssociateErrorTestCase))

        suite.addTest(unittest.makeSuite(AssociateHashTestCase))
        suite.addTest(unittest.makeSuite(AssociateBTreeTestCase))
        suite.addTest(unittest.makeSuite(AssociateRecnoTestCase))

        if db.version() >= (4, 1):
            suite.addTest(unittest.makeSuite(AssociateBTreeTxnTestCase))

        suite.addTest(unittest.makeSuite(ShelveAssociateHashTestCase))
        suite.addTest(unittest.makeSuite(ShelveAssociateBTreeTestCase))
        suite.addTest(unittest.makeSuite(ShelveAssociateRecnoTestCase))

        if have_threads:
            suite.addTest(unittest.makeSuite(ThreadedAssociateHashTestCase))
            suite.addTest(unittest.makeSuite(ThreadedAssociateBTreeTestCase))
            suite.addTest(unittest.makeSuite(ThreadedAssociateRecnoTestCase))

    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')