Get rid of dict.has_key(). Boy this has a lot of repercussions!

Not all code has been fixed yet; this is just a checkpoint...
The C API still has PyDict_HasKey() and _HasKeyString(); not sure
if I want to change those just yet.
Guido van Rossum 2006-08-18 22:13:04 +00:00
parent d2dbecb4ae
commit e2b70bcf74
93 changed files with 215 additions and 313 deletions
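
The pattern applied throughout the hunks below is mechanical: every d.has_key(k) call becomes the membership test k in d, and mapping classes that used to define has_key() provide a __contains__() method instead (or alias one to the other during the transition). A minimal sketch of the before/after, using a hypothetical Registry class rather than any class touched by this commit:

    # Old spelling: d.has_key(k)      New spelling: k in d, backed by __contains__.
    class Registry(object):                 # hypothetical example class
        def __init__(self):
            self._data = {}
        def __setitem__(self, key, value):
            self._data[key] = value
        def __contains__(self, key):        # replaces a def has_key(self, key)
            return key in self._data

    r = Registry()
    r['spam'] = 1
    print('spam' in r)      # True  (was r.has_key('spam'))
    print('eggs' not in r)  # True  (was not r.has_key('eggs'))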

View File

@ -1115,7 +1115,7 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
/* /*
On success, return 1 if the mapping object has the key, key, On success, return 1 if the mapping object has the key, key,
and 0 otherwise. This is equivalent to the Python expression: and 0 otherwise. This is equivalent to the Python expression:
o.has_key(key). key in o.
This function always succeeds. This function always succeeds.
*/ */
@ -1125,7 +1125,7 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
/* /*
Return 1 if the mapping object has the key, key, Return 1 if the mapping object has the key, key,
and 0 otherwise. This is equivalent to the Python expression: and 0 otherwise. This is equivalent to the Python expression:
o.has_key(key). key in o.
This function always succeeds. This function always succeeds.

View File

@ -174,7 +174,7 @@ class XMLRPCDocGenerator:
methods = {} methods = {}
for method_name in self.system_listMethods(): for method_name in self.system_listMethods():
if self.funcs.has_key(method_name): if method_name in self.funcs:
method = self.funcs[method_name] method = self.funcs[method_name]
elif self.instance is not None: elif self.instance is not None:
method_info = [None, None] # argspec, documentation method_info = [None, None] # argspec, documentation

View File

@ -313,7 +313,7 @@ class SimpleXMLRPCDispatcher:
Returns a string containing documentation for the specified method.""" Returns a string containing documentation for the specified method."""
method = None method = None
if self.funcs.has_key(method_name): if method_name in self.funcs:
method = self.funcs[method_name] method = self.funcs[method_name]
elif self.instance is not None: elif self.instance is not None:
# Instance can implement _methodHelp to return help for a method # Instance can implement _methodHelp to return help for a method

View File

@ -41,7 +41,6 @@ class UserDict:
def iterkeys(self): return self.data.iterkeys() def iterkeys(self): return self.data.iterkeys()
def itervalues(self): return self.data.itervalues() def itervalues(self): return self.data.itervalues()
def values(self): return self.data.values() def values(self): return self.data.values()
def has_key(self, key): return self.data.has_key(key)
def update(self, dict=None, **kwargs): def update(self, dict=None, **kwargs):
if dict is None: if dict is None:
pass pass
@ -55,11 +54,11 @@ class UserDict:
if len(kwargs): if len(kwargs):
self.data.update(kwargs) self.data.update(kwargs)
def get(self, key, failobj=None): def get(self, key, failobj=None):
if not self.has_key(key): if key not in self:
return failobj return failobj
return self[key] return self[key]
def setdefault(self, key, failobj=None): def setdefault(self, key, failobj=None):
if not self.has_key(key): if key not in self:
self[key] = failobj self[key] = failobj
return self[key] return self[key]
def pop(self, key, *args): def pop(self, key, *args):
@ -91,14 +90,12 @@ class DictMixin:
def __iter__(self): def __iter__(self):
for k in self.keys(): for k in self.keys():
yield k yield k
def has_key(self, key): def __contains__(self, key):
try: try:
value = self[key] value = self[key]
except KeyError: except KeyError:
return False return False
return True return True
def __contains__(self, key):
return self.has_key(key)
# third level takes advantage of second level definitions # third level takes advantage of second level definitions
def iteritems(self): def iteritems(self):
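
After the hunk above, DictMixin derives membership straight from __contains__ (built on __getitem__) instead of routing the in operator through has_key(). A minimal sketch, assuming Python 2's UserDict module and a hypothetical SparseMap class, of what a subclass still needs to provide:

    from UserDict import DictMixin          # Python 2 location of the mixin

    class SparseMap(DictMixin):             # hypothetical example class
        def __init__(self):
            self._store = {}
        def __getitem__(self, key):
            return self._store[key]         # a KeyError here makes "in" return False
        def __setitem__(self, key, value):
            self._store[key] = value
        def keys(self):
            return self._store.keys()

    m = SparseMap()
    m['x'] = 1
    print('x' in m)     # True, via the new DictMixin.__contains__
    print('y' in m)     # False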

View File

@ -247,7 +247,7 @@ class dispatcher:
fd = self._fileno fd = self._fileno
if map is None: if map is None:
map = self._map map = self._map
if map.has_key(fd): if fd in map:
#self.log_info('closing channel %d:%s' % (fd, self)) #self.log_info('closing channel %d:%s' % (fd, self))
del map[fd] del map[fd]
self._fileno = None self._fileno = None

View File

@ -133,8 +133,7 @@ class Bdb:
raise NotImplementedError, "subclass of bdb must implement do_clear()" raise NotImplementedError, "subclass of bdb must implement do_clear()"
def break_anywhere(self, frame): def break_anywhere(self, frame):
return self.breaks.has_key( return self.canonic(frame.f_code.co_filename) in self.breaks
self.canonic(frame.f_code.co_filename))
# Derived classes should override the user_* methods # Derived classes should override the user_* methods
# to gain control. # to gain control.
@ -245,7 +244,7 @@ class Bdb:
# pair, then remove the breaks entry # pair, then remove the breaks entry
for bp in Breakpoint.bplist[filename, lineno][:]: for bp in Breakpoint.bplist[filename, lineno][:]:
bp.deleteMe() bp.deleteMe()
if not Breakpoint.bplist.has_key((filename, lineno)): if (filename, lineno) not in Breakpoint.bplist:
self.breaks[filename].remove(lineno) self.breaks[filename].remove(lineno)
if not self.breaks[filename]: if not self.breaks[filename]:
del self.breaks[filename] del self.breaks[filename]
@ -453,7 +452,7 @@ class Breakpoint:
Breakpoint.next = Breakpoint.next + 1 Breakpoint.next = Breakpoint.next + 1
# Build the two lists # Build the two lists
self.bpbynumber.append(self) self.bpbynumber.append(self)
if self.bplist.has_key((file, line)): if (file, line) in self.bplist:
self.bplist[file, line].append(self) self.bplist[file, line].append(self)
else: else:
self.bplist[file, line] = [self] self.bplist[file, line] = [self]
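
The Breakpoint hunk above shows the rewrite is equally mechanical for tuple keys: bplist.has_key((file, line)) becomes (file, line) in bplist. A tiny sketch with a hypothetical (filename, lineno) pair:

    bplist = {}
    key = ('script.py', 12)        # hypothetical (filename, lineno) pair

    if key in bplist:              # was bplist.has_key((file, line))
        bplist[key].append('bp2')
    else:
        bplist[key] = ['bp1']

    print(('script.py', 12) in bplist)   # True: "in" accepts tuple keys just as has_key did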

View File

@ -255,6 +255,8 @@ class _DBWithCursor(_iter_mixin):
self._checkOpen() self._checkOpen()
return _DeadlockWrap(self.db.has_key, key) return _DeadlockWrap(self.db.has_key, key)
__contains__ = has_key
def set_location(self, key): def set_location(self, key):
self._checkOpen() self._checkOpen()
self._checkCursor() self._checkCursor()
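
The __contains__ = has_key line added above is the lightweight transition idiom used where has_key() is kept for old callers: the existing method is aliased so the in operator reuses the same body. A rough sketch of the same idiom on a hypothetical wrapper class (not bsddb itself):

    class DBWrapper:                      # hypothetical stand-in for _DBWithCursor
        def __init__(self, data):
            self._data = dict(data)
        def has_key(self, key):           # legacy spelling, kept for old callers
            return key in self._data
        __contains__ = has_key            # "key in wrapper" now reuses the same code

    w = DBWrapper({'a': 1})
    print(w.has_key('a'))    # True
    print('a' in w)          # True, via the alias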

View File

@ -21,7 +21,7 @@
# added to _bsddb.c. # added to _bsddb.c.
# #
import db from . import db
try: try:
from UserDict import DictMixin from UserDict import DictMixin
@ -161,6 +161,8 @@ class DB(DictMixin):
return self._cobj.key_range(*args, **kwargs) return self._cobj.key_range(*args, **kwargs)
def has_key(self, *args, **kwargs): def has_key(self, *args, **kwargs):
return self._cobj.has_key(*args, **kwargs) return self._cobj.has_key(*args, **kwargs)
def __contains__(self, key):
return self._cobj.has_key(key)
def items(self, *args, **kwargs): def items(self, *args, **kwargs):
return self._cobj.items(*args, **kwargs) return self._cobj.items(*args, **kwargs)
def keys(self, *args, **kwargs): def keys(self, *args, **kwargs):

View File

@ -35,7 +35,7 @@ try:
except ImportError: except ImportError:
# DictMixin is new in Python 2.3 # DictMixin is new in Python 2.3
class DictMixin: pass class DictMixin: pass
import db from . import db
#------------------------------------------------------------------------ #------------------------------------------------------------------------
@ -197,6 +197,10 @@ class DBShelf(DictMixin):
raise NotImplementedError raise NotImplementedError
def __contains__(self, key):
return self.has_key(key)
#---------------------------------------------- #----------------------------------------------
# Methods allowed to pass-through to self.db # Methods allowed to pass-through to self.db
# #

View File

@ -55,7 +55,7 @@ def DeadlockWrap(function, *_args, **_kwargs):
""" """
sleeptime = _deadlock_MinSleepTime sleeptime = _deadlock_MinSleepTime
max_retries = _kwargs.get('max_retries', -1) max_retries = _kwargs.get('max_retries', -1)
if _kwargs.has_key('max_retries'): if 'max_retries' in _kwargs:
del _kwargs['max_retries'] del _kwargs['max_retries']
while True: while True:
try: try:

View File

@ -41,8 +41,12 @@ class PrintInfoFakeTest(unittest.TestCase):
# This little hack is for when this module is run as main and all the # This little hack is for when this module is run as main and all the
# other modules import it so they will still be able to get the right # other modules import it so they will still be able to get the right
# verbose setting. It's confusing but it works. # verbose setting. It's confusing but it works.
import test_all try:
test_all.verbose = verbose import test_all
except ImportError:
pass
else:
test_all.verbose = verbose
def suite(): def suite():

View File

@ -14,7 +14,7 @@ except ImportError:
have_threads = 0 have_threads = 0
import unittest import unittest
from test_all import verbose from .test_all import verbose
try: try:
# For Pythons w/distutils pybsddb # For Pythons w/distutils pybsddb

View File

@ -20,7 +20,7 @@ except ImportError:
# For Python 2.3 # For Python 2.3
from bsddb import db from bsddb import db
from test_all import verbose from .test_all import verbose
DASH = '-' DASH = '-'

View File

@ -3,9 +3,10 @@ TestCases for python DB Btree key comparison function.
""" """
import sys, os, re import sys, os, re
import test_all
from cStringIO import StringIO from cStringIO import StringIO
from . import test_all
import unittest import unittest
try: try:
# For Pythons w/distutils pybsddb # For Pythons w/distutils pybsddb

View File

@ -7,7 +7,7 @@ import sys, os, string
import unittest import unittest
import tempfile import tempfile
from test_all import verbose from .test_all import verbose
try: try:
# For Pythons w/distutils pybsddb # For Pythons w/distutils pybsddb

View File

@ -15,7 +15,7 @@ except ImportError:
# For Python 2.3 # For Python 2.3
from bsddb import db, dbshelve from bsddb import db, dbshelve
from test_all import verbose from .test_all import verbose
#---------------------------------------------------------------------- #----------------------------------------------------------------------

View File

@ -28,7 +28,7 @@ except ImportError:
import pickle import pickle
import unittest import unittest
from test_all import verbose from .test_all import verbose
try: try:
# For Pythons w/distutils pybsddb # For Pythons w/distutils pybsddb

View File

@ -15,7 +15,7 @@ except ImportError:
# For Python 2.3 # For Python 2.3
from bsddb import db from bsddb import db
from test_all import verbose from .test_all import verbose
# We're going to get warnings in this module about trying to close the db when # We're going to get warnings in this module about trying to close the db when
# its env is already closed. Let's just ignore those. # its env is already closed. Let's just ignore those.

View File

@ -14,7 +14,7 @@ except ImportError:
# For Python 2.3 # For Python 2.3
from bsddb import db from bsddb import db
from test_all import verbose from .test_all import verbose
#---------------------------------------------------------------------- #----------------------------------------------------------------------

View File

@ -13,7 +13,7 @@ except ImportError:
have_threads = 0 have_threads = 0
import unittest import unittest
from test_all import verbose from .test_all import verbose
try: try:
# For Pythons w/distutils pybsddb # For Pythons w/distutils pybsddb

View File

@ -15,7 +15,7 @@ except ImportError:
import unittest import unittest
from test_all import verbose from .test_all import verbose
try: try:
# For Pythons w/distutils pybsddb # For Pythons w/distutils pybsddb

View File

@ -14,7 +14,7 @@ except ImportError:
# For Python 2.3 # For Python 2.3
from bsddb import db from bsddb import db
from test_all import verbose from .test_all import verbose
#---------------------------------------------------------------------- #----------------------------------------------------------------------

View File

@ -8,7 +8,7 @@ import tempfile
from pprint import pprint from pprint import pprint
import unittest import unittest
from test_all import verbose from .test_all import verbose
try: try:
# For Pythons w/distutils pybsddb # For Pythons w/distutils pybsddb

View File

@ -10,7 +10,7 @@ try:
except ImportError: except ImportError:
from bsddb import db from bsddb import db
from test_all import verbose from .test_all import verbose
class DBSequenceTest(unittest.TestCase): class DBSequenceTest(unittest.TestCase):

View File

@ -31,7 +31,7 @@ except NameError:
pass pass
import unittest import unittest
from test_all import verbose from .test_all import verbose
try: try:
# For Pythons w/distutils pybsddb # For Pythons w/distutils pybsddb

View File

@ -608,14 +608,6 @@ class FieldStorage:
if item.name not in keys: keys.append(item.name) if item.name not in keys: keys.append(item.name)
return keys return keys
def has_key(self, key):
"""Dictionary style has_key() method."""
if self.list is None:
raise TypeError, "not indexable"
for item in self.list:
if item.name == key: return True
return False
def __contains__(self, key): def __contains__(self, key):
"""Dictionary style __contains__ method.""" """Dictionary style __contains__ method."""
if self.list is None: if self.list is None:

View File

@ -199,7 +199,7 @@ class SequenceMatcher:
# DON'T USE! Only __chain_b uses this. Use isbjunk. # DON'T USE! Only __chain_b uses this. Use isbjunk.
# isbjunk # isbjunk
# for x in b, isbjunk(x) == isjunk(x) but much faster; # for x in b, isbjunk(x) == isjunk(x) but much faster;
# it's really the has_key method of a hidden dict. # it's really the __contains__ method of a hidden dict.
# DOES NOT WORK for x in a! # DOES NOT WORK for x in a!
# isbpopular # isbpopular
# for x in b, isbpopular(x) is true iff b is reasonably long # for x in b, isbpopular(x) is true iff b is reasonably long
@ -341,8 +341,8 @@ class SequenceMatcher:
# lot of junk in the sequence, the number of *unique* junk # lot of junk in the sequence, the number of *unique* junk
# elements is probably small. So the memory burden of keeping # elements is probably small. So the memory burden of keeping
# this dict alive is likely trivial compared to the size of b2j. # this dict alive is likely trivial compared to the size of b2j.
self.isbjunk = junkdict.has_key self.isbjunk = junkdict.__contains__
self.isbpopular = populardict.has_key self.isbpopular = populardict.__contains__
def find_longest_match(self, alo, ahi, blo, bhi): def find_longest_match(self, alo, ahi, blo, bhi):
"""Find longest matching block in a[alo:ahi] and b[blo:bhi]. """Find longest matching block in a[alo:ahi] and b[blo:bhi].
@ -674,7 +674,7 @@ class SequenceMatcher:
# avail[x] is the number of times x appears in 'b' less the # avail[x] is the number of times x appears in 'b' less the
# number of times we've seen it in 'a' so far ... kinda # number of times we've seen it in 'a' so far ... kinda
avail = {} avail = {}
availhas, matches = avail.has_key, 0 availhas, matches = avail.__contains__, 0
for elt in self.a: for elt in self.a:
if availhas(elt): if availhas(elt):
numb = avail[elt] numb = avail[elt]
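
The SequenceMatcher hunk swaps the bound has_key method for the bound __contains__ method; either way the result is a fast membership predicate without defining a lambda per call. A small illustrative sketch of that idiom (the junk characters are an assumption):

    junk = {' ': True, '\t': True}
    isjunk = junk.__contains__        # bound method: isjunk(x) == (x in junk)

    print(isjunk(' '))                # True
    print(isjunk('a'))                # False

    # Typical use, as in SequenceMatcher: filter a sequence through the
    # predicate without a helper function.
    line = "a b\tc"
    print([ch for ch in line if not isjunk(ch)])   # ['a', 'b', 'c']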

View File

@ -124,7 +124,7 @@ ARCHIVE_FORMATS = {
def check_archive_formats (formats): def check_archive_formats (formats):
for format in formats: for format in formats:
if not ARCHIVE_FORMATS.has_key(format): if format not in ARCHIVE_FORMATS:
return format return format
else: else:
return None return None

View File

@ -159,7 +159,7 @@ class CCompiler:
# basically the same things with Unix C compilers. # basically the same things with Unix C compilers.
for key in args.keys(): for key in args.keys():
if not self.executables.has_key(key): if key not in self.executables:
raise ValueError, \ raise ValueError, \
"unknown executable '%s' for class %s" % \ "unknown executable '%s' for class %s" % \
(key, self.__class__.__name__) (key, self.__class__.__name__)

View File

@ -341,7 +341,7 @@ class build_ext (Command):
# Medium-easy stuff: same syntax/semantics, different names. # Medium-easy stuff: same syntax/semantics, different names.
ext.runtime_library_dirs = build_info.get('rpath') ext.runtime_library_dirs = build_info.get('rpath')
if build_info.has_key('def_file'): if 'def_file' in build_info:
log.warn("'def_file' element of build info dict " log.warn("'def_file' element of build info dict "
"no longer supported") "no longer supported")

View File

@ -101,9 +101,9 @@ def setup (**attrs):
else: else:
klass = Distribution klass = Distribution
if not attrs.has_key('script_name'): if 'script_name' not in attrs:
attrs['script_name'] = os.path.basename(sys.argv[0]) attrs['script_name'] = os.path.basename(sys.argv[0])
if not attrs.has_key('script_args'): if 'script_args' not in attrs:
attrs['script_args'] = sys.argv[1:] attrs['script_args'] = sys.argv[1:]
# Create the Distribution instance, using the remaining arguments # Create the Distribution instance, using the remaining arguments
@ -111,7 +111,7 @@ def setup (**attrs):
try: try:
_setup_distribution = dist = klass(attrs) _setup_distribution = dist = klass(attrs)
except DistutilsSetupError, msg: except DistutilsSetupError, msg:
if attrs.has_key('name'): if 'name' in attrs:
raise SystemExit, "error in %s setup command: %s" % \ raise SystemExit, "error in %s setup command: %s" % \
(attrs['name'], msg) (attrs['name'], msg)
else: else:

View File

@ -239,7 +239,7 @@ Common commands: (see '--help-commands' for more)
for (opt, val) in cmd_options.items(): for (opt, val) in cmd_options.items():
opt_dict[opt] = ("setup script", val) opt_dict[opt] = ("setup script", val)
if attrs.has_key('licence'): if 'licence' in attrs:
attrs['license'] = attrs['licence'] attrs['license'] = attrs['licence']
del attrs['licence'] del attrs['licence']
msg = "'licence' distribution option is deprecated; use 'license'" msg = "'licence' distribution option is deprecated; use 'license'"
@ -343,7 +343,7 @@ Common commands: (see '--help-commands' for more)
user_filename = "pydistutils.cfg" user_filename = "pydistutils.cfg"
# And look for the user config file # And look for the user config file
if os.environ.has_key('HOME'): if 'HOME' in os.environ:
user_file = os.path.join(os.environ.get('HOME'), user_filename) user_file = os.path.join(os.environ.get('HOME'), user_filename)
if os.path.isfile(user_file): if os.path.isfile(user_file):
files.append(user_file) files.append(user_file)
@ -388,7 +388,7 @@ Common commands: (see '--help-commands' for more)
# If there was a "global" section in the config file, use it # If there was a "global" section in the config file, use it
# to set Distribution options. # to set Distribution options.
if self.command_options.has_key('global'): if 'global' in self.command_options:
for (opt, (src, val)) in self.command_options['global'].items(): for (opt, (src, val)) in self.command_options['global'].items():
alias = self.negative_opt.get(opt) alias = self.negative_opt.get(opt)
try: try:
@ -907,7 +907,7 @@ Common commands: (see '--help-commands' for more)
try: try:
is_string = type(value) is StringType is_string = type(value) is StringType
if neg_opt.has_key(option) and is_string: if option in neg_opt and is_string:
setattr(command_obj, neg_opt[option], not strtobool(value)) setattr(command_obj, neg_opt[option], not strtobool(value))
elif option in bool_opts and is_string: elif option in bool_opts and is_string:
setattr(command_obj, option, strtobool(value)) setattr(command_obj, option, strtobool(value))

View File

@ -97,7 +97,7 @@ class FancyGetopt:
self._build_index() self._build_index()
def add_option (self, long_option, short_option=None, help_string=None): def add_option (self, long_option, short_option=None, help_string=None):
if self.option_index.has_key(long_option): if long_option in self.option_index:
raise DistutilsGetoptError, \ raise DistutilsGetoptError, \
"option conflict: already an option '%s'" % long_option "option conflict: already an option '%s'" % long_option
else: else:
@ -109,7 +109,7 @@ class FancyGetopt:
def has_option (self, long_option): def has_option (self, long_option):
"""Return true if the option table for this parser has an """Return true if the option table for this parser has an
option with long name 'long_option'.""" option with long name 'long_option'."""
return self.option_index.has_key(long_option) return long_option in self.option_index
def get_attr_name (self, long_option): def get_attr_name (self, long_option):
"""Translate long option name 'long_option' to the form it """Translate long option name 'long_option' to the form it
@ -121,11 +121,11 @@ class FancyGetopt:
def _check_alias_dict (self, aliases, what): def _check_alias_dict (self, aliases, what):
assert type(aliases) is DictionaryType assert type(aliases) is DictionaryType
for (alias, opt) in aliases.items(): for (alias, opt) in aliases.items():
if not self.option_index.has_key(alias): if alias not in self.option_index:
raise DistutilsGetoptError, \ raise DistutilsGetoptError, \
("invalid %s '%s': " ("invalid %s '%s': "
"option '%s' not defined") % (what, alias, alias) "option '%s' not defined") % (what, alias, alias)
if not self.option_index.has_key(opt): if opt not in self.option_index:
raise DistutilsGetoptError, \ raise DistutilsGetoptError, \
("invalid %s '%s': " ("invalid %s '%s': "
"aliased option '%s' not defined") % (what, alias, opt) "aliased option '%s' not defined") % (what, alias, opt)

View File

@ -150,22 +150,22 @@ def customize_compiler(compiler):
get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS', get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
'CCSHARED', 'LDSHARED', 'SO') 'CCSHARED', 'LDSHARED', 'SO')
if os.environ.has_key('CC'): if 'CC' in os.environ:
cc = os.environ['CC'] cc = os.environ['CC']
if os.environ.has_key('CXX'): if 'CXX' in os.environ:
cxx = os.environ['CXX'] cxx = os.environ['CXX']
if os.environ.has_key('LDSHARED'): if 'LDSHARED' in os.environ:
ldshared = os.environ['LDSHARED'] ldshared = os.environ['LDSHARED']
if os.environ.has_key('CPP'): if 'CPP' in os.environ:
cpp = os.environ['CPP'] cpp = os.environ['CPP']
else: else:
cpp = cc + " -E" # not always cpp = cc + " -E" # not always
if os.environ.has_key('LDFLAGS'): if 'LDFLAGS' in os.environ:
ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ldshared = ldshared + ' ' + os.environ['LDFLAGS']
if os.environ.has_key('CFLAGS'): if 'CFLAGS' in os.environ:
cflags = opt + ' ' + os.environ['CFLAGS'] cflags = opt + ' ' + os.environ['CFLAGS']
ldshared = ldshared + ' ' + os.environ['CFLAGS'] ldshared = ldshared + ' ' + os.environ['CFLAGS']
if os.environ.has_key('CPPFLAGS'): if 'CPPFLAGS' in os.environ:
cpp = cpp + ' ' + os.environ['CPPFLAGS'] cpp = cpp + ' ' + os.environ['CPPFLAGS']
cflags = cflags + ' ' + os.environ['CPPFLAGS'] cflags = cflags + ' ' + os.environ['CPPFLAGS']
ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
@ -277,12 +277,12 @@ def parse_makefile(fn, g=None):
if m: if m:
n = m.group(1) n = m.group(1)
found = True found = True
if done.has_key(n): if n in done:
item = str(done[n]) item = str(done[n])
elif notdone.has_key(n): elif n in notdone:
# get it on a subsequent round # get it on a subsequent round
found = False found = False
elif os.environ.has_key(n): elif n in os.environ:
# do it like make: fall back to environment # do it like make: fall back to environment
item = os.environ[n] item = os.environ[n]
else: else:
@ -366,7 +366,7 @@ def _init_posix():
# MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
# it needs to be compatible. # it needs to be compatible.
# If it isn't set we set it to the configure-time value # If it isn't set we set it to the configure-time value
if sys.platform == 'darwin' and g.has_key('MACOSX_DEPLOYMENT_TARGET'): if sys.platform == 'darwin' and 'MACOSX_DEPLOYMENT_TARGET' in g:
cfg_target = g['MACOSX_DEPLOYMENT_TARGET'] cfg_target = g['MACOSX_DEPLOYMENT_TARGET']
cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '') cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
if cur_target == '': if cur_target == '':
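
The customize_compiler() hunk above turns a run of os.environ.has_key() checks into membership tests; where only a fallback value is needed, os.environ.get() is an even shorter spelling. A brief sketch, with the 'cc' default chosen purely for illustration:

    import os

    # Membership test replaces os.environ.has_key('CC'); .get() covers the
    # common "use a default when the variable is unset" case in one step.
    if 'CC' in os.environ:
        cc = os.environ['CC']
    else:
        cc = 'cc'
    cpp = os.environ.get('CPP', cc + ' -E')
    print(cc)
    print(cpp)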

View File

@ -89,7 +89,7 @@ class TextFile:
# set values for all options -- either from client option hash # set values for all options -- either from client option hash
# or fallback to default_options # or fallback to default_options
for opt in self.default_options.keys(): for opt in self.default_options.keys():
if options.has_key (opt): if opt in options:
setattr (self, opt, options[opt]) setattr (self, opt, options[opt])
else: else:
@ -97,7 +97,7 @@ class TextFile:
# sanity check client option hash # sanity check client option hash
for opt in options.keys(): for opt in options.keys():
if not self.default_options.has_key (opt): if opt not in self.default_options:
raise KeyError, "invalid TextFile option '%s'" % opt raise KeyError, "invalid TextFile option '%s'" % opt
if file is None: if file is None:

View File

@ -200,11 +200,11 @@ def check_environ ():
if _environ_checked: if _environ_checked:
return return
if os.name == 'posix' and not os.environ.has_key('HOME'): if os.name == 'posix' and 'HOME' not in os.environ:
import pwd import pwd
os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
if not os.environ.has_key('PLAT'): if 'PLAT' not in os.environ:
os.environ['PLAT'] = get_platform() os.environ['PLAT'] = get_platform()
_environ_checked = 1 _environ_checked = 1
@ -222,7 +222,7 @@ def subst_vars (s, local_vars):
check_environ() check_environ()
def _subst (match, local_vars=local_vars): def _subst (match, local_vars=local_vars):
var_name = match.group(1) var_name = match.group(1)
if local_vars.has_key(var_name): if var_name in local_vars:
return str(local_vars[var_name]) return str(local_vars[var_name])
else: else:
return os.environ[var_name] return os.environ[var_name]

View File

@ -195,9 +195,6 @@ class _Database(UserDict.DictMixin):
def keys(self): def keys(self):
return self._index.keys() return self._index.keys()
def has_key(self, key):
return key in self._index
def __contains__(self, key): def __contains__(self, key):
return key in self._index return key in self._index

View File

@ -120,13 +120,10 @@ class Mailbox:
"""Return a list of (key, message) tuples. Memory intensive.""" """Return a list of (key, message) tuples. Memory intensive."""
return list(self.iteritems()) return list(self.iteritems())
def has_key(self, key): def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise.""" """Return True if the keyed message exists, False otherwise."""
raise NotImplementedError('Method must be implemented by subclass') raise NotImplementedError('Method must be implemented by subclass')
def __contains__(self, key):
return self.has_key(key)
def __len__(self): def __len__(self):
"""Return a count of messages in the mailbox.""" """Return a count of messages in the mailbox."""
raise NotImplementedError('Method must be implemented by subclass') raise NotImplementedError('Method must be implemented by subclass')
@ -330,7 +327,7 @@ class Maildir(Mailbox):
continue continue
yield key yield key
def has_key(self, key): def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise.""" """Return True if the keyed message exists, False otherwise."""
self._refresh() self._refresh()
return key in self._toc return key in self._toc
@ -515,7 +512,7 @@ class _singlefileMailbox(Mailbox):
for key in self._toc.keys(): for key in self._toc.keys():
yield key yield key
def has_key(self, key): def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise.""" """Return True if the keyed message exists, False otherwise."""
self._lookup() self._lookup()
return key in self._toc return key in self._toc
@ -902,7 +899,7 @@ class MH(Mailbox):
return iter(sorted(int(entry) for entry in os.listdir(self._path) return iter(sorted(int(entry) for entry in os.listdir(self._path)
if entry.isdigit())) if entry.isdigit()))
def has_key(self, key): def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise.""" """Return True if the keyed message exists, False otherwise."""
return os.path.exists(os.path.join(self._path, str(key))) return os.path.exists(os.path.join(self._path, str(key)))
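
In the mailbox hunks, the abstract method on the base class is now __contains__ itself and each concrete mailbox (Maildir, _singlefileMailbox, MH) overrides it directly, so key in box no longer detours through has_key(). A usage sketch, assuming a throwaway Maildir path:

    import mailbox

    box = mailbox.Maildir('/tmp/example-maildir')   # path is an assumption
    key = box.add('From: a@example.com\n\nhello\n')

    print(key in box)       # True  -- dispatches to Maildir.__contains__
    box.remove(key)
    print(key in box)       # False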

View File

@ -242,7 +242,7 @@ class ModuleFinder:
else: else:
self.msgout(3, "import_module ->", m) self.msgout(3, "import_module ->", m)
return m return m
if self.badmodules.has_key(fqname): if fqname in self.badmodules:
self.msgout(3, "import_module -> None") self.msgout(3, "import_module -> None")
return None return None
if parent and parent.__path__ is None: if parent and parent.__path__ is None:
@ -388,7 +388,7 @@ class ModuleFinder:
return m return m
def add_module(self, fqname): def add_module(self, fqname):
if self.modules.has_key(fqname): if fqname in self.modules:
return self.modules[fqname] return self.modules[fqname]
self.modules[fqname] = m = Module(fqname) self.modules[fqname] = m = Module(fqname)
return m return m

View File

@ -602,7 +602,7 @@ class Option:
def _set_attrs(self, attrs): def _set_attrs(self, attrs):
for attr in self.ATTRS: for attr in self.ATTRS:
if attrs.has_key(attr): if attr in attrs:
setattr(self, attr, attrs[attr]) setattr(self, attr, attrs[attr])
del attrs[attr] del attrs[attr]
else: else:
@ -854,7 +854,7 @@ class Values:
are silently ignored. are silently ignored.
""" """
for attr in dir(self): for attr in dir(self):
if dict.has_key(attr): if attr in dict:
dval = dict[attr] dval = dict[attr]
if dval is not None: if dval is not None:
setattr(self, attr, dval) setattr(self, attr, dval)
@ -974,10 +974,10 @@ class OptionContainer:
def _check_conflict(self, option): def _check_conflict(self, option):
conflict_opts = [] conflict_opts = []
for opt in option._short_opts: for opt in option._short_opts:
if self._short_opt.has_key(opt): if opt in self._short_opt:
conflict_opts.append((opt, self._short_opt[opt])) conflict_opts.append((opt, self._short_opt[opt]))
for opt in option._long_opts: for opt in option._long_opts:
if self._long_opt.has_key(opt): if opt in self._long_opt:
conflict_opts.append((opt, self._long_opt[opt])) conflict_opts.append((opt, self._long_opt[opt]))
if conflict_opts: if conflict_opts:
@ -1023,7 +1023,7 @@ class OptionContainer:
if option.dest is not None: # option has a dest, we need a default if option.dest is not None: # option has a dest, we need a default
if option.default is not NO_DEFAULT: if option.default is not NO_DEFAULT:
self.defaults[option.dest] = option.default self.defaults[option.dest] = option.default
elif not self.defaults.has_key(option.dest): elif option.dest not in self.defaults:
self.defaults[option.dest] = None self.defaults[option.dest] = None
return option return option
@ -1039,8 +1039,8 @@ class OptionContainer:
self._long_opt.get(opt_str)) self._long_opt.get(opt_str))
def has_option(self, opt_str): def has_option(self, opt_str):
return (self._short_opt.has_key(opt_str) or return (opt_str in self._short_opt or
self._long_opt.has_key(opt_str)) opt_str in self._long_opt)
def remove_option(self, opt_str): def remove_option(self, opt_str):
option = self._short_opt.get(opt_str) option = self._short_opt.get(opt_str)
@ -1658,7 +1658,7 @@ def _match_abbrev(s, wordmap):
'words', raise BadOptionError. 'words', raise BadOptionError.
""" """
# Is there an exact match? # Is there an exact match?
if wordmap.has_key(s): if s in wordmap:
return s return s
else: else:
# Isolate all words with s as a prefix. # Isolate all words with s as a prefix.

View File

@ -436,8 +436,6 @@ else:
def __delitem__(self, key): def __delitem__(self, key):
unsetenv(key) unsetenv(key)
del self.data[key.upper()] del self.data[key.upper()]
def has_key(self, key):
return key.upper() in self.data
def __contains__(self, key): def __contains__(self, key):
return key.upper() in self.data return key.upper() in self.data
def get(self, key, failobj=None): def get(self, key, failobj=None):
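
The os module hunk drops has_key() from the case-insensitive environment wrapper; __contains__ already uppercases the key, so membership stays case-insensitive. A minimal sketch of that behaviour on a hypothetical stand-in class (not os.py itself):

    class CaseInsensitiveEnv:                    # hypothetical, mirrors os.py's wrapper
        def __init__(self, data):
            self.data = dict((k.upper(), v) for k, v in data.items())
        def __setitem__(self, key, value):
            self.data[key.upper()] = value
        def __contains__(self, key):
            return key.upper() in self.data      # 'path' and 'PATH' both match

    env = CaseInsensitiveEnv({'PATH': '/usr/bin'})
    print('path' in env)    # True
    print('Path' in env)    # True
    print('SHELL' in env)   # False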

View File

@ -1287,19 +1287,19 @@ def decode_long(data):
r"""Decode a long from a two's complement little-endian binary string. r"""Decode a long from a two's complement little-endian binary string.
>>> decode_long('') >>> decode_long('')
0L 0
>>> decode_long("\xff\x00") >>> decode_long("\xff\x00")
255L 255
>>> decode_long("\xff\x7f") >>> decode_long("\xff\x7f")
32767L 32767
>>> decode_long("\x00\xff") >>> decode_long("\x00\xff")
-256L -256
>>> decode_long("\x00\x80") >>> decode_long("\x00\x80")
-32768L -32768
>>> decode_long("\x80") >>> decode_long("\x80")
-128L -128
>>> decode_long("\x7f") >>> decode_long("\x7f")
127L 127
""" """
nbytes = len(data) nbytes = len(data)

View File

@ -517,23 +517,14 @@ def read_decimalnl_long(f):
r""" r"""
>>> import StringIO >>> import StringIO
>>> read_decimalnl_long(StringIO.StringIO("1234\n56"))
Traceback (most recent call last):
...
ValueError: trailing 'L' required in '1234'
Someday the trailing 'L' will probably go away from this output.
>>> read_decimalnl_long(StringIO.StringIO("1234L\n56")) >>> read_decimalnl_long(StringIO.StringIO("1234L\n56"))
1234L 1234
>>> read_decimalnl_long(StringIO.StringIO("123456789012345678901234L\n6")) >>> read_decimalnl_long(StringIO.StringIO("123456789012345678901234L\n6"))
123456789012345678901234L 123456789012345678901234
""" """
s = read_stringnl(f, decode=False, stripquotes=False) s = read_stringnl(f, decode=False, stripquotes=False)
if not s.endswith("L"):
raise ValueError("trailing 'L' required in %r" % s)
return long(s) return long(s)
@ -625,15 +616,15 @@ def read_long1(f):
r""" r"""
>>> import StringIO >>> import StringIO
>>> read_long1(StringIO.StringIO("\x00")) >>> read_long1(StringIO.StringIO("\x00"))
0L 0
>>> read_long1(StringIO.StringIO("\x02\xff\x00")) >>> read_long1(StringIO.StringIO("\x02\xff\x00"))
255L 255
>>> read_long1(StringIO.StringIO("\x02\xff\x7f")) >>> read_long1(StringIO.StringIO("\x02\xff\x7f"))
32767L 32767
>>> read_long1(StringIO.StringIO("\x02\x00\xff")) >>> read_long1(StringIO.StringIO("\x02\x00\xff"))
-256L -256
>>> read_long1(StringIO.StringIO("\x02\x00\x80")) >>> read_long1(StringIO.StringIO("\x02\x00\x80"))
-32768L -32768
""" """
n = read_uint1(f) n = read_uint1(f)
@ -657,15 +648,15 @@ def read_long4(f):
r""" r"""
>>> import StringIO >>> import StringIO
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x00")) >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x00"))
255L 255
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x7f")) >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x7f"))
32767L 32767
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\xff")) >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\xff"))
-256L -256
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\x80")) >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\x80"))
-32768L -32768
>>> read_long1(StringIO.StringIO("\x00\x00\x00\x00")) >>> read_long1(StringIO.StringIO("\x00\x00\x00\x00"))
0L 0
""" """
n = read_int4(f) n = read_int4(f)

View File

@ -877,7 +877,7 @@ def architecture(executable=sys.executable,bits='',linkage=''):
executable == sys.executable: executable == sys.executable:
# "file" command did not return anything; we'll try to provide # "file" command did not return anything; we'll try to provide
# some sensible defaults then... # some sensible defaults then...
if _default_architecture.has_key(sys.platform): if sys.platform in _default_architecture:
b,l = _default_architecture[sys.platform] b,l = _default_architecture[sys.platform]
if b: if b:
bits = b bits = b

View File

@ -318,7 +318,7 @@ class Profile:
fn = ("", 0, self.c_func_name) fn = ("", 0, self.c_func_name)
self.cur = (t, 0, 0, fn, frame, self.cur) self.cur = (t, 0, 0, fn, frame, self.cur)
timings = self.timings timings = self.timings
if timings.has_key(fn): if fn in timings:
cc, ns, tt, ct, callers = timings[fn] cc, ns, tt, ct, callers = timings[fn]
timings[fn] = cc, ns+1, tt, ct, callers timings[fn] = cc, ns+1, tt, ct, callers
else: else:

View File

@ -140,7 +140,7 @@ class Stats:
self.total_calls += nc self.total_calls += nc
self.prim_calls += cc self.prim_calls += cc
self.total_tt += tt self.total_tt += tt
if callers.has_key(("jprofile", 0, "profiler")): if ("jprofile", 0, "profiler") in callers:
self.top_level[func] = None self.top_level[func] = None
if len(func_std_string(func)) > self.max_name_len: if len(func_std_string(func)) > self.max_name_len:
self.max_name_len = len(func_std_string(func)) self.max_name_len = len(func_std_string(func))

View File

@ -428,10 +428,6 @@ class Message:
self.dict[lowername] = default self.dict[lowername] = default
return default return default
def has_key(self, name):
"""Determine whether a message contains the named header."""
return name.lower() in self.dict
def __contains__(self, name): def __contains__(self, name):
"""Determine whether a message contains the named header.""" """Determine whether a message contains the named header."""
return name.lower() in self.dict return name.lower() in self.dict

View File

@ -231,7 +231,7 @@ class BaseSet(object):
little, big = self, other little, big = self, other
else: else:
little, big = other, self little, big = other, self
common = ifilter(big._data.has_key, little) common = ifilter(big._data.__contains__, little)
return self.__class__(common) return self.__class__(common)
def __xor__(self, other): def __xor__(self, other):
@ -256,9 +256,9 @@ class BaseSet(object):
otherdata = other._data otherdata = other._data
except AttributeError: except AttributeError:
otherdata = Set(other)._data otherdata = Set(other)._data
for elt in ifilterfalse(otherdata.has_key, selfdata): for elt in ifilterfalse(otherdata.__contains__, selfdata):
data[elt] = value data[elt] = value
for elt in ifilterfalse(selfdata.has_key, otherdata): for elt in ifilterfalse(selfdata.__contains__, otherdata):
data[elt] = value data[elt] = value
return result return result
@ -283,7 +283,7 @@ class BaseSet(object):
except AttributeError: except AttributeError:
otherdata = Set(other)._data otherdata = Set(other)._data
value = True value = True
for elt in ifilterfalse(otherdata.has_key, self): for elt in ifilterfalse(otherdata.__contains__, self):
data[elt] = value data[elt] = value
return result return result
@ -309,7 +309,7 @@ class BaseSet(object):
self._binary_sanity_check(other) self._binary_sanity_check(other)
if len(self) > len(other): # Fast check for obvious cases if len(self) > len(other): # Fast check for obvious cases
return False return False
for elt in ifilterfalse(other._data.has_key, self): for elt in ifilterfalse(other._data.__contains__, self):
return False return False
return True return True
@ -318,7 +318,7 @@ class BaseSet(object):
self._binary_sanity_check(other) self._binary_sanity_check(other)
if len(self) < len(other): # Fast check for obvious cases if len(self) < len(other): # Fast check for obvious cases
return False return False
for elt in ifilterfalse(self._data.has_key, other): for elt in ifilterfalse(self._data.__contains__, other):
return False return False
return True return True
@ -501,7 +501,7 @@ class Set(BaseSet):
other = Set(other) other = Set(other)
if self is other: if self is other:
self.clear() self.clear()
for elt in ifilter(data.has_key, other): for elt in ifilter(data.__contains__, other):
del data[elt] del data[elt]
# Python dict-like mass mutations: update, clear # Python dict-like mass mutations: update, clear

View File

@ -20,7 +20,7 @@ object):
# access returns a *copy* of the entry! # access returns a *copy* of the entry!
del d[key] # delete data stored at key (raises KeyError del d[key] # delete data stored at key (raises KeyError
# if no such key) # if no such key)
flag = d.has_key(key) # true if the key exists; same as "key in d" flag = key in d # true if the key exists
list = d.keys() # a list of all existing keys (slow!) list = d.keys() # a list of all existing keys (slow!)
d.close() # close it d.close() # close it
@ -94,14 +94,11 @@ class Shelf(UserDict.DictMixin):
def __len__(self): def __len__(self):
return len(self.dict) return len(self.dict)
def has_key(self, key):
return self.dict.has_key(key)
def __contains__(self, key): def __contains__(self, key):
return self.dict.has_key(key) return key in self.dict
def get(self, key, default=None): def get(self, key, default=None):
if self.dict.has_key(key): if key in self.dict:
return self[key] return self[key]
return default return default
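
The shelve hunk updates both the module docstring and Shelf so that membership is spelled key in shelf and backed by the underlying dict-like database. A short usage sketch; the file name is an assumption:

    import shelve

    d = shelve.open('/tmp/example_shelf')    # hypothetical path
    d['key'] = [1, 2, 3]

    print('key' in d)                        # True  -- was d.has_key('key')
    print(d.get('missing', 'default'))       # 'default', uses the same membership test
    d.close()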

View File

@ -54,12 +54,10 @@ class BasicTestMappingProtocol(unittest.TestCase):
#len #len
self.assertEqual(len(p), 0) self.assertEqual(len(p), 0)
self.assertEqual(len(d), len(self.reference)) self.assertEqual(len(d), len(self.reference))
#has_key #__contains__
for k in self.reference: for k in self.reference:
self.assert_(d.has_key(k))
self.assert_(k in d) self.assert_(k in d)
for k in self.other: for k in self.other:
self.failIf(d.has_key(k))
self.failIf(k in d) self.failIf(k in d)
#cmp #cmp
self.assertEqual(cmp(p,p), 0) self.assertEqual(cmp(p,p), 0)
@ -333,16 +331,6 @@ class TestMappingProtocol(BasicTestMappingProtocol):
d = self._full_mapping({1:2}) d = self._full_mapping({1:2})
self.assertEqual(d.items(), [(1, 2)]) self.assertEqual(d.items(), [(1, 2)])
def test_has_key(self):
d = self._empty_mapping()
self.assert_(not d.has_key('a'))
d = self._full_mapping({'a': 1, 'b': 2})
k = d.keys()
k.sort()
self.assertEqual(k, ['a', 'b'])
self.assertRaises(TypeError, d.has_key)
def test_contains(self): def test_contains(self):
d = self._empty_mapping() d = self._empty_mapping()
self.assert_(not ('a' in d)) self.assert_(not ('a' in d))

View File

@ -1034,7 +1034,6 @@ _expectations = {
'darwin': 'darwin':
""" """
test_al test_al
test_bsddb3
test_cd test_cd
test_cl test_cl
test_gdbm test_gdbm

View File

@ -24,7 +24,7 @@ class AllTest(unittest.TestCase):
"%s has no __all__ attribute" % modname) "%s has no __all__ attribute" % modname)
names = {} names = {}
exec "from %s import *" % modname in names exec "from %s import *" % modname in names
if names.has_key("__builtins__"): if "__builtins__" in names:
del names["__builtins__"] del names["__builtins__"]
keys = set(names) keys = set(names)
all = set(sys.modules[modname].__all__) all = set(sys.modules[modname].__all__)

View File

@ -183,9 +183,9 @@ class BoolTest(unittest.TestCase):
self.assertIs(issubclass(bool, int), True) self.assertIs(issubclass(bool, int), True)
self.assertIs(issubclass(int, bool), False) self.assertIs(issubclass(int, bool), False)
def test_haskey(self): def test_contains(self):
self.assertIs({}.has_key(1), False) self.assertIs(1 in {}, False)
self.assertIs({1:1}.has_key(1), True) self.assertIs(1 in {1:1}, True)
def test_string(self): def test_string(self):
self.assertIs("xyz".endswith("z"), True) self.assertIs("xyz".endswith("z"), True)

View File

@ -135,11 +135,6 @@ class TestBSDDB(unittest.TestCase):
self.assert_(k in self.f) self.assert_(k in self.f)
self.assert_('not here' not in self.f) self.assert_('not here' not in self.f)
def test_has_key(self):
for k in self.d:
self.assert_(self.f.has_key(k))
self.assert_(not self.f.has_key('not here'))
def test_clear(self): def test_clear(self):
self.f.clear() self.f.clear()
self.assertEqual(len(self.f), 0) self.assertEqual(len(self.f), 0)

View File

@ -630,9 +630,9 @@ class BuiltinTest(unittest.TestCase):
def test_hex(self): def test_hex(self):
self.assertEqual(hex(16), '0x10') self.assertEqual(hex(16), '0x10')
self.assertEqual(hex(16L), '0x10L') self.assertEqual(hex(16L), '0x10')
self.assertEqual(hex(-16), '-0x10') self.assertEqual(hex(-16), '-0x10')
self.assertEqual(hex(-16L), '-0x10L') self.assertEqual(hex(-16L), '-0x10')
self.assertRaises(TypeError, hex, {}) self.assertRaises(TypeError, hex, {})
def test_id(self): def test_id(self):
@ -1240,9 +1240,9 @@ class BuiltinTest(unittest.TestCase):
def test_oct(self): def test_oct(self):
self.assertEqual(oct(100), '0144') self.assertEqual(oct(100), '0144')
self.assertEqual(oct(100L), '0144L') self.assertEqual(oct(100L), '0144')
self.assertEqual(oct(-100), '-0144') self.assertEqual(oct(-100), '-0144')
self.assertEqual(oct(-100L), '-0144L') self.assertEqual(oct(-100L), '-0144')
self.assertRaises(TypeError, oct, ()) self.assertRaises(TypeError, oct, ())
def write_testfile(self): def write_testfile(self):
@ -1441,7 +1441,7 @@ class BuiltinTest(unittest.TestCase):
def test_repr(self): def test_repr(self):
self.assertEqual(repr(''), '\'\'') self.assertEqual(repr(''), '\'\'')
self.assertEqual(repr(0), '0') self.assertEqual(repr(0), '0')
self.assertEqual(repr(0L), '0L') self.assertEqual(repr(0L), '0')
self.assertEqual(repr(()), '()') self.assertEqual(repr(()), '()')
self.assertEqual(repr([]), '[]') self.assertEqual(repr([]), '[]')
self.assertEqual(repr({}), '{}') self.assertEqual(repr({}), '{}')

View File

@ -9,39 +9,39 @@ from test import test_support
class CFunctionCalls(unittest.TestCase): class CFunctionCalls(unittest.TestCase):
def test_varargs0(self): def test_varargs0(self):
self.assertRaises(TypeError, {}.has_key) self.assertRaises(TypeError, {}.__contains__)
def test_varargs1(self): def test_varargs1(self):
{}.has_key(0) {}.__contains__(0)
def test_varargs2(self): def test_varargs2(self):
self.assertRaises(TypeError, {}.has_key, 0, 1) self.assertRaises(TypeError, {}.__contains__, 0, 1)
def test_varargs0_ext(self): def test_varargs0_ext(self):
try: try:
{}.has_key(*()) {}.__contains__(*())
except TypeError: except TypeError:
pass pass
def test_varargs1_ext(self): def test_varargs1_ext(self):
{}.has_key(*(0,)) {}.__contains__(*(0,))
def test_varargs2_ext(self): def test_varargs2_ext(self):
try: try:
{}.has_key(*(1, 2)) {}.__contains__(*(1, 2))
except TypeError: except TypeError:
pass pass
else: else:
raise RuntimeError raise RuntimeError
def test_varargs0_kw(self): def test_varargs0_kw(self):
self.assertRaises(TypeError, {}.has_key, x=2) self.assertRaises(TypeError, {}.__contains__, x=2)
def test_varargs1_kw(self): def test_varargs1_kw(self):
self.assertRaises(TypeError, {}.has_key, x=2) self.assertRaises(TypeError, {}.__contains__, x=2)
def test_varargs2_kw(self): def test_varargs2_kw(self):
self.assertRaises(TypeError, {}.has_key, x=2, y=2) self.assertRaises(TypeError, {}.__contains__, x=2, y=2)
def test_oldargs0_0(self): def test_oldargs0_0(self):
{}.keys() {}.keys()
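
The test_calls hunk keeps exercising C-level argument handling for a built-in method, with dict.__contains__ standing in for the removed has_key. The behaviours the assertions rely on can be checked directly; a brief sketch:

    d = {1: 'one'}

    print(d.__contains__(1))             # True, same answer as "1 in d"
    print((1 in d) == d.__contains__(1)) # True

    try:
        d.__contains__()                 # wrong arity: TypeError, as the tests assert
    except TypeError:
        print('missing argument raises TypeError')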

View File

@ -158,10 +158,10 @@ def main():
# test individual fields # test individual fields
for key in expect.keys(): for key in expect.keys():
expect_val = expect[key] expect_val = expect[key]
verify(fcd.has_key(key)) verify(key in fcd)
verify(norm(fcd[key]) == norm(expect[key])) verify(norm(fcd[key]) == norm(expect[key]))
verify(fcd.get(key, "default") == fcd[key]) verify(fcd.get(key, "default") == fcd[key])
verify(fs.has_key(key)) verify(key in fs)
if len(expect_val) > 1: if len(expect_val) > 1:
single_value = 0 single_value = 0
else: else:

View File

@ -28,7 +28,7 @@ def test_keys():
d['a'] = 'b' d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242' d['12345678910'] = '019237410982340912840198242'
d.keys() d.keys()
if d.has_key('a'): if 'a' in d:
if verbose: if verbose:
print 'Test dbm keys: ', d.keys() print 'Test dbm keys: ', d.keys()

View File

@ -21,8 +21,8 @@ class DictTest(unittest.TestCase):
self.assertEqual(d.keys(), []) self.assertEqual(d.keys(), [])
d = {'a': 1, 'b': 2} d = {'a': 1, 'b': 2}
k = d.keys() k = d.keys()
self.assert_(d.has_key('a')) self.assert_('a' in d)
self.assert_(d.has_key('b')) self.assert_('b' in d)
self.assertRaises(TypeError, d.keys, None) self.assertRaises(TypeError, d.keys, None)
@ -43,16 +43,6 @@ class DictTest(unittest.TestCase):
self.assertRaises(TypeError, d.items, None) self.assertRaises(TypeError, d.items, None)
def test_has_key(self):
d = {}
self.assert_(not d.has_key('a'))
d = {'a': 1, 'b': 2}
k = d.keys()
k.sort()
self.assertEqual(k, ['a', 'b'])
self.assertRaises(TypeError, d.has_key)
def test_contains(self): def test_contains(self):
d = {} d = {}
self.assert_(not ('a' in d)) self.assert_(not ('a' in d))

View File

@ -17,7 +17,7 @@ a = g.keys()
if verbose: if verbose:
print 'Test gdbm file keys: ', a print 'Test gdbm file keys: ', a
g.has_key('a') 'a' in g
g.close() g.close()
try: try:
g['a'] g['a']

View File

@ -472,7 +472,7 @@ def f():
f() f()
g = {} g = {}
exec 'z = 1' in g exec 'z = 1' in g
if g.has_key('__builtins__'): del g['__builtins__'] if '__builtins__' in g: del g['__builtins__']
if g != {'z': 1}: raise TestFailed, 'exec \'z = 1\' in g' if g != {'z': 1}: raise TestFailed, 'exec \'z = 1\' in g'
g = {} g = {}
l = {} l = {}
@ -480,8 +480,8 @@ l = {}
import warnings import warnings
warnings.filterwarnings("ignore", "global statement", module="<string>") warnings.filterwarnings("ignore", "global statement", module="<string>")
exec 'global a; a = 1; b = 2' in g, l exec 'global a; a = 1; b = 2' in g, l
if g.has_key('__builtins__'): del g['__builtins__'] if '__builtins__' in g: del g['__builtins__']
if l.has_key('__builtins__'): del l['__builtins__'] if '__builtins__' in l: del l['__builtins__']
if (g, l) != ({'a':1}, {'b':2}): raise TestFailed, 'exec ... in g (%s), l (%s)' %(g,l) if (g, l) != ({'a':1}, {'b':2}): raise TestFailed, 'exec ... in g (%s), l (%s)' %(g,l)

View File

@ -229,16 +229,9 @@ class TestMailbox(TestBase):
count += 1 count += 1
self.assert_(len(values) == count) self.assert_(len(values) == count)
def test_has_key(self):
# Check existence of keys using has_key()
self._test_has_key_or_contains(self._box.has_key)
def test_contains(self): def test_contains(self):
# Check existence of keys using __contains__() # Check existence of keys using __contains__()
self._test_has_key_or_contains(self._box.__contains__) method = self._box.__contains__
def _test_has_key_or_contains(self, method):
# (Used by test_has_key() and test_contains().)
self.assert_(not method('foo')) self.assert_(not method('foo'))
key0 = self._box.add(self._template % 0) key0 = self._box.add(self._template % 0)
self.assert_(method(key0)) self.assert_(method(key0))
@ -442,7 +435,7 @@ class TestMailboxSuperclass(TestBase):
self.assertRaises(NotImplementedError, lambda: box.get_message('')) self.assertRaises(NotImplementedError, lambda: box.get_message(''))
self.assertRaises(NotImplementedError, lambda: box.get_string('')) self.assertRaises(NotImplementedError, lambda: box.get_string(''))
self.assertRaises(NotImplementedError, lambda: box.get_file('')) self.assertRaises(NotImplementedError, lambda: box.get_file(''))
self.assertRaises(NotImplementedError, lambda: box.has_key('')) self.assertRaises(NotImplementedError, lambda: '' in box)
self.assertRaises(NotImplementedError, lambda: box.__contains__('')) self.assertRaises(NotImplementedError, lambda: box.__contains__(''))
self.assertRaises(NotImplementedError, lambda: box.__len__()) self.assertRaises(NotImplementedError, lambda: box.__len__())
self.assertRaises(NotImplementedError, lambda: box.clear()) self.assertRaises(NotImplementedError, lambda: box.clear())

View File

@ -297,7 +297,7 @@ class TestBase_Mapping(unittest.TestCase):
continue continue
unich = unichrs(data[1]) unich = unichrs(data[1])
if ord(unich) == 0xfffd or urt_wa.has_key(unich): if ord(unich) == 0xfffd or unich in urt_wa:
continue continue
urt_wa[unich] = csetch urt_wa[unich] = csetch

View File

@ -25,7 +25,6 @@ d[x1] = 1
for stmt in ['d[x2] = 2', for stmt in ['d[x2] = 2',
'z = d[x2]', 'z = d[x2]',
'x2 in d', 'x2 in d',
'd.has_key(x2)',
'd.get(x2)', 'd.get(x2)',
'd.setdefault(x2, 42)', 'd.setdefault(x2, 42)',
'd.pop(x2)', 'd.pop(x2)',

View File

@ -6,14 +6,14 @@ class TestImport(unittest.TestCase):
def __init__(self, *args, **kw): def __init__(self, *args, **kw):
self.package_name = 'PACKAGE_' self.package_name = 'PACKAGE_'
while sys.modules.has_key(self.package_name): while self.package_name in sys.modules:
self.package_name += random.choose(string.letters) self.package_name += random.choose(string.letters)
self.module_name = self.package_name + '.foo' self.module_name = self.package_name + '.foo'
unittest.TestCase.__init__(self, *args, **kw) unittest.TestCase.__init__(self, *args, **kw)
def remove_modules(self): def remove_modules(self):
for module_name in (self.package_name, self.module_name): for module_name in (self.package_name, self.module_name):
if sys.modules.has_key(module_name): if module_name in sys.modules:
del sys.modules[module_name] del sys.modules[module_name]
def setUp(self): def setUp(self):
@ -52,7 +52,7 @@ class TestImport(unittest.TestCase):
try: __import__(self.module_name) try: __import__(self.module_name)
except SyntaxError: pass except SyntaxError: pass
else: raise RuntimeError, 'Failed to induce SyntaxError' else: raise RuntimeError, 'Failed to induce SyntaxError'
self.assert_(not sys.modules.has_key(self.module_name) and self.assert_(self.module_name not in sys.modules and
not hasattr(sys.modules[self.package_name], 'foo')) not hasattr(sys.modules[self.package_name], 'foo'))
# ...make up a variable name that isn't bound in __builtins__ # ...make up a variable name that isn't bound in __builtins__

View File

@ -40,11 +40,11 @@ class PyclbrTest(TestCase):
def assertHaskey(self, obj, key, ignore): def assertHaskey(self, obj, key, ignore):
''' succeed iff obj.has_key(key) or key in ignore. ''' ''' succeed iff key in obj or key in ignore. '''
if key in ignore: return if key in ignore: return
if not obj.has_key(key): if key not in obj:
print >>sys.stderr, "***",key print >>sys.stderr, "***",key
self.failUnless(obj.has_key(key)) self.failUnless(key in obj)
def assertEqualsOrIgnored(self, a, b, ignore): def assertEqualsOrIgnored(self, a, b, ignore):
''' succeed iff a == b or a in ignore or b in ignore ''' ''' succeed iff a == b or a in ignore or b in ignore '''

View File

@ -25,7 +25,7 @@ class MessageTestCase(unittest.TestCase):
def test_setdefault(self): def test_setdefault(self):
msg = self.create_message( msg = self.create_message(
'To: "last, first" <userid@foo.net>\n\ntest\n') 'To: "last, first" <userid@foo.net>\n\ntest\n')
self.assert_(not msg.has_key("New-Header")) self.assert_("New-Header" not in msg)
self.assert_(msg.setdefault("New-Header", "New-Value") == "New-Value") self.assert_(msg.setdefault("New-Header", "New-Value") == "New-Value")
self.assert_(msg.setdefault("New-Header", "Different-Value") self.assert_(msg.setdefault("New-Header", "Different-Value")
== "New-Value") == "New-Value")

View File

@ -357,7 +357,7 @@ def test_expat_nsattrs_wattr():
attrs.getNames() == [(ns_uri, "attr")] and \ attrs.getNames() == [(ns_uri, "attr")] and \
(attrs.getQNames() == [] or attrs.getQNames() == ["ns:attr"]) and \ (attrs.getQNames() == [] or attrs.getQNames() == ["ns:attr"]) and \
len(attrs) == 1 and \ len(attrs) == 1 and \
attrs.has_key((ns_uri, "attr")) and \ (ns_uri, "attr") in attrs and \
attrs.keys() == [(ns_uri, "attr")] and \ attrs.keys() == [(ns_uri, "attr")] and \
attrs.get((ns_uri, "attr")) == "val" and \ attrs.get((ns_uri, "attr")) == "val" and \
attrs.get((ns_uri, "attr"), 25) == "val" and \ attrs.get((ns_uri, "attr"), 25) == "val" and \
@ -571,7 +571,7 @@ def verify_empty_attrs(attrs):
attrs.getNames() == [] and \ attrs.getNames() == [] and \
attrs.getQNames() == [] and \ attrs.getQNames() == [] and \
len(attrs) == 0 and \ len(attrs) == 0 and \
not attrs.has_key("attr") and \ "attr" not in attrs and \
attrs.keys() == [] and \ attrs.keys() == [] and \
attrs.get("attrs") is None and \ attrs.get("attrs") is None and \
attrs.get("attrs", 25) == 25 and \ attrs.get("attrs", 25) == 25 and \
@ -584,7 +584,7 @@ def verify_attrs_wattr(attrs):
attrs.getNames() == ["attr"] and \ attrs.getNames() == ["attr"] and \
attrs.getQNames() == ["attr"] and \ attrs.getQNames() == ["attr"] and \
len(attrs) == 1 and \ len(attrs) == 1 and \
attrs.has_key("attr") and \ "attr" in attrs and \
attrs.keys() == ["attr"] and \ attrs.keys() == ["attr"] and \
attrs.get("attr") == "val" and \ attrs.get("attr") == "val" and \
attrs.get("attr", 25) == "val" and \ attrs.get("attr", 25) == "val" and \
@ -639,7 +639,7 @@ def verify_empty_nsattrs(attrs):
attrs.getNames() == [] and \ attrs.getNames() == [] and \
attrs.getQNames() == [] and \ attrs.getQNames() == [] and \
len(attrs) == 0 and \ len(attrs) == 0 and \
not attrs.has_key((ns_uri, "attr")) and \ (ns_uri, "attr") not in attrs and \
attrs.keys() == [] and \ attrs.keys() == [] and \
attrs.get((ns_uri, "attr")) is None and \ attrs.get((ns_uri, "attr")) is None and \
attrs.get((ns_uri, "attr"), 25) == 25 and \ attrs.get((ns_uri, "attr"), 25) == 25 and \
@ -658,7 +658,7 @@ def test_nsattrs_wattr():
attrs.getNames() == [(ns_uri, "attr")] and \ attrs.getNames() == [(ns_uri, "attr")] and \
attrs.getQNames() == ["ns:attr"] and \ attrs.getQNames() == ["ns:attr"] and \
len(attrs) == 1 and \ len(attrs) == 1 and \
attrs.has_key((ns_uri, "attr")) and \ (ns_uri, "attr") in attrs and \
attrs.keys() == [(ns_uri, "attr")] and \ attrs.keys() == [(ns_uri, "attr")] and \
attrs.get((ns_uri, "attr")) == "val" and \ attrs.get((ns_uri, "attr")) == "val" and \
attrs.get((ns_uri, "attr"), 25) == "val" and \ attrs.get((ns_uri, "attr"), 25) == "val" and \

View File

@ -472,7 +472,7 @@ def f(x):
return g return g
d = f(2)(4) d = f(2)(4)
verify(d.has_key('h')) verify('h' in d)
del d['h'] del d['h']
vereq(d, {'x': 2, 'y': 7, 'w': 6}) vereq(d, {'x': 2, 'y': 7, 'w': 6})


@ -216,7 +216,7 @@ class ImportSideEffectTests(unittest.TestCase):
def test_sitecustomize_executed(self): def test_sitecustomize_executed(self):
# If sitecustomize is available, it should have been imported. # If sitecustomize is available, it should have been imported.
if not sys.modules.has_key("sitecustomize"): if "sitecustomize" not in sys.modules:
try: try:
import sitecustomize import sitecustomize
except ImportError: except ImportError:


@ -266,7 +266,7 @@ class IntTester:
if x < 0: if x < 0:
expected += 1L << self.bitsize expected += 1L << self.bitsize
assert expected > 0 assert expected > 0
expected = hex(expected)[2:-1] # chop "0x" and trailing 'L' expected = hex(expected)[2:] # chop "0x"
if len(expected) & 1: if len(expected) & 1:
expected = "0" + expected expected = "0" + expected
expected = unhexlify(expected) expected = unhexlify(expected)
@ -322,7 +322,7 @@ class IntTester:
# Try big-endian. # Try big-endian.
format = ">" + code format = ">" + code
expected = long(x) expected = long(x)
expected = hex(expected)[2:-1] # chop "0x" and trailing 'L' expected = hex(expected)[2:] # chop "0x"
if len(expected) & 1: if len(expected) & 1:
expected = "0" + expected expected = "0" + expected
expected = unhexlify(expected) expected = unhexlify(expected)
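Both hunks adjust the same helper logic: the expected byte string is built by hex-formatting the (two's-complemented) value, and on this branch hex() no longer appends a trailing 'L', so only the "0x" prefix needs chopping. A sketch of that logic, runnable on modern Python, with a hypothetical helper name:

    from binascii import unhexlify

    def expected_bytes(x, bitsize):
        # Fold negative values into the unsigned range (two's complement).
        if x < 0:
            x += 1 << bitsize
        assert x > 0
        digits = hex(x)[2:]          # chop "0x"; no trailing 'L' to strip
        if len(digits) & 1:          # unhexlify needs an even digit count
            digits = "0" + digits
        return unhexlify(digits)

    assert expected_bytes(-1, 16) == b"\xff\xff"
    assert expected_bytes(258, 16) == b"\x01\x02"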


@ -180,7 +180,7 @@ class TimeTestCase(unittest.TestCase):
# rely on it. # rely on it.
if org_TZ is not None: if org_TZ is not None:
environ['TZ'] = org_TZ environ['TZ'] = org_TZ
elif environ.has_key('TZ'): elif 'TZ' in environ:
del environ['TZ'] del environ['TZ']
time.tzset() time.tzset()
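The teardown here restores whatever TZ value was in the environment before the test ran. A hedged sketch of the same save/switch/restore idiom around time.tzset() (Unix-only, like the test), with a hypothetical helper name:

    import os, time

    def strftime_in_tz(tz, fmt='%Z'):
        saved = os.environ.get('TZ')      # None if TZ was not set at all
        os.environ['TZ'] = tz
        time.tzset()
        try:
            return time.strftime(fmt)
        finally:
            if saved is not None:
                os.environ['TZ'] = saved
            elif 'TZ' in os.environ:      # same membership test as above
                del os.environ['TZ']
            time.tzset()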


@ -653,7 +653,7 @@ class HandlerTests(unittest.TestCase):
r.info; r.geturl # addinfourl methods r.info; r.geturl # addinfourl methods
r.code, r.msg == 200, "OK" # added from MockHTTPClass.getreply() r.code, r.msg == 200, "OK" # added from MockHTTPClass.getreply()
hdrs = r.info() hdrs = r.info()
hdrs.get; hdrs.has_key # r.info() gives dict from .getreply() hdrs.get; hdrs.__contains__ # r.info() gives dict from .getreply()
self.assertEqual(r.geturl(), url) self.assertEqual(r.geturl(), url)
self.assertEqual(http.host, "example.com") self.assertEqual(http.host, "example.com")


@ -94,13 +94,10 @@ class UserDictTest(mapping_tests.TestHashMappingProtocol):
self.assertEqual(u2.items(), d2.items()) self.assertEqual(u2.items(), d2.items())
self.assertEqual(u2.values(), d2.values()) self.assertEqual(u2.values(), d2.values())
# Test has_key and "in". # Test "in".
for i in u2.keys(): for i in u2.keys():
self.assert_(u2.has_key(i))
self.assert_(i in u2) self.assert_(i in u2)
self.assertEqual(u1.has_key(i), d1.has_key(i))
self.assertEqual(i in u1, i in d1) self.assertEqual(i in u1, i in d1)
self.assertEqual(u0.has_key(i), d0.has_key(i))
self.assertEqual(i in u0, i in d0) self.assertEqual(i in u0, i in d0)
# Test update # Test update
@ -132,7 +129,7 @@ class UserDictTest(mapping_tests.TestHashMappingProtocol):
# Test setdefault # Test setdefault
t = UserDict.UserDict() t = UserDict.UserDict()
self.assertEqual(t.setdefault("x", 42), 42) self.assertEqual(t.setdefault("x", 42), 42)
self.assert_(t.has_key("x")) self.assert_("x" in t)
self.assertEqual(t.setdefault("x", 23), 42) self.assertEqual(t.setdefault("x", 23), 42)
# Test pop # Test pop
@ -269,9 +266,6 @@ class UserDictMixinTest(mapping_tests.TestMappingProtocol):
self.assertEqual(s.keys(), [10, 30]) self.assertEqual(s.keys(), [10, 30])
## Now, test the DictMixin methods one by one ## Now, test the DictMixin methods one by one
# has_key
self.assert_(s.has_key(10))
self.assert_(not s.has_key(20))
# __contains__ # __contains__
self.assert_(10 in s) self.assert_(10 in s)


@ -739,7 +739,7 @@ class MappingTestCase(TestBase):
def test_weak_keys(self): def test_weak_keys(self):
# #
# This exercises d.copy(), d.items(), d[] = v, d[], del d[], # This exercises d.copy(), d.items(), d[] = v, d[], del d[],
# len(d), d.has_key(). # len(d), k in d.
# #
dict, objects = self.make_weak_keyed_dict() dict, objects = self.make_weak_keyed_dict()
for o in objects: for o in objects:
@ -761,8 +761,8 @@ class MappingTestCase(TestBase):
"deleting the keys did not clear the dictionary") "deleting the keys did not clear the dictionary")
o = Object(42) o = Object(42)
dict[o] = "What is the meaning of the universe?" dict[o] = "What is the meaning of the universe?"
self.assert_(dict.has_key(o)) self.assert_(o in dict)
self.assert_(not dict.has_key(34)) self.assert_(34 not in dict)
def test_weak_keyed_iters(self): def test_weak_keyed_iters(self):
dict, objects = self.make_weak_keyed_dict() dict, objects = self.make_weak_keyed_dict()
@ -774,7 +774,7 @@ class MappingTestCase(TestBase):
objects2 = list(objects) objects2 = list(objects)
for wr in refs: for wr in refs:
ob = wr() ob = wr()
self.assert_(dict.has_key(ob)) self.assert_(ob in dict)
self.assert_(ob in dict) self.assert_(ob in dict)
self.assertEqual(ob.arg, dict[ob]) self.assertEqual(ob.arg, dict[ob])
objects2.remove(ob) objects2.remove(ob)
@ -785,7 +785,7 @@ class MappingTestCase(TestBase):
self.assertEqual(len(list(dict.iterkeyrefs())), len(objects)) self.assertEqual(len(list(dict.iterkeyrefs())), len(objects))
for wr in dict.iterkeyrefs(): for wr in dict.iterkeyrefs():
ob = wr() ob = wr()
self.assert_(dict.has_key(ob)) self.assert_(ob in dict)
self.assert_(ob in dict) self.assert_(ob in dict)
self.assertEqual(ob.arg, dict[ob]) self.assertEqual(ob.arg, dict[ob])
objects2.remove(ob) objects2.remove(ob)
@ -900,13 +900,13 @@ class MappingTestCase(TestBase):
weakdict = klass() weakdict = klass()
o = weakdict.setdefault(key, value1) o = weakdict.setdefault(key, value1)
self.assert_(o is value1) self.assert_(o is value1)
self.assert_(weakdict.has_key(key)) self.assert_(key in weakdict)
self.assert_(weakdict.get(key) is value1) self.assert_(weakdict.get(key) is value1)
self.assert_(weakdict[key] is value1) self.assert_(weakdict[key] is value1)
o = weakdict.setdefault(key, value2) o = weakdict.setdefault(key, value2)
self.assert_(o is value1) self.assert_(o is value1)
self.assert_(weakdict.has_key(key)) self.assert_(key in weakdict)
self.assert_(weakdict.get(key) is value1) self.assert_(weakdict.get(key) is value1)
self.assert_(weakdict[key] is value1) self.assert_(weakdict[key] is value1)
@ -920,20 +920,20 @@ class MappingTestCase(TestBase):
def check_update(self, klass, dict): def check_update(self, klass, dict):
# #
# This exercises d.update(), len(d), d.keys(), d.has_key(), # This exercises d.update(), len(d), d.keys(), k in d,
# d.get(), d[]. # d.get(), d[].
# #
weakdict = klass() weakdict = klass()
weakdict.update(dict) weakdict.update(dict)
self.assert_(len(weakdict) == len(dict)) self.assert_(len(weakdict) == len(dict))
for k in weakdict.keys(): for k in weakdict.keys():
self.assert_(dict.has_key(k), self.assert_(k in dict,
"mysterious new key appeared in weak dict") "mysterious new key appeared in weak dict")
v = dict.get(k) v = dict.get(k)
self.assert_(v is weakdict[k]) self.assert_(v is weakdict[k])
self.assert_(v is weakdict.get(k)) self.assert_(v is weakdict.get(k))
for k in dict.keys(): for k in dict.keys():
self.assert_(weakdict.has_key(k), self.assert_(k in weakdict,
"original key disappeared in weak dict") "original key disappeared in weak dict")
v = dict[k] v = dict[k]
self.assert_(v is weakdict[k]) self.assert_(v is weakdict[k])


@ -341,7 +341,7 @@ class HeaderTests(TestCase):
del h['foo'] # should not raise an error del h['foo'] # should not raise an error
h['Foo'] = 'bar' h['Foo'] = 'bar'
for m in h.has_key, h.__contains__, h.get, h.get_all, h.__getitem__: for m in h.__contains__, h.get, h.get_all, h.__getitem__:
self.failUnless(m('foo')) self.failUnless(m('foo'))
self.failUnless(m('Foo')) self.failUnless(m('Foo'))
self.failUnless(m('FOO')) self.failUnless(m('FOO'))
@ -424,10 +424,10 @@ class HandlerTests(TestCase):
env = handler.environ env = handler.environ
from os import environ from os import environ
for k,v in environ.items(): for k,v in environ.items():
if not empty.has_key(k): if k not in empty:
self.assertEqual(env[k],v) self.assertEqual(env[k],v)
for k,v in empty.items(): for k,v in empty.items():
self.failUnless(env.has_key(k)) self.failUnless(k in env)
def testEnviron(self): def testEnviron(self):
h = TestHandler(X="Y") h = TestHandler(X="Y")
@ -440,7 +440,7 @@ class HandlerTests(TestCase):
h = BaseCGIHandler(None,None,None,{}) h = BaseCGIHandler(None,None,None,{})
h.setup_environ() h.setup_environ()
for key in 'wsgi.url_scheme', 'wsgi.input', 'wsgi.errors': for key in 'wsgi.url_scheme', 'wsgi.input', 'wsgi.errors':
self.assert_(h.environ.has_key(key)) self.assert_(key in h.environ)
def testScheme(self): def testScheme(self):
h=TestHandler(HTTPS="on"); h.setup_environ() h=TestHandler(HTTPS="on"); h.setup_environ()


@ -120,7 +120,7 @@ class Ignore:
self._ignore = { '<string>': 1 } self._ignore = { '<string>': 1 }
def names(self, filename, modulename): def names(self, filename, modulename):
if self._ignore.has_key(modulename): if modulename in self._ignore:
return self._ignore[modulename] return self._ignore[modulename]
# haven't seen this one before, so see if the module name is # haven't seen this one before, so see if the module name is


@ -153,7 +153,7 @@ class TestResult:
return ''.join(traceback.format_exception(exctype, value, tb)) return ''.join(traceback.format_exception(exctype, value, tb))
def _is_relevant_tb_level(self, tb): def _is_relevant_tb_level(self, tb):
return tb.tb_frame.f_globals.has_key('__unittest') return '__unittest' in tb.tb_frame.f_globals
def _count_relevant_tb_levels(self, tb): def _count_relevant_tb_levels(self, tb):
length = 0 length = 0


@ -114,7 +114,7 @@ class URLopener:
def __init__(self, proxies=None, **x509): def __init__(self, proxies=None, **x509):
if proxies is None: if proxies is None:
proxies = getproxies() proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping" assert hasattr(proxies, 'keys'), "proxies must be a mapping"
self.proxies = proxies self.proxies = proxies
self.key_file = x509.get('key_file') self.key_file = x509.get('key_file')
self.cert_file = x509.get('cert_file') self.cert_file = x509.get('cert_file')
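The assertion in URLopener (and the matching one in urllib2.ProxyHandler in the next file) now duck-types the argument by probing for keys() instead of the soon-to-vanish has_key(). A hedged sketch of the same guard outside urllib, with hypothetical names:

    def check_proxies(proxies=None):
        # Anything that quacks like a mapping (has .keys()) is accepted;
        # strings, lists and other sequences are rejected.
        if proxies is None:
            proxies = {}
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        return proxies

    check_proxies({'http': 'http://proxy.example.com:3128'})   # accepted
    try:
        check_proxies(['http://proxy.example.com:3128'])       # not a mapping
    except AssertionError as exc:
        print(exc)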


@ -660,7 +660,7 @@ class ProxyHandler(BaseHandler):
def __init__(self, proxies=None): def __init__(self, proxies=None):
if proxies is None: if proxies is None:
proxies = getproxies() proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping" assert hasattr(proxies, 'keys'), "proxies must be a mapping"
self.proxies = proxies self.proxies = proxies
for type, url in proxies.items(): for type, url in proxies.items():
setattr(self, '%s_open' % type, setattr(self, '%s_open' % type,


@ -64,13 +64,6 @@ class WeakValueDictionary(UserDict.UserDict):
return False return False
return o is not None return o is not None
def has_key(self, key):
try:
o = self.data[key]()
except KeyError:
return False
return o is not None
def __repr__(self): def __repr__(self):
return "<WeakValueDictionary at %s>" % id(self) return "<WeakValueDictionary at %s>" % id(self)
@ -259,13 +252,6 @@ class WeakKeyDictionary(UserDict.UserDict):
def get(self, key, default=None): def get(self, key, default=None):
return self.data.get(ref(key),default) return self.data.get(ref(key),default)
def has_key(self, key):
try:
wr = ref(key)
except TypeError:
return 0
return wr in self.data
def __contains__(self, key): def __contains__(self, key):
try: try:
wr = ref(key) wr = ref(key)
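With their has_key() methods deleted, WeakValueDictionary and WeakKeyDictionary are queried through __contains__ alone, so the in operator keeps working while entries silently vanish with their referents. A small usage sketch (the names are illustrative; the immediate disappearance relies on CPython's reference counting):

    import weakref

    class Thing(object):
        pass

    t = Thing()
    cache = weakref.WeakValueDictionary()
    cache['token'] = t

    assert 'token' in cache        # routed through __contains__
    del t                          # drop the only strong reference
    assert 'token' not in cache    # the entry is gone along with the object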


@ -159,7 +159,7 @@ class BaseHandler:
Subclasses can extend this to add other defaults. Subclasses can extend this to add other defaults.
""" """
if not self.headers.has_key('Content-Length'): if 'Content-Length' not in self.headers:
self.set_content_length() self.set_content_length()
def start_response(self, status, headers,exc_info=None): def start_response(self, status, headers,exc_info=None):
@ -194,11 +194,11 @@ class BaseHandler:
if self.origin_server: if self.origin_server:
if self.client_is_modern(): if self.client_is_modern():
self._write('HTTP/%s %s\r\n' % (self.http_version,self.status)) self._write('HTTP/%s %s\r\n' % (self.http_version,self.status))
if not self.headers.has_key('Date'): if 'Date' not in self.headers:
self._write( self._write(
'Date: %s\r\n' % format_date_time(time.time()) 'Date: %s\r\n' % format_date_time(time.time())
) )
if self.server_software and not self.headers.has_key('Server'): if self.server_software and 'Server' not in self.headers:
self._write('Server: %s\r\n' % self.server_software) self._write('Server: %s\r\n' % self.server_software)
else: else:
self._write('Status: %s\r\n' % self.status) self._write('Status: %s\r\n' % self.status)


@ -80,12 +80,10 @@ class Headers:
def has_key(self, name): def __contains__(self, name):
"""Return true if the message contains the header.""" """Return true if the message contains the header."""
return self.get(name) is not None return self.get(name) is not None
__contains__ = has_key
def get_all(self, name): def get_all(self, name):
"""Return a list of all the values for the named field. """Return a list of all the values for the named field.


@ -166,7 +166,7 @@ _hoppish = {
'connection':1, 'keep-alive':1, 'proxy-authenticate':1, 'connection':1, 'keep-alive':1, 'proxy-authenticate':1,
'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1, 'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1,
'upgrade':1 'upgrade':1
}.has_key }.__contains__
def is_hop_by_hop(header_name): def is_hop_by_hop(header_name):
"""Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header""" """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header"""


@ -345,7 +345,7 @@ def check_environ(environ):
"Invalid CONTENT_LENGTH: %r" % environ['CONTENT_LENGTH']) "Invalid CONTENT_LENGTH: %r" % environ['CONTENT_LENGTH'])
if not environ.get('SCRIPT_NAME'): if not environ.get('SCRIPT_NAME'):
assert_(environ.has_key('PATH_INFO'), assert_('PATH_INFO' in environ,
"One of SCRIPT_NAME or PATH_INFO are required (PATH_INFO " "One of SCRIPT_NAME or PATH_INFO are required (PATH_INFO "
"should at least be '/' if SCRIPT_NAME is empty)") "should at least be '/' if SCRIPT_NAME is empty)")
assert_(environ.get('SCRIPT_NAME') != '/', assert_(environ.get('SCRIPT_NAME') != '/',


@ -686,7 +686,7 @@ class Marshaller:
def dump_array(self, value, write): def dump_array(self, value, write):
i = id(value) i = id(value)
if self.memo.has_key(i): if i in self.memo:
raise TypeError, "cannot marshal recursive sequences" raise TypeError, "cannot marshal recursive sequences"
self.memo[i] = None self.memo[i] = None
dump = self.__dump dump = self.__dump
@ -700,7 +700,7 @@ class Marshaller:
def dump_struct(self, value, write, escape=escape): def dump_struct(self, value, write, escape=escape):
i = id(value) i = id(value)
if self.memo.has_key(i): if i in self.memo:
raise TypeError, "cannot marshal recursive dictionaries" raise TypeError, "cannot marshal recursive dictionaries"
self.memo[i] = None self.memo[i] = None
dump = self.__dump dump = self.__dump
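Marshaller detects cycles by keeping a memo keyed on the id() of every container currently being dumped; the hunks only change how that memo is probed. A stripped-down illustration of the guard, not xmlrpclib's real output format:

    def marshal(value, memo=None):
        memo = {} if memo is None else memo
        key = id(value)
        if key in memo:                   # formerly memo.has_key(key)
            raise TypeError("cannot marshal recursive sequences")
        memo[key] = None
        parts = [marshal(v, memo) if isinstance(v, list) else repr(v)
                 for v in value]
        del memo[key]
        return "[%s]" % ", ".join(parts)

    assert marshal([1, [2, 3]]) == "[1, [2, 3]]"
    cycle = [1]; cycle.append(cycle)
    try:
        marshal(cycle)
    except TypeError as exc:
        print(exc)                        # cannot marshal recursive sequences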


@ -461,7 +461,7 @@ bsddb_keys(bsddbobject *dp)
} }
static PyObject * static PyObject *
bsddb_has_key(bsddbobject *dp, PyObject *args) bsddb_contains(bsddbobject *dp, PyObject *args)
{ {
DBT krec, drec; DBT krec, drec;
int status; int status;
@ -640,7 +640,7 @@ bsddb_sync(bsddbobject *dp)
static PyMethodDef bsddb_methods[] = { static PyMethodDef bsddb_methods[] = {
{"close", (PyCFunction)bsddb_close, METH_NOARGS}, {"close", (PyCFunction)bsddb_close, METH_NOARGS},
{"keys", (PyCFunction)bsddb_keys, METH_NOARGS}, {"keys", (PyCFunction)bsddb_keys, METH_NOARGS},
{"has_key", (PyCFunction)bsddb_has_key, METH_VARARGS}, {"__contains__", (PyCFunction)bsddb_contains, METH_VARARGS},
{"set_location", (PyCFunction)bsddb_set_location, METH_VARARGS}, {"set_location", (PyCFunction)bsddb_set_location, METH_VARARGS},
{"next", (PyCFunction)bsddb_next, METH_NOARGS}, {"next", (PyCFunction)bsddb_next, METH_NOARGS},
{"previous", (PyCFunction)bsddb_previous, METH_NOARGS}, {"previous", (PyCFunction)bsddb_previous, METH_NOARGS},


@ -206,12 +206,12 @@ dbm_keys(register dbmobject *dp, PyObject *unused)
} }
static PyObject * static PyObject *
dbm_has_key(register dbmobject *dp, PyObject *args) dbm_contains(register dbmobject *dp, PyObject *args)
{ {
datum key, val; datum key, val;
int tmp_size; int tmp_size;
if (!PyArg_ParseTuple(args, "s#:has_key", &key.dptr, &tmp_size)) if (!PyArg_ParseTuple(args, "s#:__contains__", &key.dptr, &tmp_size))
return NULL; return NULL;
key.dsize = tmp_size; key.dsize = tmp_size;
check_dbmobject_open(dp); check_dbmobject_open(dp);
@ -277,8 +277,8 @@ static PyMethodDef dbm_methods[] = {
"close()\nClose the database."}, "close()\nClose the database."},
{"keys", (PyCFunction)dbm_keys, METH_NOARGS, {"keys", (PyCFunction)dbm_keys, METH_NOARGS,
"keys() -> list\nReturn a list of all keys in the database."}, "keys() -> list\nReturn a list of all keys in the database."},
{"has_key", (PyCFunction)dbm_has_key, METH_VARARGS, {"__contains__",(PyCFunction)dbm_contains, METH_VARARGS,
"has_key(key} -> boolean\nReturn true iff key is in the database."}, "__contains__(key} -> boolean\True iff key is in the database."},
{"get", (PyCFunction)dbm_get, METH_VARARGS, {"get", (PyCFunction)dbm_get, METH_VARARGS,
"get(key[, default]) -> value\n" "get(key[, default]) -> value\n"
"Return the value for key if present, otherwise default."}, "Return the value for key if present, otherwise default."},


@ -241,16 +241,16 @@ dbm_keys(register dbmobject *dp, PyObject *unused)
return v; return v;
} }
PyDoc_STRVAR(dbm_has_key__doc__, PyDoc_STRVAR(dbm_contains__doc__,
"has_key(key) -> boolean\n\ "__contains__(key) -> bool\n\
Find out whether or not the database contains a given key."); Find out whether or not the database contains a given key.");
static PyObject * static PyObject *
dbm_has_key(register dbmobject *dp, PyObject *args) dbm_contains(register dbmobject *dp, PyObject *args)
{ {
datum key; datum key;
if (!PyArg_ParseTuple(args, "s#:has_key", &key.dptr, &key.dsize)) if (!PyArg_ParseTuple(args, "s#:contains", &key.dptr, &key.dsize))
return NULL; return NULL;
check_dbmobject_open(dp); check_dbmobject_open(dp);
return PyInt_FromLong((long) gdbm_exists(dp->di_dbm, key)); return PyInt_FromLong((long) gdbm_exists(dp->di_dbm, key));
@ -355,7 +355,7 @@ dbm_sync(register dbmobject *dp, PyObject *unused)
static PyMethodDef dbm_methods[] = { static PyMethodDef dbm_methods[] = {
{"close", (PyCFunction)dbm_close, METH_NOARGS, dbm_close__doc__}, {"close", (PyCFunction)dbm_close, METH_NOARGS, dbm_close__doc__},
{"keys", (PyCFunction)dbm_keys, METH_NOARGS, dbm_keys__doc__}, {"keys", (PyCFunction)dbm_keys, METH_NOARGS, dbm_keys__doc__},
{"has_key", (PyCFunction)dbm_has_key, METH_VARARGS, dbm_has_key__doc__}, {"__contains__",(PyCFunction)dbm_contains,METH_VARARGS, dbm_contains__doc__},
{"firstkey", (PyCFunction)dbm_firstkey,METH_NOARGS, dbm_firstkey__doc__}, {"firstkey", (PyCFunction)dbm_firstkey,METH_NOARGS, dbm_firstkey__doc__},
{"nextkey", (PyCFunction)dbm_nextkey, METH_VARARGS, dbm_nextkey__doc__}, {"nextkey", (PyCFunction)dbm_nextkey, METH_VARARGS, dbm_nextkey__doc__},
{"reorganize",(PyCFunction)dbm_reorganize,METH_NOARGS, dbm_reorganize__doc__}, {"reorganize",(PyCFunction)dbm_reorganize,METH_NOARGS, dbm_reorganize__doc__},


@ -700,15 +700,6 @@ static PySequenceMethods proxy_as_sequence = {
0, /* sq_inplace_repeat */ 0, /* sq_inplace_repeat */
}; };
static PyObject *
proxy_has_key(proxyobject *pp, PyObject *key)
{
int res = PyDict_Contains(pp->dict, key);
if (res < 0)
return NULL;
return PyBool_FromLong(res);
}
static PyObject * static PyObject *
proxy_get(proxyobject *pp, PyObject *args) proxy_get(proxyobject *pp, PyObject *args)
{ {
@ -761,10 +752,8 @@ proxy_copy(proxyobject *pp)
} }
static PyMethodDef proxy_methods[] = { static PyMethodDef proxy_methods[] = {
{"has_key", (PyCFunction)proxy_has_key, METH_O,
PyDoc_STR("D.has_key(k) -> True if D has a key k, else False")},
{"get", (PyCFunction)proxy_get, METH_VARARGS, {"get", (PyCFunction)proxy_get, METH_VARARGS,
PyDoc_STR("D.get(k[,d]) -> D[k] if D.has_key(k), else d." PyDoc_STR("D.get(k[,d]) -> D[k] if k in D, else d."
" d defaults to None.")}, " d defaults to None.")},
{"keys", (PyCFunction)proxy_keys, METH_NOARGS, {"keys", (PyCFunction)proxy_keys, METH_NOARGS,
PyDoc_STR("D.keys() -> list of D's keys")}, PyDoc_STR("D.keys() -> list of D's keys")},


@ -1621,7 +1621,7 @@ dict_richcompare(PyObject *v, PyObject *w, int op)
} }
static PyObject * static PyObject *
dict_has_key(register dictobject *mp, PyObject *key) dict_contains(register dictobject *mp, PyObject *key)
{ {
long hash; long hash;
dictentry *ep; dictentry *ep;
@ -1856,9 +1856,6 @@ dict_iteritems(dictobject *dict)
} }
PyDoc_STRVAR(has_key__doc__,
"D.has_key(k) -> True if D has a key k, else False");
PyDoc_STRVAR(contains__doc__, PyDoc_STRVAR(contains__doc__,
"D.__contains__(k) -> True if D has a key k, else False"); "D.__contains__(k) -> True if D has a key k, else False");
@ -1911,12 +1908,10 @@ PyDoc_STRVAR(iteritems__doc__,
"D.iteritems() -> an iterator over the (key, value) items of D"); "D.iteritems() -> an iterator over the (key, value) items of D");
static PyMethodDef mapp_methods[] = { static PyMethodDef mapp_methods[] = {
{"__contains__",(PyCFunction)dict_has_key, METH_O | METH_COEXIST, {"__contains__",(PyCFunction)dict_contains, METH_O | METH_COEXIST,
contains__doc__}, contains__doc__},
{"__getitem__", (PyCFunction)dict_subscript, METH_O | METH_COEXIST, {"__getitem__", (PyCFunction)dict_subscript, METH_O | METH_COEXIST,
getitem__doc__}, getitem__doc__},
{"has_key", (PyCFunction)dict_has_key, METH_O,
has_key__doc__},
{"get", (PyCFunction)dict_get, METH_VARARGS, {"get", (PyCFunction)dict_get, METH_VARARGS,
get__doc__}, get__doc__},
{"setdefault", (PyCFunction)dict_setdefault, METH_VARARGS, {"setdefault", (PyCFunction)dict_setdefault, METH_VARARGS,


@ -4432,7 +4432,6 @@ slot_tp_hash(PyObject *self)
} }
if (func == NULL) { if (func == NULL) {
PyErr_Clear();
PyErr_Format(PyExc_TypeError, "unhashable type: '%.200s'", PyErr_Format(PyExc_TypeError, "unhashable type: '%.200s'",
self->ob_type->tp_name); self->ob_type->tp_name);
return -1; return -1;


@ -674,7 +674,7 @@ class PyBuildExt(build_ext):
db_minor = int(m.group(1)) db_minor = int(m.group(1))
db_ver = (db_major, db_minor) db_ver = (db_major, db_minor)
if ( (not db_ver_inc_map.has_key(db_ver)) and if ( (db_ver not in db_ver_inc_map) and
(db_ver <= max_db_ver and db_ver >= min_db_ver) ): (db_ver <= max_db_ver and db_ver >= min_db_ver) ):
# save the include directory with the db.h version # save the include directory with the db.h version
# (first occurrance only) # (first occurrance only)