Get rid of dict.has_key(). Boy this has a lot of repercussions!

Not all code has been fixed yet; this is just a checkpoint...
The C API still has PyMapping_HasKey() and _HasKeyString(); not sure
if I want to change those just yet.
Guido van Rossum 2006-08-18 22:13:04 +00:00
parent d2dbecb4ae
commit e2b70bcf74
93 changed files with 215 additions and 313 deletions
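
To make the change concrete, here is a minimal before/after sketch of the idiom this commit replaces throughout the tree (the dictionary and keys below are invented for illustration, not taken from the patch):

d = {'spam': 1}

# Old spelling, removed by this commit:
#     if d.has_key('spam'): ...
# New spelling, using the containment operator:
if 'spam' in d:
    print 'found'

# Code that needs a callable membership test can bind __contains__ directly:
check = d.__contains__
assert check('spam') and not check('eggs')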

View file

@ -1115,7 +1115,7 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
/*
On success, return 1 if the mapping object has the key, key,
and 0 otherwise. This is equivalent to the Python expression:
o.has_key(key).
key in o.
This function always succeeds.
*/
@ -1125,7 +1125,7 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
/*
Return 1 if the mapping object has the key, key,
and 0 otherwise. This is equivalent to the Python expression:
o.has_key(key).
key in o.
This function always succeeds.

View file

@ -174,7 +174,7 @@ def generate_html_documentation(self):
methods = {}
for method_name in self.system_listMethods():
if self.funcs.has_key(method_name):
if method_name in self.funcs:
method = self.funcs[method_name]
elif self.instance is not None:
method_info = [None, None] # argspec, documentation

View file

@ -313,7 +313,7 @@ def system_methodHelp(self, method_name):
Returns a string containing documentation for the specified method."""
method = None
if self.funcs.has_key(method_name):
if method_name in self.funcs:
method = self.funcs[method_name]
elif self.instance is not None:
# Instance can implement _methodHelp to return help for a method

View file

@ -41,7 +41,6 @@ def iteritems(self): return self.data.iteritems()
def iterkeys(self): return self.data.iterkeys()
def itervalues(self): return self.data.itervalues()
def values(self): return self.data.values()
def has_key(self, key): return self.data.has_key(key)
def update(self, dict=None, **kwargs):
if dict is None:
pass
@ -55,11 +54,11 @@ def update(self, dict=None, **kwargs):
if len(kwargs):
self.data.update(kwargs)
def get(self, key, failobj=None):
if not self.has_key(key):
if key not in self:
return failobj
return self[key]
def setdefault(self, key, failobj=None):
if not self.has_key(key):
if key not in self:
self[key] = failobj
return self[key]
def pop(self, key, *args):
@ -91,14 +90,12 @@ class DictMixin:
def __iter__(self):
for k in self.keys():
yield k
def has_key(self, key):
def __contains__(self, key):
try:
value = self[key]
except KeyError:
return False
return True
def __contains__(self, key):
return self.has_key(key)
# third level takes advantage of second level definitions
def iteritems(self):
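
With this change UserDict.DictMixin defines __contains__ itself, built on __getitem__, instead of defining has_key and aliasing __contains__ to it. A rough sketch of a minimal mapping that picks this up (TinyMap is an invented name, not part of the patch):

from UserDict import DictMixin

class TinyMap(DictMixin):
    # Only the four primitives; DictMixin supplies the rest,
    # including __contains__ implemented via __getitem__.
    def __init__(self):
        self._data = {}
    def __getitem__(self, key):
        return self._data[key]
    def __setitem__(self, key, value):
        self._data[key] = value
    def __delitem__(self, key):
        del self._data[key]
    def keys(self):
        return self._data.keys()

m = TinyMap()
m['x'] = 1
assert 'x' in m and 'y' not in m    # goes through DictMixin.__contains__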

View file

@ -247,7 +247,7 @@ def del_channel(self, map=None):
fd = self._fileno
if map is None:
map = self._map
if map.has_key(fd):
if fd in map:
#self.log_info('closing channel %d:%s' % (fd, self))
del map[fd]
self._fileno = None

View file

@ -133,8 +133,7 @@ def do_clear(self, arg):
raise NotImplementedError, "subclass of bdb must implement do_clear()"
def break_anywhere(self, frame):
return self.breaks.has_key(
self.canonic(frame.f_code.co_filename))
return self.canonic(frame.f_code.co_filename) in self.breaks
# Derived classes should override the user_* methods
# to gain control.
@ -245,7 +244,7 @@ def clear_break(self, filename, lineno):
# pair, then remove the breaks entry
for bp in Breakpoint.bplist[filename, lineno][:]:
bp.deleteMe()
if not Breakpoint.bplist.has_key((filename, lineno)):
if (filename, lineno) not in Breakpoint.bplist:
self.breaks[filename].remove(lineno)
if not self.breaks[filename]:
del self.breaks[filename]
@ -453,7 +452,7 @@ def __init__(self, file, line, temporary=0, cond=None, funcname=None):
Breakpoint.next = Breakpoint.next + 1
# Build the two lists
self.bpbynumber.append(self)
if self.bplist.has_key((file, line)):
if (file, line) in self.bplist:
self.bplist[file, line].append(self)
else:
self.bplist[file, line] = [self]

View file

@ -255,6 +255,8 @@ def has_key(self, key):
self._checkOpen()
return _DeadlockWrap(self.db.has_key, key)
__contains__ = has_key
def set_location(self, key):
self._checkOpen()
self._checkCursor()

View file

@ -21,7 +21,7 @@
# added to _bsddb.c.
#
import db
from . import db
try:
from UserDict import DictMixin
@ -161,6 +161,8 @@ def key_range(self, *args, **kwargs):
return self._cobj.key_range(*args, **kwargs)
def has_key(self, *args, **kwargs):
return self._cobj.has_key(*args, **kwargs)
def __contains__(self, key):
return self._cobj.has_key(key)
def items(self, *args, **kwargs):
return self._cobj.items(*args, **kwargs)
def keys(self, *args, **kwargs):
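
Besides the has_key work, the bsddb package and its test suite switch to explicit relative imports so the modules keep resolving inside the package under absolute-import semantics. A small sketch of the two spellings (only meaningful when run inside a package such as bsddb; module names as in these files):

# Old implicit relative import, ambiguous once absolute imports become the default:
#     import db
# New explicit relative imports:
from . import db
from .test_all import verbose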

View file

@ -35,7 +35,7 @@
except ImportError:
# DictMixin is new in Python 2.3
class DictMixin: pass
import db
from . import db
#------------------------------------------------------------------------
@ -197,6 +197,10 @@ def join(self, cursorList, flags=0):
raise NotImplementedError
def __contains__(self, key):
return self.has_key(key)
#----------------------------------------------
# Methods allowed to pass-through to self.db
#

View file

@ -55,7 +55,7 @@ def DeadlockWrap(function, *_args, **_kwargs):
"""
sleeptime = _deadlock_MinSleepTime
max_retries = _kwargs.get('max_retries', -1)
if _kwargs.has_key('max_retries'):
if 'max_retries' in _kwargs:
del _kwargs['max_retries']
while True:
try:

View file

@ -41,8 +41,12 @@ def testPrintVersions(self):
# This little hack is for when this module is run as main and all the
# other modules import it so they will still be able to get the right
# verbose setting. It's confusing but it works.
import test_all
test_all.verbose = verbose
try:
import test_all
except ImportError:
pass
else:
test_all.verbose = verbose
def suite():

View file

@ -14,7 +14,7 @@
have_threads = 0
import unittest
from test_all import verbose
from .test_all import verbose
try:
# For Pythons w/distutils pybsddb

View file

@ -20,7 +20,7 @@
# For Python 2.3
from bsddb import db
from test_all import verbose
from .test_all import verbose
DASH = '-'

View file

@ -3,9 +3,10 @@
"""
import sys, os, re
import test_all
from cStringIO import StringIO
from . import test_all
import unittest
try:
# For Pythons w/distutils pybsddb

View file

@ -7,7 +7,7 @@
import unittest
import tempfile
from test_all import verbose
from .test_all import verbose
try:
# For Pythons w/distutils pybsddb

View file

@ -15,7 +15,7 @@
# For Python 2.3
from bsddb import db, dbshelve
from test_all import verbose
from .test_all import verbose
#----------------------------------------------------------------------

View file

@ -28,7 +28,7 @@
import pickle
import unittest
from test_all import verbose
from .test_all import verbose
try:
# For Pythons w/distutils pybsddb

View file

@ -15,7 +15,7 @@
# For Python 2.3
from bsddb import db
from test_all import verbose
from .test_all import verbose
# We're going to get warnings in this module about trying to close the db when
# its env is already closed. Let's just ignore those.

View file

@ -14,7 +14,7 @@
# For Python 2.3
from bsddb import db
from test_all import verbose
from .test_all import verbose
#----------------------------------------------------------------------

View file

@ -13,7 +13,7 @@
have_threads = 0
import unittest
from test_all import verbose
from .test_all import verbose
try:
# For Pythons w/distutils pybsddb

View file

@ -15,7 +15,7 @@
import unittest
from test_all import verbose
from .test_all import verbose
try:
# For Pythons w/distutils pybsddb

View file

@ -14,7 +14,7 @@
# For Python 2.3
from bsddb import db
from test_all import verbose
from .test_all import verbose
#----------------------------------------------------------------------

View file

@ -8,7 +8,7 @@
from pprint import pprint
import unittest
from test_all import verbose
from .test_all import verbose
try:
# For Pythons w/distutils pybsddb

View file

@ -10,7 +10,7 @@
except ImportError:
from bsddb import db
from test_all import verbose
from .test_all import verbose
class DBSequenceTest(unittest.TestCase):

View file

@ -31,7 +31,7 @@ class WindowsError(Exception):
pass
import unittest
from test_all import verbose
from .test_all import verbose
try:
# For Pythons w/distutils pybsddb

View file

@ -608,14 +608,6 @@ def keys(self):
if item.name not in keys: keys.append(item.name)
return keys
def has_key(self, key):
"""Dictionary style has_key() method."""
if self.list is None:
raise TypeError, "not indexable"
for item in self.list:
if item.name == key: return True
return False
def __contains__(self, key):
"""Dictionary style __contains__ method."""
if self.list is None:

View file

@ -199,7 +199,7 @@ def __init__(self, isjunk=None, a='', b=''):
# DON'T USE! Only __chain_b uses this. Use isbjunk.
# isbjunk
# for x in b, isbjunk(x) == isjunk(x) but much faster;
# it's really the has_key method of a hidden dict.
# it's really the __contains__ method of a hidden dict.
# DOES NOT WORK for x in a!
# isbpopular
# for x in b, isbpopular(x) is true iff b is reasonably long
@ -341,8 +341,8 @@ def __chain_b(self):
# lot of junk in the sequence, the number of *unique* junk
# elements is probably small. So the memory burden of keeping
# this dict alive is likely trivial compared to the size of b2j.
self.isbjunk = junkdict.has_key
self.isbpopular = populardict.has_key
self.isbjunk = junkdict.__contains__
self.isbpopular = populardict.__contains__
def find_longest_match(self, alo, ahi, blo, bhi):
"""Find longest matching block in a[alo:ahi] and b[blo:bhi].
@ -674,7 +674,7 @@ def quick_ratio(self):
# avail[x] is the number of times x appears in 'b' less the
# number of times we've seen it in 'a' so far ... kinda
avail = {}
availhas, matches = avail.has_key, 0
availhas, matches = avail.__contains__, 0
for elt in self.a:
if availhas(elt):
numb = avail[elt]
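
In difflib the cached predicate simply changes from the bound has_key method of a hidden dict to its bound __contains__ method; called with one argument, both answer membership. A tiny illustrative sketch (the junk characters are arbitrary):

junkdict = dict.fromkeys(' \t')      # pretend whitespace is junk
isbjunk = junkdict.__contains__      # bound method used as a predicate
assert isbjunk(' ') and not isbjunk('x')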

View file

@ -124,7 +124,7 @@ def visit (z, dirname, names):
def check_archive_formats (formats):
for format in formats:
if not ARCHIVE_FORMATS.has_key(format):
if format not in ARCHIVE_FORMATS:
return format
else:
return None

View file

@ -159,7 +159,7 @@ class (via the 'executables' class attribute), but most will have:
# basically the same things with Unix C compilers.
for key in args.keys():
if not self.executables.has_key(key):
if key not in self.executables:
raise ValueError, \
"unknown executable '%s' for class %s" % \
(key, self.__class__.__name__)

View file

@ -341,7 +341,7 @@ def check_extensions_list (self, extensions):
# Medium-easy stuff: same syntax/semantics, different names.
ext.runtime_library_dirs = build_info.get('rpath')
if build_info.has_key('def_file'):
if 'def_file' in build_info:
log.warn("'def_file' element of build info dict "
"no longer supported")

View file

@ -101,9 +101,9 @@ class found in 'cmdclass' is used in place of the default, which is
else:
klass = Distribution
if not attrs.has_key('script_name'):
if 'script_name' not in attrs:
attrs['script_name'] = os.path.basename(sys.argv[0])
if not attrs.has_key('script_args'):
if 'script_args' not in attrs:
attrs['script_args'] = sys.argv[1:]
# Create the Distribution instance, using the remaining arguments
@ -111,7 +111,7 @@ class found in 'cmdclass' is used in place of the default, which is
try:
_setup_distribution = dist = klass(attrs)
except DistutilsSetupError, msg:
if attrs.has_key('name'):
if 'name' in attrs:
raise SystemExit, "error in %s setup command: %s" % \
(attrs['name'], msg)
else:

View file

@ -239,7 +239,7 @@ def __init__ (self, attrs=None):
for (opt, val) in cmd_options.items():
opt_dict[opt] = ("setup script", val)
if attrs.has_key('licence'):
if 'licence' in attrs:
attrs['license'] = attrs['licence']
del attrs['licence']
msg = "'licence' distribution option is deprecated; use 'license'"
@ -343,7 +343,7 @@ def find_config_files (self):
user_filename = "pydistutils.cfg"
# And look for the user config file
if os.environ.has_key('HOME'):
if 'HOME' in os.environ:
user_file = os.path.join(os.environ.get('HOME'), user_filename)
if os.path.isfile(user_file):
files.append(user_file)
@ -388,7 +388,7 @@ def parse_config_files (self, filenames=None):
# If there was a "global" section in the config file, use it
# to set Distribution options.
if self.command_options.has_key('global'):
if 'global' in self.command_options:
for (opt, (src, val)) in self.command_options['global'].items():
alias = self.negative_opt.get(opt)
try:
@ -907,7 +907,7 @@ def _set_command_options (self, command_obj, option_dict=None):
try:
is_string = type(value) is StringType
if neg_opt.has_key(option) and is_string:
if option in neg_opt and is_string:
setattr(command_obj, neg_opt[option], not strtobool(value))
elif option in bool_opts and is_string:
setattr(command_obj, option, strtobool(value))

View file

@ -97,7 +97,7 @@ def set_option_table (self, option_table):
self._build_index()
def add_option (self, long_option, short_option=None, help_string=None):
if self.option_index.has_key(long_option):
if long_option in self.option_index:
raise DistutilsGetoptError, \
"option conflict: already an option '%s'" % long_option
else:
@ -109,7 +109,7 @@ def add_option (self, long_option, short_option=None, help_string=None):
def has_option (self, long_option):
"""Return true if the option table for this parser has an
option with long name 'long_option'."""
return self.option_index.has_key(long_option)
return long_option in self.option_index
def get_attr_name (self, long_option):
"""Translate long option name 'long_option' to the form it
@ -121,11 +121,11 @@ def get_attr_name (self, long_option):
def _check_alias_dict (self, aliases, what):
assert type(aliases) is DictionaryType
for (alias, opt) in aliases.items():
if not self.option_index.has_key(alias):
if alias not in self.option_index:
raise DistutilsGetoptError, \
("invalid %s '%s': "
"option '%s' not defined") % (what, alias, alias)
if not self.option_index.has_key(opt):
if opt not in self.option_index:
raise DistutilsGetoptError, \
("invalid %s '%s': "
"aliased option '%s' not defined") % (what, alias, opt)

View file

@ -150,22 +150,22 @@ def customize_compiler(compiler):
get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
'CCSHARED', 'LDSHARED', 'SO')
if os.environ.has_key('CC'):
if 'CC' in os.environ:
cc = os.environ['CC']
if os.environ.has_key('CXX'):
if 'CXX' in os.environ:
cxx = os.environ['CXX']
if os.environ.has_key('LDSHARED'):
if 'LDSHARED' in os.environ:
ldshared = os.environ['LDSHARED']
if os.environ.has_key('CPP'):
if 'CPP' in os.environ:
cpp = os.environ['CPP']
else:
cpp = cc + " -E" # not always
if os.environ.has_key('LDFLAGS'):
if 'LDFLAGS' in os.environ:
ldshared = ldshared + ' ' + os.environ['LDFLAGS']
if os.environ.has_key('CFLAGS'):
if 'CFLAGS' in os.environ:
cflags = opt + ' ' + os.environ['CFLAGS']
ldshared = ldshared + ' ' + os.environ['CFLAGS']
if os.environ.has_key('CPPFLAGS'):
if 'CPPFLAGS' in os.environ:
cpp = cpp + ' ' + os.environ['CPPFLAGS']
cflags = cflags + ' ' + os.environ['CPPFLAGS']
ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
@ -277,12 +277,12 @@ def parse_makefile(fn, g=None):
if m:
n = m.group(1)
found = True
if done.has_key(n):
if n in done:
item = str(done[n])
elif notdone.has_key(n):
elif n in notdone:
# get it on a subsequent round
found = False
elif os.environ.has_key(n):
elif n in os.environ:
# do it like make: fall back to environment
item = os.environ[n]
else:
@ -366,7 +366,7 @@ def _init_posix():
# MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
# it needs to be compatible.
# If it isn't set we set it to the configure-time value
if sys.platform == 'darwin' and g.has_key('MACOSX_DEPLOYMENT_TARGET'):
if sys.platform == 'darwin' and 'MACOSX_DEPLOYMENT_TARGET' in g:
cfg_target = g['MACOSX_DEPLOYMENT_TARGET']
cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
if cur_target == '':

View file

@ -89,7 +89,7 @@ def __init__ (self, filename=None, file=None, **options):
# set values for all options -- either from client option hash
# or fallback to default_options
for opt in self.default_options.keys():
if options.has_key (opt):
if opt in options:
setattr (self, opt, options[opt])
else:
@ -97,7 +97,7 @@ def __init__ (self, filename=None, file=None, **options):
# sanity check client option hash
for opt in options.keys():
if not self.default_options.has_key (opt):
if opt not in self.default_options:
raise KeyError, "invalid TextFile option '%s'" % opt
if file is None:

View file

@ -200,11 +200,11 @@ def check_environ ():
if _environ_checked:
return
if os.name == 'posix' and not os.environ.has_key('HOME'):
if os.name == 'posix' and 'HOME' not in os.environ:
import pwd
os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
if not os.environ.has_key('PLAT'):
if 'PLAT' not in os.environ:
os.environ['PLAT'] = get_platform()
_environ_checked = 1
@ -222,7 +222,7 @@ def subst_vars (s, local_vars):
check_environ()
def _subst (match, local_vars=local_vars):
var_name = match.group(1)
if local_vars.has_key(var_name):
if var_name in local_vars:
return str(local_vars[var_name])
else:
return os.environ[var_name]

View file

@ -195,9 +195,6 @@ def __delitem__(self, key):
def keys(self):
return self._index.keys()
def has_key(self, key):
return key in self._index
def __contains__(self, key):
return key in self._index

View file

@ -120,13 +120,10 @@ def items(self):
"""Return a list of (key, message) tuples. Memory intensive."""
return list(self.iteritems())
def has_key(self, key):
def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise."""
raise NotImplementedError('Method must be implemented by subclass')
def __contains__(self, key):
return self.has_key(key)
def __len__(self):
"""Return a count of messages in the mailbox."""
raise NotImplementedError('Method must be implemented by subclass')
@ -330,7 +327,7 @@ def iterkeys(self):
continue
yield key
def has_key(self, key):
def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise."""
self._refresh()
return key in self._toc
@ -515,7 +512,7 @@ def iterkeys(self):
for key in self._toc.keys():
yield key
def has_key(self, key):
def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise."""
self._lookup()
return key in self._toc
@ -902,7 +899,7 @@ def iterkeys(self):
return iter(sorted(int(entry) for entry in os.listdir(self._path)
if entry.isdigit()))
def has_key(self, key):
def __contains__(self, key):
"""Return True if the keyed message exists, False otherwise."""
return os.path.exists(os.path.join(self._path, str(key)))
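
In mailbox.py the abstract has_key hook and its per-format overrides become __contains__ itself, so each mailbox class now implements the operator directly. Very roughly, the resulting shape (TocMailbox is a made-up stand-in; the real subclasses do far more):

class Mailbox:
    def __contains__(self, key):
        """Return True if the keyed message exists, False otherwise."""
        raise NotImplementedError('Method must be implemented by subclass')

class TocMailbox(Mailbox):
    def __init__(self):
        self._toc = {}          # table of contents, key -> location
    def __contains__(self, key):
        return key in self._toc

assert 'missing' not in TocMailbox()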

View file

@ -242,7 +242,7 @@ def import_module(self, partname, fqname, parent):
else:
self.msgout(3, "import_module ->", m)
return m
if self.badmodules.has_key(fqname):
if fqname in self.badmodules:
self.msgout(3, "import_module -> None")
return None
if parent and parent.__path__ is None:
@ -388,7 +388,7 @@ def load_package(self, fqname, pathname):
return m
def add_module(self, fqname):
if self.modules.has_key(fqname):
if fqname in self.modules:
return self.modules[fqname]
self.modules[fqname] = m = Module(fqname)
return m

View file

@ -602,7 +602,7 @@ def _set_opt_strings(self, opts):
def _set_attrs(self, attrs):
for attr in self.ATTRS:
if attrs.has_key(attr):
if attr in attrs:
setattr(self, attr, attrs[attr])
del attrs[attr]
else:
@ -854,7 +854,7 @@ def _update_careful(self, dict):
are silently ignored.
"""
for attr in dir(self):
if dict.has_key(attr):
if attr in dict:
dval = dict[attr]
if dval is not None:
setattr(self, attr, dval)
@ -974,10 +974,10 @@ def destroy(self):
def _check_conflict(self, option):
conflict_opts = []
for opt in option._short_opts:
if self._short_opt.has_key(opt):
if opt in self._short_opt:
conflict_opts.append((opt, self._short_opt[opt]))
for opt in option._long_opts:
if self._long_opt.has_key(opt):
if opt in self._long_opt:
conflict_opts.append((opt, self._long_opt[opt]))
if conflict_opts:
@ -1023,7 +1023,7 @@ def add_option(self, *args, **kwargs):
if option.dest is not None: # option has a dest, we need a default
if option.default is not NO_DEFAULT:
self.defaults[option.dest] = option.default
elif not self.defaults.has_key(option.dest):
elif option.dest not in self.defaults:
self.defaults[option.dest] = None
return option
@ -1039,8 +1039,8 @@ def get_option(self, opt_str):
self._long_opt.get(opt_str))
def has_option(self, opt_str):
return (self._short_opt.has_key(opt_str) or
self._long_opt.has_key(opt_str))
return (opt_str in self._short_opt or
opt_str in self._long_opt)
def remove_option(self, opt_str):
option = self._short_opt.get(opt_str)
@ -1658,7 +1658,7 @@ def _match_abbrev(s, wordmap):
'words', raise BadOptionError.
"""
# Is there an exact match?
if wordmap.has_key(s):
if s in wordmap:
return s
else:
# Isolate all words with s as a prefix.

View file

@ -436,8 +436,6 @@ def __delitem__(self, key):
def __delitem__(self, key):
unsetenv(key)
del self.data[key.upper()]
def has_key(self, key):
return key.upper() in self.data
def __contains__(self, key):
return key.upper() in self.data
def get(self, key, failobj=None):
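
The case-insensitive environment wrapper in os.py keeps only __contains__; the key is upper-cased before the lookup, so membership stays case-blind on the platforms that use this wrapper. A stripped-down sketch (CaseBlindEnviron is not the real os class):

class CaseBlindEnviron:
    def __init__(self, data):
        self.data = dict((k.upper(), v) for k, v in data.items())
    def __contains__(self, key):
        return key.upper() in self.data

env = CaseBlindEnviron({'Path': r'C:\Windows'})
assert 'PATH' in env and 'path' in env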

View file

@ -1287,19 +1287,19 @@ def decode_long(data):
r"""Decode a long from a two's complement little-endian binary string.
>>> decode_long('')
0L
0
>>> decode_long("\xff\x00")
255L
255
>>> decode_long("\xff\x7f")
32767L
32767
>>> decode_long("\x00\xff")
-256L
-256
>>> decode_long("\x00\x80")
-32768L
-32768
>>> decode_long("\x80")
-128L
-128
>>> decode_long("\x7f")
127L
127
"""
nbytes = len(data)

View file

@ -517,23 +517,14 @@ def read_decimalnl_long(f):
r"""
>>> import StringIO
>>> read_decimalnl_long(StringIO.StringIO("1234\n56"))
Traceback (most recent call last):
...
ValueError: trailing 'L' required in '1234'
Someday the trailing 'L' will probably go away from this output.
>>> read_decimalnl_long(StringIO.StringIO("1234L\n56"))
1234L
1234
>>> read_decimalnl_long(StringIO.StringIO("123456789012345678901234L\n6"))
123456789012345678901234L
123456789012345678901234
"""
s = read_stringnl(f, decode=False, stripquotes=False)
if not s.endswith("L"):
raise ValueError("trailing 'L' required in %r" % s)
return long(s)
@ -625,15 +616,15 @@ def read_long1(f):
r"""
>>> import StringIO
>>> read_long1(StringIO.StringIO("\x00"))
0L
0
>>> read_long1(StringIO.StringIO("\x02\xff\x00"))
255L
255
>>> read_long1(StringIO.StringIO("\x02\xff\x7f"))
32767L
32767
>>> read_long1(StringIO.StringIO("\x02\x00\xff"))
-256L
-256
>>> read_long1(StringIO.StringIO("\x02\x00\x80"))
-32768L
-32768
"""
n = read_uint1(f)
@ -657,15 +648,15 @@ def read_long4(f):
r"""
>>> import StringIO
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x00"))
255L
255
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x7f"))
32767L
32767
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\xff"))
-256L
-256
>>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\x80"))
-32768L
-32768
>>> read_long1(StringIO.StringIO("\x00\x00\x00\x00"))
0L
0
"""
n = read_int4(f)

View file

@ -877,7 +877,7 @@ def architecture(executable=sys.executable,bits='',linkage=''):
executable == sys.executable:
# "file" command did not return anything; we'll try to provide
# some sensible defaults then...
if _default_architecture.has_key(sys.platform):
if sys.platform in _default_architecture:
b,l = _default_architecture[sys.platform]
if b:
bits = b

View file

@ -318,7 +318,7 @@ def trace_dispatch_c_call (self, frame, t):
fn = ("", 0, self.c_func_name)
self.cur = (t, 0, 0, fn, frame, self.cur)
timings = self.timings
if timings.has_key(fn):
if fn in timings:
cc, ns, tt, ct, callers = timings[fn]
timings[fn] = cc, ns+1, tt, ct, callers
else:

View file

@ -140,7 +140,7 @@ def get_top_level_stats(self):
self.total_calls += nc
self.prim_calls += cc
self.total_tt += tt
if callers.has_key(("jprofile", 0, "profiler")):
if ("jprofile", 0, "profiler") in callers:
self.top_level[func] = None
if len(func_std_string(func)) > self.max_name_len:
self.max_name_len = len(func_std_string(func))

View file

@ -428,10 +428,6 @@ def setdefault(self, name, default=""):
self.dict[lowername] = default
return default
def has_key(self, name):
"""Determine whether a message contains the named header."""
return name.lower() in self.dict
def __contains__(self, name):
"""Determine whether a message contains the named header."""
return name.lower() in self.dict

View file

@ -231,7 +231,7 @@ def intersection(self, other):
little, big = self, other
else:
little, big = other, self
common = ifilter(big._data.has_key, little)
common = ifilter(big._data.__contains__, little)
return self.__class__(common)
def __xor__(self, other):
@ -256,9 +256,9 @@ def symmetric_difference(self, other):
otherdata = other._data
except AttributeError:
otherdata = Set(other)._data
for elt in ifilterfalse(otherdata.has_key, selfdata):
for elt in ifilterfalse(otherdata.__contains__, selfdata):
data[elt] = value
for elt in ifilterfalse(selfdata.has_key, otherdata):
for elt in ifilterfalse(selfdata.__contains__, otherdata):
data[elt] = value
return result
@ -283,7 +283,7 @@ def difference(self, other):
except AttributeError:
otherdata = Set(other)._data
value = True
for elt in ifilterfalse(otherdata.has_key, self):
for elt in ifilterfalse(otherdata.__contains__, self):
data[elt] = value
return result
@ -309,7 +309,7 @@ def issubset(self, other):
self._binary_sanity_check(other)
if len(self) > len(other): # Fast check for obvious cases
return False
for elt in ifilterfalse(other._data.has_key, self):
for elt in ifilterfalse(other._data.__contains__, self):
return False
return True
@ -318,7 +318,7 @@ def issuperset(self, other):
self._binary_sanity_check(other)
if len(self) < len(other): # Fast check for obvious cases
return False
for elt in ifilterfalse(self._data.has_key, other):
for elt in ifilterfalse(self._data.__contains__, other):
return False
return True
@ -501,7 +501,7 @@ def difference_update(self, other):
other = Set(other)
if self is other:
self.clear()
for elt in ifilter(data.has_key, other):
for elt in ifilter(data.__contains__, other):
del data[elt]
# Python dict-like mass mutations: update, clear
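
sets.py now hands bound __contains__ methods to itertools.ifilter and ifilterfalse in the places it used to hand them has_key; either way the argument is a one-element predicate returning a boolean. A brief usage sketch with made-up data:

from itertools import ifilter, ifilterfalse

data = {'a': 1, 'b': 2}
candidates = ['a', 'b', 'c']
print list(ifilter(data.__contains__, candidates))       # ['a', 'b']
print list(ifilterfalse(data.__contains__, candidates))  # ['c']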

View file

@ -20,7 +20,7 @@
# access returns a *copy* of the entry!
del d[key] # delete data stored at key (raises KeyError
# if no such key)
flag = d.has_key(key) # true if the key exists; same as "key in d"
flag = key in d # true if the key exists
list = d.keys() # a list of all existing keys (slow!)
d.close() # close it
@ -94,14 +94,11 @@ def keys(self):
def __len__(self):
return len(self.dict)
def has_key(self, key):
return self.dict.has_key(key)
def __contains__(self, key):
return self.dict.has_key(key)
return key in self.dict
def get(self, key, default=None):
if self.dict.has_key(key):
if key in self.dict:
return self[key]
return default

View file

@ -54,12 +54,10 @@ def test_read(self):
#len
self.assertEqual(len(p), 0)
self.assertEqual(len(d), len(self.reference))
#has_key
#__contains__
for k in self.reference:
self.assert_(d.has_key(k))
self.assert_(k in d)
for k in self.other:
self.failIf(d.has_key(k))
self.failIf(k in d)
#cmp
self.assertEqual(cmp(p,p), 0)
@ -333,16 +331,6 @@ def test_items(self):
d = self._full_mapping({1:2})
self.assertEqual(d.items(), [(1, 2)])
def test_has_key(self):
d = self._empty_mapping()
self.assert_(not d.has_key('a'))
d = self._full_mapping({'a': 1, 'b': 2})
k = d.keys()
k.sort()
self.assertEqual(k, ['a', 'b'])
self.assertRaises(TypeError, d.has_key)
def test_contains(self):
d = self._empty_mapping()
self.assert_(not ('a' in d))

View file

@ -1034,7 +1034,6 @@ def printlist(x, width=70, indent=4):
'darwin':
"""
test_al
test_bsddb3
test_cd
test_cl
test_gdbm

View file

@ -24,7 +24,7 @@ def check_all(self, modname):
"%s has no __all__ attribute" % modname)
names = {}
exec "from %s import *" % modname in names
if names.has_key("__builtins__"):
if "__builtins__" in names:
del names["__builtins__"]
keys = set(names)
all = set(sys.modules[modname].__all__)

View file

@ -183,9 +183,9 @@ def test_issubclass(self):
self.assertIs(issubclass(bool, int), True)
self.assertIs(issubclass(int, bool), False)
def test_haskey(self):
self.assertIs({}.has_key(1), False)
self.assertIs({1:1}.has_key(1), True)
def test_contains(self):
self.assertIs(1 in {}, False)
self.assertIs(1 in {1:1}, True)
def test_string(self):
self.assertIs("xyz".endswith("z"), True)

View file

@ -135,11 +135,6 @@ def test_contains(self):
self.assert_(k in self.f)
self.assert_('not here' not in self.f)
def test_has_key(self):
for k in self.d:
self.assert_(self.f.has_key(k))
self.assert_(not self.f.has_key('not here'))
def test_clear(self):
self.f.clear()
self.assertEqual(len(self.f), 0)

View file

@ -630,9 +630,9 @@ def __hash__(self):
def test_hex(self):
self.assertEqual(hex(16), '0x10')
self.assertEqual(hex(16L), '0x10L')
self.assertEqual(hex(16L), '0x10')
self.assertEqual(hex(-16), '-0x10')
self.assertEqual(hex(-16L), '-0x10L')
self.assertEqual(hex(-16L), '-0x10')
self.assertRaises(TypeError, hex, {})
def test_id(self):
@ -1240,9 +1240,9 @@ def __cmp__(self, other):
def test_oct(self):
self.assertEqual(oct(100), '0144')
self.assertEqual(oct(100L), '0144L')
self.assertEqual(oct(100L), '0144')
self.assertEqual(oct(-100), '-0144')
self.assertEqual(oct(-100L), '-0144L')
self.assertEqual(oct(-100L), '-0144')
self.assertRaises(TypeError, oct, ())
def write_testfile(self):
@ -1441,7 +1441,7 @@ def test_reload(self):
def test_repr(self):
self.assertEqual(repr(''), '\'\'')
self.assertEqual(repr(0), '0')
self.assertEqual(repr(0L), '0L')
self.assertEqual(repr(0L), '0')
self.assertEqual(repr(()), '()')
self.assertEqual(repr([]), '[]')
self.assertEqual(repr({}), '{}')

View file

@ -9,39 +9,39 @@
class CFunctionCalls(unittest.TestCase):
def test_varargs0(self):
self.assertRaises(TypeError, {}.has_key)
self.assertRaises(TypeError, {}.__contains__)
def test_varargs1(self):
{}.has_key(0)
{}.__contains__(0)
def test_varargs2(self):
self.assertRaises(TypeError, {}.has_key, 0, 1)
self.assertRaises(TypeError, {}.__contains__, 0, 1)
def test_varargs0_ext(self):
try:
{}.has_key(*())
{}.__contains__(*())
except TypeError:
pass
def test_varargs1_ext(self):
{}.has_key(*(0,))
{}.__contains__(*(0,))
def test_varargs2_ext(self):
try:
{}.has_key(*(1, 2))
{}.__contains__(*(1, 2))
except TypeError:
pass
else:
raise RuntimeError
def test_varargs0_kw(self):
self.assertRaises(TypeError, {}.has_key, x=2)
self.assertRaises(TypeError, {}.__contains__, x=2)
def test_varargs1_kw(self):
self.assertRaises(TypeError, {}.has_key, x=2)
self.assertRaises(TypeError, {}.__contains__, x=2)
def test_varargs2_kw(self):
self.assertRaises(TypeError, {}.has_key, x=2, y=2)
self.assertRaises(TypeError, {}.__contains__, x=2, y=2)
def test_oldargs0_0(self):
{}.keys()

View file

@ -158,10 +158,10 @@ def main():
# test individual fields
for key in expect.keys():
expect_val = expect[key]
verify(fcd.has_key(key))
verify(key in fcd)
verify(norm(fcd[key]) == norm(expect[key]))
verify(fcd.get(key, "default") == fcd[key])
verify(fs.has_key(key))
verify(key in fs)
if len(expect_val) > 1:
single_value = 0
else:

View file

@ -28,7 +28,7 @@ def test_keys():
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if 'a' in d:
if verbose:
print 'Test dbm keys: ', d.keys()

View file

@ -21,8 +21,8 @@ def test_keys(self):
self.assertEqual(d.keys(), [])
d = {'a': 1, 'b': 2}
k = d.keys()
self.assert_(d.has_key('a'))
self.assert_(d.has_key('b'))
self.assert_('a' in d)
self.assert_('b' in d)
self.assertRaises(TypeError, d.keys, None)
@ -43,16 +43,6 @@ def test_items(self):
self.assertRaises(TypeError, d.items, None)
def test_has_key(self):
d = {}
self.assert_(not d.has_key('a'))
d = {'a': 1, 'b': 2}
k = d.keys()
k.sort()
self.assertEqual(k, ['a', 'b'])
self.assertRaises(TypeError, d.has_key)
def test_contains(self):
d = {}
self.assert_(not ('a' in d))

View file

@ -17,7 +17,7 @@
if verbose:
print 'Test gdbm file keys: ', a
g.has_key('a')
'a' in g
g.close()
try:
g['a']

View file

@ -472,7 +472,7 @@ def f():
f()
g = {}
exec 'z = 1' in g
if g.has_key('__builtins__'): del g['__builtins__']
if '__builtins__' in g: del g['__builtins__']
if g != {'z': 1}: raise TestFailed, 'exec \'z = 1\' in g'
g = {}
l = {}
@ -480,8 +480,8 @@ def f():
import warnings
warnings.filterwarnings("ignore", "global statement", module="<string>")
exec 'global a; a = 1; b = 2' in g, l
if g.has_key('__builtins__'): del g['__builtins__']
if l.has_key('__builtins__'): del l['__builtins__']
if '__builtins__' in g: del g['__builtins__']
if '__builtins__' in l: del l['__builtins__']
if (g, l) != ({'a':1}, {'b':2}): raise TestFailed, 'exec ... in g (%s), l (%s)' %(g,l)

View file

@ -229,16 +229,9 @@ def _check_iteration(self, method, do_keys, do_values, repetitions=10):
count += 1
self.assert_(len(values) == count)
def test_has_key(self):
# Check existence of keys using has_key()
self._test_has_key_or_contains(self._box.has_key)
def test_contains(self):
# Check existence of keys using __contains__()
self._test_has_key_or_contains(self._box.__contains__)
def _test_has_key_or_contains(self, method):
# (Used by test_has_key() and test_contains().)
method = self._box.__contains__
self.assert_(not method('foo'))
key0 = self._box.add(self._template % 0)
self.assert_(method(key0))
@ -442,7 +435,7 @@ def test_notimplemented(self):
self.assertRaises(NotImplementedError, lambda: box.get_message(''))
self.assertRaises(NotImplementedError, lambda: box.get_string(''))
self.assertRaises(NotImplementedError, lambda: box.get_file(''))
self.assertRaises(NotImplementedError, lambda: box.has_key(''))
self.assertRaises(NotImplementedError, lambda: '' in box)
self.assertRaises(NotImplementedError, lambda: box.__contains__(''))
self.assertRaises(NotImplementedError, lambda: box.__len__())
self.assertRaises(NotImplementedError, lambda: box.clear())

View file

@ -297,7 +297,7 @@ def test_mapping_file(self):
continue
unich = unichrs(data[1])
if ord(unich) == 0xfffd or urt_wa.has_key(unich):
if ord(unich) == 0xfffd or unich in urt_wa:
continue
urt_wa[unich] = csetch

View file

@ -25,7 +25,6 @@ def __cmp__(self, other):
for stmt in ['d[x2] = 2',
'z = d[x2]',
'x2 in d',
'd.has_key(x2)',
'd.get(x2)',
'd.setdefault(x2, 42)',
'd.pop(x2)',

View file

@ -6,14 +6,14 @@ class TestImport(unittest.TestCase):
def __init__(self, *args, **kw):
self.package_name = 'PACKAGE_'
while sys.modules.has_key(self.package_name):
while self.package_name in sys.modules:
self.package_name += random.choose(string.letters)
self.module_name = self.package_name + '.foo'
unittest.TestCase.__init__(self, *args, **kw)
def remove_modules(self):
for module_name in (self.package_name, self.module_name):
if sys.modules.has_key(module_name):
if module_name in sys.modules:
del sys.modules[module_name]
def setUp(self):
@ -52,7 +52,7 @@ def test_package_import__semantics(self):
try: __import__(self.module_name)
except SyntaxError: pass
else: raise RuntimeError, 'Failed to induce SyntaxError'
self.assert_(not sys.modules.has_key(self.module_name) and
self.assert_(self.module_name not in sys.modules and
not hasattr(sys.modules[self.package_name], 'foo'))
# ...make up a variable name that isn't bound in __builtins__

View file

@ -40,11 +40,11 @@ def assertHasattr(self, obj, attr, ignore):
def assertHaskey(self, obj, key, ignore):
''' succeed iff obj.has_key(key) or key in ignore. '''
''' succeed iff key in obj or key in ignore. '''
if key in ignore: return
if not obj.has_key(key):
if key not in obj:
print >>sys.stderr, "***",key
self.failUnless(obj.has_key(key))
self.failUnless(key in obj)
def assertEqualsOrIgnored(self, a, b, ignore):
''' succeed iff a == b or a in ignore or b in ignore '''

View file

@ -25,7 +25,7 @@ def test_get(self):
def test_setdefault(self):
msg = self.create_message(
'To: "last, first" <userid@foo.net>\n\ntest\n')
self.assert_(not msg.has_key("New-Header"))
self.assert_("New-Header" not in msg)
self.assert_(msg.setdefault("New-Header", "New-Value") == "New-Value")
self.assert_(msg.setdefault("New-Header", "Different-Value")
== "New-Value")

View file

@ -357,7 +357,7 @@ def test_expat_nsattrs_wattr():
attrs.getNames() == [(ns_uri, "attr")] and \
(attrs.getQNames() == [] or attrs.getQNames() == ["ns:attr"]) and \
len(attrs) == 1 and \
attrs.has_key((ns_uri, "attr")) and \
(ns_uri, "attr") in attrs and \
attrs.keys() == [(ns_uri, "attr")] and \
attrs.get((ns_uri, "attr")) == "val" and \
attrs.get((ns_uri, "attr"), 25) == "val" and \
@ -571,7 +571,7 @@ def verify_empty_attrs(attrs):
attrs.getNames() == [] and \
attrs.getQNames() == [] and \
len(attrs) == 0 and \
not attrs.has_key("attr") and \
"attr" not in attrs and \
attrs.keys() == [] and \
attrs.get("attrs") is None and \
attrs.get("attrs", 25) == 25 and \
@ -584,7 +584,7 @@ def verify_attrs_wattr(attrs):
attrs.getNames() == ["attr"] and \
attrs.getQNames() == ["attr"] and \
len(attrs) == 1 and \
attrs.has_key("attr") and \
"attr" in attrs and \
attrs.keys() == ["attr"] and \
attrs.get("attr") == "val" and \
attrs.get("attr", 25) == "val" and \
@ -639,7 +639,7 @@ def verify_empty_nsattrs(attrs):
attrs.getNames() == [] and \
attrs.getQNames() == [] and \
len(attrs) == 0 and \
not attrs.has_key((ns_uri, "attr")) and \
(ns_uri, "attr") not in attrs and \
attrs.keys() == [] and \
attrs.get((ns_uri, "attr")) is None and \
attrs.get((ns_uri, "attr"), 25) == 25 and \
@ -658,7 +658,7 @@ def test_nsattrs_wattr():
attrs.getNames() == [(ns_uri, "attr")] and \
attrs.getQNames() == ["ns:attr"] and \
len(attrs) == 1 and \
attrs.has_key((ns_uri, "attr")) and \
(ns_uri, "attr") in attrs and \
attrs.keys() == [(ns_uri, "attr")] and \
attrs.get((ns_uri, "attr")) == "val" and \
attrs.get((ns_uri, "attr"), 25) == "val" and \

View file

@ -472,7 +472,7 @@ def h(z):
return g
d = f(2)(4)
verify(d.has_key('h'))
verify('h' in d)
del d['h']
vereq(d, {'x': 2, 'y': 7, 'w': 6})

View file

@ -216,7 +216,7 @@ def test_setdefaultencoding_removed(self):
def test_sitecustomize_executed(self):
# If sitecustomize is available, it should have been imported.
if not sys.modules.has_key("sitecustomize"):
if "sitecustomize" not in sys.modules:
try:
import sitecustomize
except ImportError:

View file

@ -266,7 +266,7 @@ def test_one(self, x, pack=struct.pack,
if x < 0:
expected += 1L << self.bitsize
assert expected > 0
expected = hex(expected)[2:-1] # chop "0x" and trailing 'L'
expected = hex(expected)[2:] # chop "0x"
if len(expected) & 1:
expected = "0" + expected
expected = unhexlify(expected)
@ -322,7 +322,7 @@ def test_one(self, x, pack=struct.pack,
# Try big-endian.
format = ">" + code
expected = long(x)
expected = hex(expected)[2:-1] # chop "0x" and trailing 'L'
expected = hex(expected)[2:] # chop "0x"
if len(expected) & 1:
expected = "0" + expected
expected = unhexlify(expected)

View file

@ -180,7 +180,7 @@ def test_tzset(self):
# rely on it.
if org_TZ is not None:
environ['TZ'] = org_TZ
elif environ.has_key('TZ'):
elif 'TZ' in environ:
del environ['TZ']
time.tzset()

View file

@ -653,7 +653,7 @@ def getresponse(self):
r.info; r.geturl # addinfourl methods
r.code, r.msg == 200, "OK" # added from MockHTTPClass.getreply()
hdrs = r.info()
hdrs.get; hdrs.has_key # r.info() gives dict from .getreply()
hdrs.get; hdrs.__contains__ # r.info() gives dict from .getreply()
self.assertEqual(r.geturl(), url)
self.assertEqual(http.host, "example.com")

View file

@ -94,13 +94,10 @@ def display(self): print self
self.assertEqual(u2.items(), d2.items())
self.assertEqual(u2.values(), d2.values())
# Test has_key and "in".
# Test "in".
for i in u2.keys():
self.assert_(u2.has_key(i))
self.assert_(i in u2)
self.assertEqual(u1.has_key(i), d1.has_key(i))
self.assertEqual(i in u1, i in d1)
self.assertEqual(u0.has_key(i), d0.has_key(i))
self.assertEqual(i in u0, i in d0)
# Test update
@ -132,7 +129,7 @@ def items(self):
# Test setdefault
t = UserDict.UserDict()
self.assertEqual(t.setdefault("x", 42), 42)
self.assert_(t.has_key("x"))
self.assert_("x" in t)
self.assertEqual(t.setdefault("x", 23), 42)
# Test pop
@ -269,9 +266,6 @@ def test_all(self):
self.assertEqual(s.keys(), [10, 30])
## Now, test the DictMixin methods one by one
# has_key
self.assert_(s.has_key(10))
self.assert_(not s.has_key(20))
# __contains__
self.assert_(10 in s)

View file

@ -739,7 +739,7 @@ def test_weak_values(self):
def test_weak_keys(self):
#
# This exercises d.copy(), d.items(), d[] = v, d[], del d[],
# len(d), d.has_key().
# len(d), k in d.
#
dict, objects = self.make_weak_keyed_dict()
for o in objects:
@ -761,8 +761,8 @@ def test_weak_keys(self):
"deleting the keys did not clear the dictionary")
o = Object(42)
dict[o] = "What is the meaning of the universe?"
self.assert_(dict.has_key(o))
self.assert_(not dict.has_key(34))
self.assert_(o in dict)
self.assert_(34 not in dict)
def test_weak_keyed_iters(self):
dict, objects = self.make_weak_keyed_dict()
@ -774,7 +774,7 @@ def test_weak_keyed_iters(self):
objects2 = list(objects)
for wr in refs:
ob = wr()
self.assert_(dict.has_key(ob))
self.assert_(ob in dict)
self.assert_(ob in dict)
self.assertEqual(ob.arg, dict[ob])
objects2.remove(ob)
@ -785,7 +785,7 @@ def test_weak_keyed_iters(self):
self.assertEqual(len(list(dict.iterkeyrefs())), len(objects))
for wr in dict.iterkeyrefs():
ob = wr()
self.assert_(dict.has_key(ob))
self.assert_(ob in dict)
self.assert_(ob in dict)
self.assertEqual(ob.arg, dict[ob])
objects2.remove(ob)
@ -900,13 +900,13 @@ def check_setdefault(self, klass, key, value1, value2):
weakdict = klass()
o = weakdict.setdefault(key, value1)
self.assert_(o is value1)
self.assert_(weakdict.has_key(key))
self.assert_(key in weakdict)
self.assert_(weakdict.get(key) is value1)
self.assert_(weakdict[key] is value1)
o = weakdict.setdefault(key, value2)
self.assert_(o is value1)
self.assert_(weakdict.has_key(key))
self.assert_(key in weakdict)
self.assert_(weakdict.get(key) is value1)
self.assert_(weakdict[key] is value1)
@ -920,20 +920,20 @@ def test_weak_keyed_dict_setdefault(self):
def check_update(self, klass, dict):
#
# This exercises d.update(), len(d), d.keys(), d.has_key(),
# This exercises d.update(), len(d), d.keys(), k in d,
# d.get(), d[].
#
weakdict = klass()
weakdict.update(dict)
self.assert_(len(weakdict) == len(dict))
for k in weakdict.keys():
self.assert_(dict.has_key(k),
self.assert_(k in dict,
"mysterious new key appeared in weak dict")
v = dict.get(k)
self.assert_(v is weakdict[k])
self.assert_(v is weakdict.get(k))
for k in dict.keys():
self.assert_(weakdict.has_key(k),
self.assert_(k in weakdict,
"original key disappeared in weak dict")
v = dict[k]
self.assert_(v is weakdict[k])

View file

@ -341,7 +341,7 @@ def testMappingInterface(self):
del h['foo'] # should not raise an error
h['Foo'] = 'bar'
for m in h.has_key, h.__contains__, h.get, h.get_all, h.__getitem__:
for m in h.__contains__, h.get, h.get_all, h.__getitem__:
self.failUnless(m('foo'))
self.failUnless(m('Foo'))
self.failUnless(m('FOO'))
@ -424,10 +424,10 @@ def checkOSEnviron(self,handler):
env = handler.environ
from os import environ
for k,v in environ.items():
if not empty.has_key(k):
if k not in empty:
self.assertEqual(env[k],v)
for k,v in empty.items():
self.failUnless(env.has_key(k))
self.failUnless(k in env)
def testEnviron(self):
h = TestHandler(X="Y")
@ -440,7 +440,7 @@ def testCGIEnviron(self):
h = BaseCGIHandler(None,None,None,{})
h.setup_environ()
for key in 'wsgi.url_scheme', 'wsgi.input', 'wsgi.errors':
self.assert_(h.environ.has_key(key))
self.assert_(key in h.environ)
def testScheme(self):
h=TestHandler(HTTPS="on"); h.setup_environ()

View file

@ -120,7 +120,7 @@ def __init__(self, modules = None, dirs = None):
self._ignore = { '<string>': 1 }
def names(self, filename, modulename):
if self._ignore.has_key(modulename):
if modulename in self._ignore:
return self._ignore[modulename]
# haven't seen this one before, so see if the module name is

View file

@ -153,7 +153,7 @@ def _exc_info_to_string(self, err, test):
return ''.join(traceback.format_exception(exctype, value, tb))
def _is_relevant_tb_level(self, tb):
return tb.tb_frame.f_globals.has_key('__unittest')
return '__unittest' in tb.tb_frame.f_globals
def _count_relevant_tb_levels(self, tb):
length = 0

View file

@ -114,7 +114,7 @@ class URLopener:
def __init__(self, proxies=None, **x509):
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
assert hasattr(proxies, 'keys'), "proxies must be a mapping"
self.proxies = proxies
self.key_file = x509.get('key_file')
self.cert_file = x509.get('cert_file')

View file

@ -660,7 +660,7 @@ class ProxyHandler(BaseHandler):
def __init__(self, proxies=None):
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
assert hasattr(proxies, 'keys'), "proxies must be a mapping"
self.proxies = proxies
for type, url in proxies.items():
setattr(self, '%s_open' % type,

View file

@ -64,13 +64,6 @@ def __contains__(self, key):
return False
return o is not None
def has_key(self, key):
try:
o = self.data[key]()
except KeyError:
return False
return o is not None
def __repr__(self):
return "<WeakValueDictionary at %s>" % id(self)
@ -259,13 +252,6 @@ def copy(self):
def get(self, key, default=None):
return self.data.get(ref(key),default)
def has_key(self, key):
try:
wr = ref(key)
except TypeError:
return 0
return wr in self.data
def __contains__(self, key):
try:
wr = ref(key)

View file

@ -159,7 +159,7 @@ def cleanup_headers(self):
Subclasses can extend this to add other defaults.
"""
if not self.headers.has_key('Content-Length'):
if 'Content-Length' not in self.headers:
self.set_content_length()
def start_response(self, status, headers,exc_info=None):
@ -194,11 +194,11 @@ def send_preamble(self):
if self.origin_server:
if self.client_is_modern():
self._write('HTTP/%s %s\r\n' % (self.http_version,self.status))
if not self.headers.has_key('Date'):
if 'Date' not in self.headers:
self._write(
'Date: %s\r\n' % format_date_time(time.time())
)
if self.server_software and not self.headers.has_key('Server'):
if self.server_software and 'Server' not in self.headers:
self._write('Server: %s\r\n' % self.server_software)
else:
self._write('Status: %s\r\n' % self.status)
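
These handler checks ('Content-Length' not in self.headers, and so on) rely on wsgiref's Headers object answering the in operator; its has_key is renamed to __contains__ further below. A small usage sketch, assuming the wsgiref.headers API of this era:

from wsgiref.headers import Headers

h = Headers([('Content-Type', 'text/plain')])
assert 'content-type' in h           # header matching is case-insensitive
assert 'Content-Length' not in h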

View file

@ -80,12 +80,10 @@ def __getitem__(self,name):
def has_key(self, name):
def __contains__(self, name):
"""Return true if the message contains the header."""
return self.get(name) is not None
__contains__ = has_key
def get_all(self, name):
"""Return a list of all the values for the named field.

View file

@ -166,7 +166,7 @@ def setup_testing_defaults(environ):
'connection':1, 'keep-alive':1, 'proxy-authenticate':1,
'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1,
'upgrade':1
}.has_key
}.__contains__
def is_hop_by_hop(header_name):
"""Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header"""

View file

@ -345,7 +345,7 @@ def check_environ(environ):
"Invalid CONTENT_LENGTH: %r" % environ['CONTENT_LENGTH'])
if not environ.get('SCRIPT_NAME'):
assert_(environ.has_key('PATH_INFO'),
assert_('PATH_INFO' in environ,
"One of SCRIPT_NAME or PATH_INFO are required (PATH_INFO "
"should at least be '/' if SCRIPT_NAME is empty)")
assert_(environ.get('SCRIPT_NAME') != '/',

View file

@ -686,7 +686,7 @@ def dump_unicode(self, value, write, escape=escape):
def dump_array(self, value, write):
i = id(value)
if self.memo.has_key(i):
if i in self.memo:
raise TypeError, "cannot marshal recursive sequences"
self.memo[i] = None
dump = self.__dump
@ -700,7 +700,7 @@ def dump_array(self, value, write):
def dump_struct(self, value, write, escape=escape):
i = id(value)
if self.memo.has_key(i):
if i in self.memo:
raise TypeError, "cannot marshal recursive dictionaries"
self.memo[i] = None
dump = self.__dump

View file

@ -461,7 +461,7 @@ bsddb_keys(bsddbobject *dp)
}
static PyObject *
bsddb_has_key(bsddbobject *dp, PyObject *args)
bsddb_contains(bsddbobject *dp, PyObject *args)
{
DBT krec, drec;
int status;
@ -640,7 +640,7 @@ bsddb_sync(bsddbobject *dp)
static PyMethodDef bsddb_methods[] = {
{"close", (PyCFunction)bsddb_close, METH_NOARGS},
{"keys", (PyCFunction)bsddb_keys, METH_NOARGS},
{"has_key", (PyCFunction)bsddb_has_key, METH_VARARGS},
{"__contains__", (PyCFunction)bsddb_contains, METH_VARARGS},
{"set_location", (PyCFunction)bsddb_set_location, METH_VARARGS},
{"next", (PyCFunction)bsddb_next, METH_NOARGS},
{"previous", (PyCFunction)bsddb_previous, METH_NOARGS},

View file

@ -206,12 +206,12 @@ dbm_keys(register dbmobject *dp, PyObject *unused)
}
static PyObject *
dbm_has_key(register dbmobject *dp, PyObject *args)
dbm_contains(register dbmobject *dp, PyObject *args)
{
datum key, val;
int tmp_size;
if (!PyArg_ParseTuple(args, "s#:has_key", &key.dptr, &tmp_size))
if (!PyArg_ParseTuple(args, "s#:__contains__", &key.dptr, &tmp_size))
return NULL;
key.dsize = tmp_size;
check_dbmobject_open(dp);
@ -277,8 +277,8 @@ static PyMethodDef dbm_methods[] = {
"close()\nClose the database."},
{"keys", (PyCFunction)dbm_keys, METH_NOARGS,
"keys() -> list\nReturn a list of all keys in the database."},
{"has_key", (PyCFunction)dbm_has_key, METH_VARARGS,
"has_key(key} -> boolean\nReturn true iff key is in the database."},
{"__contains__",(PyCFunction)dbm_contains, METH_VARARGS,
"__contains__(key) -> boolean\nTrue iff key is in the database."},
{"get", (PyCFunction)dbm_get, METH_VARARGS,
"get(key[, default]) -> value\n"
"Return the value for key if present, otherwise default."},

View file

@ -241,16 +241,16 @@ dbm_keys(register dbmobject *dp, PyObject *unused)
return v;
}
PyDoc_STRVAR(dbm_has_key__doc__,
"has_key(key) -> boolean\n\
PyDoc_STRVAR(dbm_contains__doc__,
"__contains__(key) -> bool\n\
Find out whether or not the database contains a given key.");
static PyObject *
dbm_has_key(register dbmobject *dp, PyObject *args)
dbm_contains(register dbmobject *dp, PyObject *args)
{
datum key;
if (!PyArg_ParseTuple(args, "s#:has_key", &key.dptr, &key.dsize))
if (!PyArg_ParseTuple(args, "s#:contains", &key.dptr, &key.dsize))
return NULL;
check_dbmobject_open(dp);
return PyInt_FromLong((long) gdbm_exists(dp->di_dbm, key));
@ -355,7 +355,7 @@ dbm_sync(register dbmobject *dp, PyObject *unused)
static PyMethodDef dbm_methods[] = {
{"close", (PyCFunction)dbm_close, METH_NOARGS, dbm_close__doc__},
{"keys", (PyCFunction)dbm_keys, METH_NOARGS, dbm_keys__doc__},
{"has_key", (PyCFunction)dbm_has_key, METH_VARARGS, dbm_has_key__doc__},
{"__contains__",(PyCFunction)dbm_contains,METH_VARARGS, dbm_contains__doc__},
{"firstkey", (PyCFunction)dbm_firstkey,METH_NOARGS, dbm_firstkey__doc__},
{"nextkey", (PyCFunction)dbm_nextkey, METH_VARARGS, dbm_nextkey__doc__},
{"reorganize",(PyCFunction)dbm_reorganize,METH_NOARGS, dbm_reorganize__doc__},

View file

@ -700,15 +700,6 @@ static PySequenceMethods proxy_as_sequence = {
0, /* sq_inplace_repeat */
};
static PyObject *
proxy_has_key(proxyobject *pp, PyObject *key)
{
int res = PyDict_Contains(pp->dict, key);
if (res < 0)
return NULL;
return PyBool_FromLong(res);
}
static PyObject *
proxy_get(proxyobject *pp, PyObject *args)
{
@ -761,10 +752,8 @@ proxy_copy(proxyobject *pp)
}
static PyMethodDef proxy_methods[] = {
{"has_key", (PyCFunction)proxy_has_key, METH_O,
PyDoc_STR("D.has_key(k) -> True if D has a key k, else False")},
{"get", (PyCFunction)proxy_get, METH_VARARGS,
PyDoc_STR("D.get(k[,d]) -> D[k] if D.has_key(k), else d."
PyDoc_STR("D.get(k[,d]) -> D[k] if k in D, else d."
" d defaults to None.")},
{"keys", (PyCFunction)proxy_keys, METH_NOARGS,
PyDoc_STR("D.keys() -> list of D's keys")},

View file

@ -1621,7 +1621,7 @@ dict_richcompare(PyObject *v, PyObject *w, int op)
}
static PyObject *
dict_has_key(register dictobject *mp, PyObject *key)
dict_contains(register dictobject *mp, PyObject *key)
{
long hash;
dictentry *ep;
@ -1856,9 +1856,6 @@ dict_iteritems(dictobject *dict)
}
PyDoc_STRVAR(has_key__doc__,
"D.has_key(k) -> True if D has a key k, else False");
PyDoc_STRVAR(contains__doc__,
"D.__contains__(k) -> True if D has a key k, else False");
@ -1911,12 +1908,10 @@ PyDoc_STRVAR(iteritems__doc__,
"D.iteritems() -> an iterator over the (key, value) items of D");
static PyMethodDef mapp_methods[] = {
{"__contains__",(PyCFunction)dict_has_key, METH_O | METH_COEXIST,
{"__contains__",(PyCFunction)dict_contains, METH_O | METH_COEXIST,
contains__doc__},
{"__getitem__", (PyCFunction)dict_subscript, METH_O | METH_COEXIST,
getitem__doc__},
{"has_key", (PyCFunction)dict_has_key, METH_O,
has_key__doc__},
{"get", (PyCFunction)dict_get, METH_VARARGS,
get__doc__},
{"setdefault", (PyCFunction)dict_setdefault, METH_VARARGS,

View file

@ -4432,7 +4432,6 @@ slot_tp_hash(PyObject *self)
}
if (func == NULL) {
PyErr_Clear();
PyErr_Format(PyExc_TypeError, "unhashable type: '%.200s'",
self->ob_type->tp_name);
return -1;

View file

@ -674,7 +674,7 @@ class db_found(Exception): pass
db_minor = int(m.group(1))
db_ver = (db_major, db_minor)
if ( (not db_ver_inc_map.has_key(db_ver)) and
if ( (db_ver not in db_ver_inc_map) and
(db_ver <= max_db_ver and db_ver >= min_db_ver) ):
# save the include directory with the db.h version
# (first occurrance only)