bpo-30296 Remove unnecessary tuples, lists, sets, and dicts (#1489)
* Replaced list(<generator expression>) with list comprehension
* Replaced dict(<generator expression>) with dict comprehension
* Replaced set(<list literal>) with set literal
* Replaced builtin func(<list comprehension>) with func(<generator expression>) when supported (e.g. any(), all(), tuple(), min(), & max())
This commit is contained in:
parent 906f5330b9
commit 3972628de3
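The four patterns in the commit message, shown as a minimal before/after sketch (the values below are made up for illustration and are not lines from the diff):

```python
words = ["spam", "egg", "ham"]

# list(<generator expression>)  ->  list comprehension
lengths = [len(w) for w in words]              # was: list(len(w) for w in words)

# dict(<generator expression>)  ->  dict comprehension
length_by_word = {w: len(w) for w in words}    # was: dict((w, len(w)) for w in words)

# set(<list literal>)  ->  set literal
vowels = {"a", "e", "i", "o", "u"}             # was: set(["a", "e", "i", "o", "u"])

# func(<list comprehension>)  ->  func(<generator expression>)
longest = max(len(w) for w in words)           # was: max([len(w) for w in words])
has_long = any(len(w) > 3 for w in words)      # was: any([len(w) > 3 for w in words])
```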
@@ -157,19 +157,19 @@ class WeakSet:
     __le__ = issubset

     def __lt__(self, other):
-        return self.data < set(ref(item) for item in other)
+        return self.data < set(map(ref, other))

     def issuperset(self, other):
         return self.data.issuperset(ref(item) for item in other)
     __ge__ = issuperset

     def __gt__(self, other):
-        return self.data > set(ref(item) for item in other)
+        return self.data > set(map(ref, other))

     def __eq__(self, other):
         if not isinstance(other, self.__class__):
             return NotImplemented
-        return self.data == set(ref(item) for item in other)
+        return self.data == set(map(ref, other))

     def symmetric_difference(self, other):
         newset = self.copy()
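In the WeakSet hunk above, `set(ref(item) for item in other)` becomes `set(map(ref, other))`; the two forms are interchangeable whenever the mapped function is applied directly to each element, as ref is here. A quick sketch with a stand-in function (illustrative, not code from the diff):

```python
# map(f, xs) yields f(x) for each x in xs, so both forms build the same set.
xs = [1, 2, 3]
assert set(map(str, xs)) == set(str(x) for x in xs) == {str(x) for x in xs}
```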
@@ -255,7 +255,7 @@ def query_vcvarsall(version, arch="x86"):
     """Launch vcvarsall.bat and read the settings from its environment
     """
     vcvarsall = find_vcvarsall(version)
-    interesting = set(("include", "lib", "libpath", "path"))
+    interesting = {"include", "lib", "libpath", "path"}
     result = {}

     if vcvarsall is None:
@@ -369,8 +369,8 @@ class AddressHeader:
     @property
     def addresses(self):
         if self._addresses is None:
-            self._addresses = tuple([address for group in self._groups
-                                     for address in group.addresses])
+            self._addresses = tuple(address for group in self._groups
+                                    for address in group.addresses)
         return self._addresses


@@ -389,7 +389,7 @@ def classify_class_attrs(cls):

     mro = getmro(cls)
     metamro = getmro(type(cls)) # for attributes stored in the metaclass
-    metamro = tuple([cls for cls in metamro if cls not in (type, object)])
+    metamro = tuple(cls for cls in metamro if cls not in (type, object))
     class_bases = (cls,) + mro
     all_bases = class_bases + metamro
     names = dir(cls)
@@ -463,7 +463,7 @@ class BaseConfigurator(object):
             c = self.resolve(c)
         props = config.pop('.', None)
         # Check for valid identifiers
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
         result = c(**kwargs)
         if props:
             for name, value in props.items():
@@ -726,7 +726,7 @@ class DictConfigurator(BaseConfigurator):
                 config['address'] = self.as_tuple(config['address'])
             factory = klass
         props = config.pop('.', None)
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
         try:
             result = factory(**kwargs)
         except TypeError as te:
@@ -261,7 +261,7 @@ class DefaultContext(BaseContext):
             else:
                 return ['fork', 'spawn']

-DefaultContext.__all__ = list(x for x in dir(DefaultContext) if x[0] != '_')
+DefaultContext.__all__ = [x for x in dir(DefaultContext) if x[0] != '_']

 #
 # Context types for fixed start method
@@ -98,8 +98,7 @@ class ForkServer(object):
             if self._preload_modules:
                 desired_keys = {'main_path', 'sys_path'}
                 data = spawn.get_preparation_data('ignore')
-                data = dict((x,y) for (x,y) in data.items()
-                            if x in desired_keys)
+                data = {x: y for x, y in data.items() if x in desired_keys}
             else:
                 data = {}

@@ -115,7 +115,7 @@ def synchronized(obj, lock=None, ctx=None):
             scls = class_cache[cls]
         except KeyError:
             names = [field[0] for field in cls._fields_]
-            d = dict((name, make_property(name)) for name in names)
+            d = {name: make_property(name) for name in names}
             classname = 'Synchronized' + cls.__name__
             scls = class_cache[cls] = type(classname, (SynchronizedBase,), d)
         return scls(obj, lock, ctx)
@@ -114,10 +114,7 @@ class _WindowsFlavour(_Flavour):

     is_supported = (os.name == 'nt')

-    drive_letters = (
-        set(chr(x) for x in range(ord('a'), ord('z') + 1)) |
-        set(chr(x) for x in range(ord('A'), ord('Z') + 1))
-    )
+    drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
     ext_namespace_prefix = '\\\\?\\'

     reserved_names = (
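The pathlib hunk above collapses a union of two generated character sets into `set()` over a string literal; iterating a string yields its one-character substrings, so the result is the same set. A small check of that equivalence (illustrative, not part of the diff):

```python
import string

drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
# A string is an iterable of single characters, so set() over it equals
# the union of the generated lowercase and uppercase sets.
assert drive_letters == (set(chr(x) for x in range(ord('a'), ord('z') + 1)) |
                         set(chr(x) for x in range(ord('A'), ord('Z') + 1)))
assert drive_letters == set(string.ascii_letters)
```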
@@ -500,8 +500,8 @@ def add_callers(target, source):
         if func in new_callers:
             if isinstance(caller, tuple):
                 # format used by cProfile
-                new_callers[func] = tuple([i[0] + i[1] for i in
-                                           zip(caller, new_callers[func])])
+                new_callers[func] = tuple(i[0] + i[1] for i in
+                                          zip(caller, new_callers[func]))
             else:
                 # format used by profile
                 new_callers[func] += caller
@@ -119,8 +119,8 @@ class Function(SymbolTable):
     __globals = None

     def __idents_matching(self, test_func):
-        return tuple([ident for ident in self.get_identifiers()
-                      if test_func(self._table.symbols[ident])])
+        return tuple(ident for ident in self.get_identifiers()
+                     if test_func(self._table.symbols[ident]))

     def get_parameters(self):
         if self.__params is None:
@@ -142,7 +142,7 @@ def _all_string_prefixes():
     # 'rf'). The various permutations will be generated.
     _valid_string_prefixes = ['b', 'r', 'u', 'f', 'br', 'fr']
     # if we add binary f-strings, add: ['fb', 'fbr']
-    result = set([''])
+    result = {''}
     for prefix in _valid_string_prefixes:
         for t in _itertools.permutations(prefix):
             # create a list with upper and lower versions of each
@@ -253,8 +253,7 @@ class FrameSummary:
         self._line = line
         if lookup_line:
             self.line
-        self.locals = \
-            dict((k, repr(v)) for k, v in locals.items()) if locals else None
+        self.locals = {k: repr(v) for k, v in locals.items()} if locals else None

     def __eq__(self, other):
         if isinstance(other, FrameSummary):
@@ -1175,7 +1175,7 @@ class TurtleScreen(TurtleScreenBase):
             cl = [16*int(cstr[h], 16) for h in cstr[1:]]
         else:
             raise TurtleGraphicsError("bad colorstring: %s" % cstr)
-        return tuple([c * self._colormode/255 for c in cl])
+        return tuple(c * self._colormode/255 for c in cl)

     def colormode(self, cmode=None):
         """Return the colormode or set it to 1.0 or 255.
@@ -2989,7 +2989,7 @@ class RawTurtle(TPen, TNavigator):
             t11, t12, t21, t22 = l, 0, 0, l
         elif self._resizemode == "noresize":
             return polygon
-        return tuple([(t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon])
+        return tuple((t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon)

     def _drawturtle(self):
         """Manages the correct rendering of the turtle with respect to
@@ -3839,8 +3839,8 @@ def write_docstringdict(filename="turtle_docstringdict"):
         docsdict[key] = eval(key).__doc__

     with open("%s.py" % filename,"w") as f:
-        keys = sorted([x for x in docsdict.keys()
-                       if x.split('.')[1] not in _alias_list])
+        keys = sorted(x for x in docsdict.keys()
+                      if x.split('.')[1] not in _alias_list)
         f.write('docsdict = {\n\n')
         for key in keys[:-1]:
             f.write('%s :\n' % repr(key))
@@ -52,7 +52,7 @@ def main():
     sleep(1)

     at = clock()
-    while any([t.undobufferentries() for t in s.turtles()]):
+    while any(t.undobufferentries() for t in s.turtles()):
         for t in s.turtles():
             t.undo()
     et = clock()
@@ -683,8 +683,8 @@ class HTTPRedirectHandler(BaseHandler):
         newurl = newurl.replace(' ', '%20')

         CONTENT_HEADERS = ("content-length", "content-type")
-        newheaders = dict((k, v) for k, v in req.headers.items()
-                          if k.lower() not in CONTENT_HEADERS)
+        newheaders = {k: v for k, v in req.headers.items()
+                      if k.lower() not in CONTENT_HEADERS}
         return Request(newurl,
                        headers=newheaders,
                        origin_req_host=req.origin_req_host,
@@ -845,7 +845,7 @@ class HTTPPasswordMgr:
             self.passwd[realm] = {}
         for default_port in True, False:
             reduced_uri = tuple(
-                [self.reduce_uri(u, default_port) for u in uri])
+                self.reduce_uri(u, default_port) for u in uri)
             self.passwd[realm][reduced_uri] = (user, passwd)

     def find_user_password(self, realm, authuri):
@@ -1286,8 +1286,7 @@ class AbstractHTTPHandler(BaseHandler):
         h.set_debuglevel(self._debuglevel)

         headers = dict(req.unredirected_hdrs)
-        headers.update(dict((k, v) for k, v in req.headers.items()
-                            if k not in headers))
+        headers.update((k, v) for k, v in req.headers.items() if k not in headers)

         # TODO(jhylton): Should this be redesigned to handle
         # persistent connections?
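The AbstractHTTPHandler hunk above drops the intermediate dict() entirely: dict.update() accepts either a mapping or an iterable of key/value pairs, so the generator expression can be passed straight in. A minimal sketch with made-up headers (not taken from the diff):

```python
headers = {"Connection": "close"}
extra = {"Connection": "keep-alive", "Accept": "*/*"}
# dict.update() takes an iterable of (key, value) pairs directly,
# so no temporary dict is needed around the generator expression.
headers.update((k, v) for k, v in extra.items() if k not in headers)
assert headers == {"Connection": "close", "Accept": "*/*"}
```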
@@ -1299,7 +1298,7 @@ class AbstractHTTPHandler(BaseHandler):
         # So make sure the connection gets closed after the (only)
         # request.
         headers["Connection"] = "close"
-        headers = dict((name.title(), val) for name, val in headers.items())
+        headers = {name.title(): val for name, val in headers.items()}

         if req._tunnel_host:
             tunnel_headers = {}
@@ -1097,8 +1097,8 @@ class PyTupleObjectPtr(PyObjectPtr):
             return ProxyAlreadyVisited('(...)')
         visited.add(self.as_address())

-        result = tuple([PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
-                        for i in safe_range(int_from_int(self.field('ob_size')))])
+        result = tuple(PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
+                       for i in safe_range(int_from_int(self.field('ob_size'))))
         return result

     def write_repr(self, out, visited):
@@ -83,7 +83,7 @@ class Stats:
             columns.update(self.stats[ext])
         cols = sorted(columns)
         colwidth = {}
-        colwidth["ext"] = max([len(ext) for ext in exts])
+        colwidth["ext"] = max(map(len, exts))
         minwidth = 6
         self.stats["TOTAL"] = {}
         for col in cols:
@@ -609,7 +609,7 @@ def makeunicodename(unicode, trace):
             if name and name[0] != "<":
                 names[char] = name + chr(0)

-    print(len(list(n for n in names if n is not None)), "distinct names")
+    print(len([n for n in names if n is not None]), "distinct names")

     # collect unique words from names (note that we differ between
     # words inside a sentence, and words ending a sentence. the
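The last hunk goes in the opposite direction from the others: the generator expression inside list() becomes a list comprehension, because len() needs a sized object and a bare generator has no length. An illustrative sketch (made-up names, not from the diff):

```python
names = ["alpha", None, "gamma", None]
# len() requires an object with __len__; a generator has none,
# so a list comprehension is the right form here.
assert len([n for n in names if n is not None]) == 2
```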