bpo-30296 Remove unnecessary tuples, lists, sets, and dicts (#1489)
* Replaced `list(<generator expression>)` with list comprehension
* Replaced `dict(<generator expression>)` with dict comprehension
* Replaced `set(<list literal>)` with set literal
* Replaced builtin `func(<list comprehension>)` with `func(<generator expression>)` when supported (e.g. `any()`, `all()`, `tuple()`, `min()`, and `max()`)
This commit is contained in:
parent
906f5330b9
commit
3972628de3
|
@ -157,19 +157,19 @@ class WeakSet:
|
|||
__le__ = issubset
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.data < set(ref(item) for item in other)
|
||||
return self.data < set(map(ref, other))
|
||||
|
||||
def issuperset(self, other):
|
||||
return self.data.issuperset(ref(item) for item in other)
|
||||
__ge__ = issuperset
|
||||
|
||||
def __gt__(self, other):
|
||||
return self.data > set(ref(item) for item in other)
|
||||
return self.data > set(map(ref, other))
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, self.__class__):
|
||||
return NotImplemented
|
||||
return self.data == set(ref(item) for item in other)
|
||||
return self.data == set(map(ref, other))
|
||||
|
||||
def symmetric_difference(self, other):
|
||||
newset = self.copy()
|
||||
|
|
|
@ -255,7 +255,7 @@ def query_vcvarsall(version, arch="x86"):
|
|||
"""Launch vcvarsall.bat and read the settings from its environment
|
||||
"""
|
||||
vcvarsall = find_vcvarsall(version)
|
||||
interesting = set(("include", "lib", "libpath", "path"))
|
||||
interesting = {"include", "lib", "libpath", "path"}
|
||||
result = {}
|
||||
|
||||
if vcvarsall is None:
|
||||
|
|
|
@ -369,8 +369,8 @@ class AddressHeader:
|
|||
@property
|
||||
def addresses(self):
|
||||
if self._addresses is None:
|
||||
self._addresses = tuple([address for group in self._groups
|
||||
for address in group.addresses])
|
||||
self._addresses = tuple(address for group in self._groups
|
||||
for address in group.addresses)
|
||||
return self._addresses
|
||||
|
||||
|
||||
|
|
|
@ -389,7 +389,7 @@ def classify_class_attrs(cls):
|
|||
|
||||
mro = getmro(cls)
|
||||
metamro = getmro(type(cls)) # for attributes stored in the metaclass
|
||||
metamro = tuple([cls for cls in metamro if cls not in (type, object)])
|
||||
metamro = tuple(cls for cls in metamro if cls not in (type, object))
|
||||
class_bases = (cls,) + mro
|
||||
all_bases = class_bases + metamro
|
||||
names = dir(cls)
|
||||
|
|
|
@ -463,7 +463,7 @@ class BaseConfigurator(object):
|
|||
c = self.resolve(c)
|
||||
props = config.pop('.', None)
|
||||
# Check for valid identifiers
|
||||
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
|
||||
kwargs = dict((k, config[k]) for k in config if valid_ident(k))
|
||||
result = c(**kwargs)
|
||||
if props:
|
||||
for name, value in props.items():
|
||||
|
@ -726,7 +726,7 @@ class DictConfigurator(BaseConfigurator):
|
|||
config['address'] = self.as_tuple(config['address'])
|
||||
factory = klass
|
||||
props = config.pop('.', None)
|
||||
kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
|
||||
kwargs = dict((k, config[k]) for k in config if valid_ident(k))
|
||||
try:
|
||||
result = factory(**kwargs)
|
||||
except TypeError as te:
|
||||
|
|
|
@ -261,7 +261,7 @@ class DefaultContext(BaseContext):
|
|||
else:
|
||||
return ['fork', 'spawn']
|
||||
|
||||
DefaultContext.__all__ = list(x for x in dir(DefaultContext) if x[0] != '_')
|
||||
DefaultContext.__all__ = [x for x in dir(DefaultContext) if x[0] != '_']
|
||||
|
||||
#
|
||||
# Context types for fixed start method
|
||||
|
|
|
@ -98,8 +98,7 @@ class ForkServer(object):
|
|||
if self._preload_modules:
|
||||
desired_keys = {'main_path', 'sys_path'}
|
||||
data = spawn.get_preparation_data('ignore')
|
||||
data = dict((x,y) for (x,y) in data.items()
|
||||
if x in desired_keys)
|
||||
data = {x: y for x, y in data.items() if x in desired_keys}
|
||||
else:
|
||||
data = {}
|
||||
|
||||
|
|
|
@ -115,7 +115,7 @@ def synchronized(obj, lock=None, ctx=None):
|
|||
scls = class_cache[cls]
|
||||
except KeyError:
|
||||
names = [field[0] for field in cls._fields_]
|
||||
d = dict((name, make_property(name)) for name in names)
|
||||
d = {name: make_property(name) for name in names}
|
||||
classname = 'Synchronized' + cls.__name__
|
||||
scls = class_cache[cls] = type(classname, (SynchronizedBase,), d)
|
||||
return scls(obj, lock, ctx)
|
||||
|
|
|
@ -114,10 +114,7 @@ class _WindowsFlavour(_Flavour):
|
|||
|
||||
is_supported = (os.name == 'nt')
|
||||
|
||||
drive_letters = (
|
||||
set(chr(x) for x in range(ord('a'), ord('z') + 1)) |
|
||||
set(chr(x) for x in range(ord('A'), ord('Z') + 1))
|
||||
)
|
||||
drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
|
||||
ext_namespace_prefix = '\\\\?\\'
|
||||
|
||||
reserved_names = (
|
||||
|
|
|
@ -500,8 +500,7 @@ def add_callers(target, source):
|
|||
if func in new_callers:
|
||||
if isinstance(caller, tuple):
|
||||
# format used by cProfile
|
||||
new_callers[func] = tuple([i[0] + i[1] for i in
|
||||
zip(caller, new_callers[func])])
|
||||
new_callers[func] = tuple(i[0] + i[1] for i in zip(caller, new_callers[func]))
|
||||
else:
|
||||
# format used by profile
|
||||
new_callers[func] += caller
|
||||
|
|
|
@ -119,8 +119,8 @@ class Function(SymbolTable):
|
|||
__globals = None
|
||||
|
||||
def __idents_matching(self, test_func):
|
||||
return tuple([ident for ident in self.get_identifiers()
|
||||
if test_func(self._table.symbols[ident])])
|
||||
return tuple(ident for ident in self.get_identifiers()
|
||||
if test_func(self._table.symbols[ident]))
|
||||
|
||||
def get_parameters(self):
|
||||
if self.__params is None:
|
||||
|
|
|
@ -142,7 +142,7 @@ def _all_string_prefixes():
|
|||
# 'rf'). The various permutations will be generated.
|
||||
_valid_string_prefixes = ['b', 'r', 'u', 'f', 'br', 'fr']
|
||||
# if we add binary f-strings, add: ['fb', 'fbr']
|
||||
result = set([''])
|
||||
result = {''}
|
||||
for prefix in _valid_string_prefixes:
|
||||
for t in _itertools.permutations(prefix):
|
||||
# create a list with upper and lower versions of each
|
||||
|
|
|
@ -253,8 +253,7 @@ class FrameSummary:
|
|||
self._line = line
|
||||
if lookup_line:
|
||||
self.line
|
||||
self.locals = \
|
||||
dict((k, repr(v)) for k, v in locals.items()) if locals else None
|
||||
self.locals = {k: repr(v) for k, v in locals.items()} if locals else None
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, FrameSummary):
|
||||
|
|
|
@ -1175,7 +1175,7 @@ class TurtleScreen(TurtleScreenBase):
|
|||
cl = [16*int(cstr[h], 16) for h in cstr[1:]]
|
||||
else:
|
||||
raise TurtleGraphicsError("bad colorstring: %s" % cstr)
|
||||
return tuple([c * self._colormode/255 for c in cl])
|
||||
return tuple(c * self._colormode/255 for c in cl)
|
||||
|
||||
def colormode(self, cmode=None):
|
||||
"""Return the colormode or set it to 1.0 or 255.
|
||||
|
@ -2989,7 +2989,7 @@ class RawTurtle(TPen, TNavigator):
|
|||
t11, t12, t21, t22 = l, 0, 0, l
|
||||
elif self._resizemode == "noresize":
|
||||
return polygon
|
||||
return tuple([(t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon])
|
||||
return tuple((t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon)
|
||||
|
||||
def _drawturtle(self):
|
||||
"""Manages the correct rendering of the turtle with respect to
|
||||
|
@ -3839,8 +3839,8 @@ def write_docstringdict(filename="turtle_docstringdict"):
|
|||
docsdict[key] = eval(key).__doc__
|
||||
|
||||
with open("%s.py" % filename,"w") as f:
|
||||
keys = sorted([x for x in docsdict.keys()
|
||||
if x.split('.')[1] not in _alias_list])
|
||||
keys = sorted(x for x in docsdict.keys()
|
||||
if x.split('.')[1] not in _alias_list)
|
||||
f.write('docsdict = {\n\n')
|
||||
for key in keys[:-1]:
|
||||
f.write('%s :\n' % repr(key))
|
||||
|
|
|
@ -52,7 +52,7 @@ def main():
|
|||
sleep(1)
|
||||
|
||||
at = clock()
|
||||
while any([t.undobufferentries() for t in s.turtles()]):
|
||||
while any(t.undobufferentries() for t in s.turtles()):
|
||||
for t in s.turtles():
|
||||
t.undo()
|
||||
et = clock()
|
||||
|
|
|
@ -683,8 +683,8 @@ class HTTPRedirectHandler(BaseHandler):
|
|||
newurl = newurl.replace(' ', '%20')
|
||||
|
||||
CONTENT_HEADERS = ("content-length", "content-type")
|
||||
newheaders = dict((k, v) for k, v in req.headers.items()
|
||||
if k.lower() not in CONTENT_HEADERS)
|
||||
newheaders = {k: v for k, v in req.headers.items()
|
||||
if k.lower() not in CONTENT_HEADERS}
|
||||
return Request(newurl,
|
||||
headers=newheaders,
|
||||
origin_req_host=req.origin_req_host,
|
||||
|
@ -845,7 +845,7 @@ class HTTPPasswordMgr:
|
|||
self.passwd[realm] = {}
|
||||
for default_port in True, False:
|
||||
reduced_uri = tuple(
|
||||
[self.reduce_uri(u, default_port) for u in uri])
|
||||
self.reduce_uri(u, default_port) for u in uri)
|
||||
self.passwd[realm][reduced_uri] = (user, passwd)
|
||||
|
||||
def find_user_password(self, realm, authuri):
|
||||
|
@ -1286,8 +1286,7 @@ class AbstractHTTPHandler(BaseHandler):
|
|||
h.set_debuglevel(self._debuglevel)
|
||||
|
||||
headers = dict(req.unredirected_hdrs)
|
||||
headers.update(dict((k, v) for k, v in req.headers.items()
|
||||
if k not in headers))
|
||||
headers.update((k, v) for k, v in req.headers.items() if k not in headers)
|
||||
|
||||
# TODO(jhylton): Should this be redesigned to handle
|
||||
# persistent connections?
|
||||
|
@ -1299,7 +1298,7 @@ class AbstractHTTPHandler(BaseHandler):
|
|||
# So make sure the connection gets closed after the (only)
|
||||
# request.
|
||||
headers["Connection"] = "close"
|
||||
headers = dict((name.title(), val) for name, val in headers.items())
|
||||
headers = {name.title(): val for name, val in headers.items()}
|
||||
|
||||
if req._tunnel_host:
|
||||
tunnel_headers = {}
|
||||
|
|
|
@ -1097,8 +1097,8 @@ class PyTupleObjectPtr(PyObjectPtr):
|
|||
return ProxyAlreadyVisited('(...)')
|
||||
visited.add(self.as_address())
|
||||
|
||||
result = tuple([PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
|
||||
for i in safe_range(int_from_int(self.field('ob_size')))])
|
||||
result = tuple(PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
|
||||
for i in safe_range(int_from_int(self.field('ob_size'))))
|
||||
return result
|
||||
|
||||
def write_repr(self, out, visited):
|
||||
|
|
|
@ -83,7 +83,7 @@ class Stats:
|
|||
columns.update(self.stats[ext])
|
||||
cols = sorted(columns)
|
||||
colwidth = {}
|
||||
colwidth["ext"] = max([len(ext) for ext in exts])
|
||||
colwidth["ext"] = max(map(len, exts))
|
||||
minwidth = 6
|
||||
self.stats["TOTAL"] = {}
|
||||
for col in cols:
|
||||
|
|
|
@ -609,7 +609,7 @@ def makeunicodename(unicode, trace):
|
|||
if name and name[0] != "<":
|
||||
names[char] = name + chr(0)
|
||||
|
||||
print(len(list(n for n in names if n is not None)), "distinct names")
|
||||
print(len([n for n in names if n is not None]), "distinct names")
|
||||
|
||||
# collect unique words from names (note that we differ between
|
||||
# words inside a sentence, and words ending a sentence. the
|
||||
|
|
Loading…
Reference in New Issue