From 3972628de3d569c88451a2a176a1c94d8822b8a6 Mon Sep 17 00:00:00 2001
From: Jon Dufresne
Date: Thu, 18 May 2017 07:35:54 -0700
Subject: [PATCH] bpo-30296 Remove unnecessary tuples, lists, sets, and dicts
 (#1489)

* Replaced list(<generator expression>) with list comprehension
* Replaced dict(<generator expression>) with dict comprehension
* Replaced set(<list or tuple literal>) with set literal
* Replaced builtin func(<list comprehension>) with func(<generator expression>)
  when supported (e.g. any(), all(), tuple(), min(), & max())
---
 Lib/_weakrefset.py                  |  6 +++---
 Lib/distutils/msvc9compiler.py      |  2 +-
 Lib/email/headerregistry.py         |  4 ++--
 Lib/inspect.py                      |  2 +-
 Lib/logging/config.py               |  4 ++--
 Lib/multiprocessing/context.py      |  2 +-
 Lib/multiprocessing/forkserver.py   |  3 +--
 Lib/multiprocessing/sharedctypes.py |  2 +-
 Lib/pathlib.py                      |  5 +----
 Lib/pstats.py                       |  3 +--
 Lib/symtable.py                     |  4 ++--
 Lib/tokenize.py                     |  2 +-
 Lib/traceback.py                    |  3 +--
 Lib/turtle.py                       |  8 ++++----
 Lib/turtledemo/wikipedia.py         |  2 +-
 Lib/urllib/request.py               | 11 +++++------
 Tools/gdb/libpython.py              |  4 ++--
 Tools/scripts/byext.py              |  2 +-
 Tools/unicode/makeunicodedata.py    |  2 +-
 19 files changed, 32 insertions(+), 39 deletions(-)

diff --git a/Lib/_weakrefset.py b/Lib/_weakrefset.py
index 4d0de8ce753..304c66f59bd 100644
--- a/Lib/_weakrefset.py
+++ b/Lib/_weakrefset.py
@@ -157,19 +157,19 @@ class WeakSet:
     __le__ = issubset
 
     def __lt__(self, other):
-        return self.data < set(ref(item) for item in other)
+        return self.data < set(map(ref, other))
 
     def issuperset(self, other):
         return self.data.issuperset(ref(item) for item in other)
     __ge__ = issuperset
 
     def __gt__(self, other):
-        return self.data > set(ref(item) for item in other)
+        return self.data > set(map(ref, other))
 
     def __eq__(self, other):
         if not isinstance(other, self.__class__):
             return NotImplemented
-        return self.data == set(ref(item) for item in other)
+        return self.data == set(map(ref, other))
 
     def symmetric_difference(self, other):
         newset = self.copy()
diff --git a/Lib/distutils/msvc9compiler.py b/Lib/distutils/msvc9compiler.py
index 21191276227..c401ddc86eb 100644
--- a/Lib/distutils/msvc9compiler.py
+++ b/Lib/distutils/msvc9compiler.py
@@ -255,7 +255,7 @@ def query_vcvarsall(version, arch="x86"):
     """Launch vcvarsall.bat and read the settings from its environment
     """
     vcvarsall = find_vcvarsall(version)
-    interesting = set(("include", "lib", "libpath", "path"))
+    interesting = {"include", "lib", "libpath", "path"}
     result = {}
 
     if vcvarsall is None:
diff --git a/Lib/email/headerregistry.py b/Lib/email/headerregistry.py
index 0fc2231e5cb..81fee146dcc 100644
--- a/Lib/email/headerregistry.py
+++ b/Lib/email/headerregistry.py
@@ -369,8 +369,8 @@ class AddressHeader:
     @property
     def addresses(self):
         if self._addresses is None:
-            self._addresses = tuple([address for group in self._groups
-                                     for address in group.addresses])
+            self._addresses = tuple(address for group in self._groups
+                                    for address in group.addresses)
         return self._addresses
 
 
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 3317f58f475..9c072eb0747 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -389,7 +389,7 @@ def classify_class_attrs(cls):
 
     mro = getmro(cls)
     metamro = getmro(type(cls)) # for attributes stored in the metaclass
-    metamro = tuple([cls for cls in metamro if cls not in (type, object)])
+    metamro = tuple(cls for cls in metamro if cls not in (type, object))
     class_bases = (cls,) + mro
     all_bases = class_bases + metamro
     names = dir(cls)
diff --git a/Lib/logging/config.py b/Lib/logging/config.py
index 917178e5118..d692514adfe 100644
--- a/Lib/logging/config.py
+++ b/Lib/logging/config.py
@@ -463,7 +463,7 @@ class BaseConfigurator(object):
             c = self.resolve(c)
         props = config.pop('.', None)
         # Check for valid identifiers
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
         result = c(**kwargs)
         if props:
             for name, value in props.items():
@@ -726,7 +726,7 @@ class DictConfigurator(BaseConfigurator):
                 config['address'] = self.as_tuple(config['address'])
             factory = klass
         props = config.pop('.', None)
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
         try:
             result = factory(**kwargs)
         except TypeError as te:
diff --git a/Lib/multiprocessing/context.py b/Lib/multiprocessing/context.py
index a3d491bde5a..c98ee434249 100644
--- a/Lib/multiprocessing/context.py
+++ b/Lib/multiprocessing/context.py
@@ -261,7 +261,7 @@ class DefaultContext(BaseContext):
         else:
             return ['fork', 'spawn']
 
-DefaultContext.__all__ = list(x for x in dir(DefaultContext) if x[0] != '_')
+DefaultContext.__all__ = [x for x in dir(DefaultContext) if x[0] != '_']
 
 #
 # Context types for fixed start method
diff --git a/Lib/multiprocessing/forkserver.py b/Lib/multiprocessing/forkserver.py
index d5ce6257456..6e095399936 100644
--- a/Lib/multiprocessing/forkserver.py
+++ b/Lib/multiprocessing/forkserver.py
@@ -98,8 +98,7 @@ class ForkServer(object):
             if self._preload_modules:
                 desired_keys = {'main_path', 'sys_path'}
                 data = spawn.get_preparation_data('ignore')
-                data = dict((x,y) for (x,y) in data.items()
-                            if x in desired_keys)
+                data = {x: y for x, y in data.items() if x in desired_keys}
             else:
                 data = {}
 
diff --git a/Lib/multiprocessing/sharedctypes.py b/Lib/multiprocessing/sharedctypes.py
index 25cbcf2ae4c..7228751770f 100644
--- a/Lib/multiprocessing/sharedctypes.py
+++ b/Lib/multiprocessing/sharedctypes.py
@@ -115,7 +115,7 @@ def synchronized(obj, lock=None, ctx=None):
             scls = class_cache[cls]
         except KeyError:
             names = [field[0] for field in cls._fields_]
-            d = dict((name, make_property(name)) for name in names)
+            d = {name: make_property(name) for name in names}
             classname = 'Synchronized' + cls.__name__
             scls = class_cache[cls] = type(classname, (SynchronizedBase,), d)
         return scls(obj, lock, ctx)
diff --git a/Lib/pathlib.py b/Lib/pathlib.py
index 4368eba8a0e..4d89436f775 100644
--- a/Lib/pathlib.py
+++ b/Lib/pathlib.py
@@ -114,10 +114,7 @@ class _WindowsFlavour(_Flavour):
 
     is_supported = (os.name == 'nt')
 
-    drive_letters = (
-        set(chr(x) for x in range(ord('a'), ord('z') + 1)) |
-        set(chr(x) for x in range(ord('A'), ord('Z') + 1))
-    )
+    drive_letters = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ')
     ext_namespace_prefix = '\\\\?\\'
 
     reserved_names = (
diff --git a/Lib/pstats.py b/Lib/pstats.py
index b8bcfb23a5f..b7a20542a39 100644
--- a/Lib/pstats.py
+++ b/Lib/pstats.py
@@ -500,8 +500,7 @@ def add_callers(target, source):
         if func in new_callers:
             if isinstance(caller, tuple):
                 # format used by cProfile
-                new_callers[func] = tuple([i[0] + i[1] for i in
-                                           zip(caller, new_callers[func])])
+                new_callers[func] = tuple(i[0] + i[1] for i in zip(caller, new_callers[func]))
             else:
                 # format used by profile
                 new_callers[func] += caller
diff --git a/Lib/symtable.py b/Lib/symtable.py
index b0e52603dce..c7627a6ef68 100644
--- a/Lib/symtable.py
+++ b/Lib/symtable.py
@@ -119,8 +119,8 @@ class Function(SymbolTable):
     __globals = None
 
     def __idents_matching(self, test_func):
-        return tuple([ident for ident in self.get_identifiers()
-                      if test_func(self._table.symbols[ident])])
+        return tuple(ident for ident in self.get_identifiers()
+                     if test_func(self._table.symbols[ident]))
 
     def get_parameters(self):
         if self.__params is None:
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index eea88b7d432..634662da265 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -142,7 +142,7 @@ def _all_string_prefixes():
     # 'rf'). The various permutations will be generated.
     _valid_string_prefixes = ['b', 'r', 'u', 'f', 'br', 'fr']
     # if we add binary f-strings, add: ['fb', 'fbr']
-    result = set([''])
+    result = {''}
     for prefix in _valid_string_prefixes:
         for t in _itertools.permutations(prefix):
             # create a list with upper and lower versions of each
diff --git a/Lib/traceback.py b/Lib/traceback.py
index 09bda717ad0..fb3bce12a13 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -253,8 +253,7 @@ class FrameSummary:
         self._line = line
         if lookup_line:
             self.line
-        self.locals = \
-            dict((k, repr(v)) for k, v in locals.items()) if locals else None
+        self.locals = {k: repr(v) for k, v in locals.items()} if locals else None
 
     def __eq__(self, other):
         if isinstance(other, FrameSummary):
diff --git a/Lib/turtle.py b/Lib/turtle.py
index 8036b7faaa8..b2623f16725 100644
--- a/Lib/turtle.py
+++ b/Lib/turtle.py
@@ -1175,7 +1175,7 @@ class TurtleScreen(TurtleScreenBase):
             cl = [16*int(cstr[h], 16) for h in cstr[1:]]
         else:
             raise TurtleGraphicsError("bad colorstring: %s" % cstr)
-        return tuple([c * self._colormode/255 for c in cl])
+        return tuple(c * self._colormode/255 for c in cl)
 
     def colormode(self, cmode=None):
         """Return the colormode or set it to 1.0 or 255.
@@ -2989,7 +2989,7 @@ class RawTurtle(TPen, TNavigator):
             t11, t12, t21, t22 = l, 0, 0, l
         elif self._resizemode == "noresize":
             return polygon
-        return tuple([(t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon])
+        return tuple((t11*x + t12*y, t21*x + t22*y) for (x, y) in polygon)
 
     def _drawturtle(self):
         """Manages the correct rendering of the turtle with respect to
@@ -3839,8 +3839,8 @@ def write_docstringdict(filename="turtle_docstringdict"):
         docsdict[key] = eval(key).__doc__
 
     with open("%s.py" % filename,"w") as f:
-        keys = sorted([x for x in docsdict.keys()
-                       if x.split('.')[1] not in _alias_list])
+        keys = sorted(x for x in docsdict.keys()
+                      if x.split('.')[1] not in _alias_list)
         f.write('docsdict = {\n\n')
         for key in keys[:-1]:
             f.write('%s :\n' % repr(key))
diff --git a/Lib/turtledemo/wikipedia.py b/Lib/turtledemo/wikipedia.py
index 0f274420c8f..d6bbad892db 100644
--- a/Lib/turtledemo/wikipedia.py
+++ b/Lib/turtledemo/wikipedia.py
@@ -52,7 +52,7 @@ def main():
     sleep(1)
 
     at = clock()
-    while any([t.undobufferentries() for t in s.turtles()]):
+    while any(t.undobufferentries() for t in s.turtles()):
         for t in s.turtles():
             t.undo()
     et = clock()
diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py
index 3f8dcfb151d..a192d527d8b 100644
--- a/Lib/urllib/request.py
+++ b/Lib/urllib/request.py
@@ -683,8 +683,8 @@ class HTTPRedirectHandler(BaseHandler):
         newurl = newurl.replace(' ', '%20')
 
         CONTENT_HEADERS = ("content-length", "content-type")
-        newheaders = dict((k, v) for k, v in req.headers.items()
-                          if k.lower() not in CONTENT_HEADERS)
+        newheaders = {k: v for k, v in req.headers.items()
+                      if k.lower() not in CONTENT_HEADERS}
         return Request(newurl,
                        headers=newheaders,
                        origin_req_host=req.origin_req_host,
@@ -845,7 +845,7 @@ class HTTPPasswordMgr:
             self.passwd[realm] = {}
         for default_port in True, False:
             reduced_uri = tuple(
-                [self.reduce_uri(u, default_port) for u in uri])
+                self.reduce_uri(u, default_port) for u in uri)
             self.passwd[realm][reduced_uri] = (user, passwd)
 
     def find_user_password(self, realm, authuri):
@@ -1286,8 +1286,7 @@ class AbstractHTTPHandler(BaseHandler):
         h.set_debuglevel(self._debuglevel)
 
         headers = dict(req.unredirected_hdrs)
-        headers.update(dict((k, v) for k, v in req.headers.items()
-                            if k not in headers))
+        headers.update((k, v) for k, v in req.headers.items() if k not in headers)
 
         # TODO(jhylton): Should this be redesigned to handle
         # persistent connections?
@@ -1299,7 +1298,7 @@ class AbstractHTTPHandler(BaseHandler):
         # So make sure the connection gets closed after the (only)
         # request.
         headers["Connection"] = "close"
-        headers = dict((name.title(), val) for name, val in headers.items())
+        headers = {name.title(): val for name, val in headers.items()}
 
         if req._tunnel_host:
             tunnel_headers = {}
diff --git a/Tools/gdb/libpython.py b/Tools/gdb/libpython.py
index 0e9df2bd52f..cc23b8402df 100755
--- a/Tools/gdb/libpython.py
+++ b/Tools/gdb/libpython.py
@@ -1097,8 +1097,8 @@ class PyTupleObjectPtr(PyObjectPtr):
             return ProxyAlreadyVisited('(...)')
         visited.add(self.as_address())
 
-        result = tuple([PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
-                        for i in safe_range(int_from_int(self.field('ob_size')))])
+        result = tuple(PyObjectPtr.from_pyobject_ptr(self[i]).proxyval(visited)
+                       for i in safe_range(int_from_int(self.field('ob_size'))))
         return result
 
     def write_repr(self, out, visited):
diff --git a/Tools/scripts/byext.py b/Tools/scripts/byext.py
index 736a441761f..a4b2f7ff6d8 100755
--- a/Tools/scripts/byext.py
+++ b/Tools/scripts/byext.py
@@ -83,7 +83,7 @@ class Stats:
             columns.update(self.stats[ext])
         cols = sorted(columns)
         colwidth = {}
-        colwidth["ext"] = max([len(ext) for ext in exts])
+        colwidth["ext"] = max(map(len, exts))
         minwidth = 6
         self.stats["TOTAL"] = {}
         for col in cols:
diff --git a/Tools/unicode/makeunicodedata.py b/Tools/unicode/makeunicodedata.py
index 5d8014a5da3..472324fdefa 100644
--- a/Tools/unicode/makeunicodedata.py
+++ b/Tools/unicode/makeunicodedata.py
@@ -609,7 +609,7 @@ def makeunicodename(unicode, trace):
         if name and name[0] != "<":
             names[char] = name + chr(0)
 
-    print(len(list(n for n in names if n is not None)), "distinct names")
+    print(len([n for n in names if n is not None]), "distinct names")
 
     # collect unique words from names (note that we differ between
     # words inside a sentence, and words ending a sentence. the
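
The hunks above all apply one of four mechanical rewrites described in the commit
message. The sketch below, which is not part of the patch and uses invented names
purely for illustration, shows the before/after shape of each pattern:

    # Illustrative only: made-up names demonstrating the rewrite patterns above.
    words = ["spam", "ham", "eggs"]

    # list(<generator expression>) -> list comprehension
    lengths = [len(w) for w in words]            # was: list(len(w) for w in words)

    # dict(<generator expression>) -> dict comprehension
    sizes = {w: len(w) for w in words}           # was: dict((w, len(w)) for w in words)

    # set(<list or tuple literal>) -> set literal
    interesting = {"include", "lib", "path"}     # was: set(("include", "lib", "path"))

    # func([<list comprehension>]) -> func(<generator expression>) for builtins
    # such as any(), all(), tuple(), min(), and max(): the builtin consumes the
    # items directly, so the intermediate list is never built.
    has_s = any(w.startswith("s") for w in words)     # was: any([w.startswith("s") for w in words])
    shortest = min(len(w) for w in words)             # was: min([len(w) for w in words])
    as_tuple = tuple(w.upper() for w in words)        # was: tuple([w.upper() for w in words])

    print(lengths, sizes, interesting, has_s, shortest, as_tuple)

Note that sorted() (as in the Lib/turtle.py hunk) still builds its own list from
whatever iterable it receives, so passing a generator expression there only avoids
the extra intermediate list, not the sort buffer itself.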