mirror of https://github.com/python/cpython
bpo-43014: Improve performance of tokenize.tokenize by 20-30%
This commit is contained in:
parent bf9239bb61
commit 15bd9efd01
@@ -27,6 +27,7 @@ __credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
 from builtins import open as _builtin_open
 from codecs import lookup, BOM_UTF8
 import collections
+import functools
 from io import TextIOWrapper
 import itertools as _itertools
 import re
@@ -95,6 +96,7 @@ def _all_string_prefixes():
                 result.add(''.join(u))
     return result
 
+@functools.lru_cache
 def _compile(expr):
     return re.compile(expr, re.UNICODE)
 
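For context on the change above: functools.lru_cache memoizes the pattern-compiling helper, so repeated calls with the same pattern string reuse the already-compiled regex object instead of recompiling it. A minimal standalone sketch of that pattern follows; the name _compile_cached is illustrative and not part of tokenize itself.

import functools
import re

@functools.lru_cache
def _compile_cached(expr):
    # Compiled once per distinct pattern string; later calls with the
    # same string are served from the cache.
    return re.compile(expr, re.UNICODE)

p1 = _compile_cached(r"\d+")
p2 = _compile_cached(r"\d+")
assert p1 is p2                       # same cached pattern object, no recompilation
print(_compile_cached.cache_info())   # e.g. hits=1, misses=1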
@@ -0,0 +1 @@
+Improve performance of :mod:`tokenize` by 20-30%. Patch by Anthony Sottile.
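One rough way to sanity-check a speedup claim like this is a micro-benchmark over tokenize.tokenize. The sketch below uses a synthetic source string and an arbitrary iteration count; it is only an illustration, not the benchmark behind the reported 20-30% figure.

import io
import timeit
import tokenize

source = b"def f(x):\n    return x + 1\n" * 200   # synthetic module source

def run():
    # Exhaust one full token stream over the source bytes.
    list(tokenize.tokenize(io.BytesIO(source).readline))

print(timeit.timeit(run, number=200))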