mirror of https://github.com/python/cpython
SF 633560: tokenize.__all__ needs "generate_tokens"
This commit is contained in:
parent
a326f47a13
commit
78a7aeeb1a
|
@@ -30,7 +30,8 @@ import string, re
|
||||||
from token import *
|
from token import *
|
||||||
|
|
||||||
import token
|
import token
|
||||||
__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize", "NL"]
|
__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
|
||||||
|
"generate_tokens", "NL"]
|
||||||
del x
|
del x
|
||||||
del token
|
del token
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue