SF 633560: tokenize.__all__ needs "generate_tokens"

Raymond Hettinger committed 2002-11-05 06:06:02 +00:00
parent a326f47a13
commit 78a7aeeb1a
1 changed file with 2 additions and 1 deletion


@@ -30,7 +30,8 @@ import string, re
 from token import *
 import token
-__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize", "NL"]
+__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
+           "generate_tokens", "NL"]
 del x
 del token
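
The fix matters for star-imports: without the entry in __all__, "from tokenize import *" silently omits generate_tokens even though it is a documented public function. A minimal usage sketch of the generator being exported (io.StringIO is used here so the snippet is self-contained and runs on modern Python; the 2002-era stdlib spelled it StringIO.StringIO):

    # Tokenize a small source string with generate_tokens(), which takes a
    # readline callable and yields 5-tuples:
    # (token type, token string, start (row, col), end (row, col), line).
    import tokenize
    from io import StringIO

    source = "x = 1 + 2\n"
    for tok_type, tok_string, start, end, line in tokenize.generate_tokens(
            StringIO(source).readline):
        print(tokenize.tok_name[tok_type], repr(tok_string))

Running this prints NAME 'x', OP '=', NUMBER '1', OP '+', NUMBER '2', then NEWLINE and ENDMARKER, one token per line.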