From e9b5399bee7106beeeb38a45cfef3f0ed3fdd703 Mon Sep 17 00:00:00 2001
From: Nikita Sobolev
Date: Thu, 19 Oct 2023 11:29:45 +0300
Subject: [PATCH] gh-111031: Check more files in `test_tokenize` (#111032)

---
 Lib/test/test_tokenize.py | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 41b9ebe3374..290f4608c5e 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1901,19 +1901,9 @@ class TestRoundtrip(TestCase):
         tempdir = os.path.dirname(__file__) or os.curdir
         testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))
 
-        # Tokenize is broken on test_pep3131.py because regular expressions are
-        # broken on the obscure unicode identifiers in it. *sigh*
-        # With roundtrip extended to test the 5-tuple mode of untokenize,
-        # 7 more testfiles fail.  Remove them also until the failure is diagnosed.
-
-        testfiles.remove(os.path.join(tempdir, "test_unicode_identifiers.py"))
-
         # TODO: Remove this once we can untokenize PEP 701 syntax
         testfiles.remove(os.path.join(tempdir, "test_fstring.py"))
 
-        for f in ('buffer', 'builtin', 'fileio', 'os', 'platform', 'sys'):
-            testfiles.remove(os.path.join(tempdir, "test_%s.py") % f)
-
         if not support.is_resource_enabled("cpu"):
             testfiles = random.sample(testfiles, 10)
 
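For context, the roundtrip property that this TestRoundtrip code path exercises
over the test files is roughly the following. This is a minimal sketch under
stated assumptions, not code from the patch; `roundtrip_ok` is a hypothetical
helper name, and the sketch checks the weaker token-equivalence form of the
property (full 5-tuple untokenization is expected to reproduce the source
exactly):

    # Sketch only: `roundtrip_ok` is a hypothetical helper, not part of
    # the patch. It tokenizes a file, untokenizes the full token stream,
    # and checks that the rebuilt source tokenizes to the same tokens.
    import io
    import tokenize

    def roundtrip_ok(path):
        with open(path, "rb") as f:
            source = f.read()
        tokens = list(tokenize.tokenize(io.BytesIO(source).readline))
        # With full tokens (ENCODING first), untokenize returns bytes.
        rebuilt = tokenize.untokenize(tokens)
        tokens2 = list(tokenize.tokenize(io.BytesIO(rebuilt).readline))
        return ([(t.type, t.string) for t in tokens]
                == [(t.type, t.string) for t in tokens2])

Files for which this property does not yet hold (here, test_fstring.py, until
PEP 701 syntax can be untokenized) are removed from the sampled set before
the test runs.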