diff --git a/Lib/multiprocessing/context.py b/Lib/multiprocessing/context.py
index 8d0525d5d62..b1960ea296f 100644
--- a/Lib/multiprocessing/context.py
+++ b/Lib/multiprocessing/context.py
@@ -223,6 +223,10 @@ class Process(process.BaseProcess):
     def _Popen(process_obj):
         return _default_context.get_context().Process._Popen(process_obj)
 
+    @staticmethod
+    def _after_fork():
+        return _default_context.get_context().Process._after_fork()
+
 
 class DefaultContext(BaseContext):
     Process = Process
@@ -283,6 +287,11 @@ if sys.platform != 'win32':
             from .popen_spawn_posix import Popen
             return Popen(process_obj)
 
+        @staticmethod
+        def _after_fork():
+            # process is spawned, nothing to do
+            pass
+
     class ForkServerProcess(process.BaseProcess):
         _start_method = 'forkserver'
         @staticmethod
@@ -326,6 +335,11 @@ else:
             from .popen_spawn_win32 import Popen
             return Popen(process_obj)
 
+        @staticmethod
+        def _after_fork():
+            # process is spawned, nothing to do
+            pass
+
     class SpawnContext(BaseContext):
         _name = 'spawn'
         Process = SpawnProcess
diff --git a/Lib/multiprocessing/process.py b/Lib/multiprocessing/process.py
index 3917d2e4fa6..c03c859baa7 100644
--- a/Lib/multiprocessing/process.py
+++ b/Lib/multiprocessing/process.py
@@ -304,8 +304,7 @@ class BaseProcess(object):
             if threading._HAVE_THREAD_NATIVE_ID:
                 threading.main_thread()._set_native_id()
             try:
-                util._finalizer_registry.clear()
-                util._run_after_forkers()
+                self._after_fork()
             finally:
                 # delay finalization of the old process object until after
                 # _run_after_forkers() is executed
@@ -336,6 +335,13 @@ class BaseProcess(object):
 
         return exitcode
 
+    @staticmethod
+    def _after_fork():
+        from . import util
+        util._finalizer_registry.clear()
+        util._run_after_forkers()
+
+
 #
 # We subclass bytes to avoid accidental transmission of auth keys over network
 #
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index 67bb17c0ede..4a588d96deb 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -5,6 +5,7 @@
 import unittest
 import unittest.mock
 import queue as pyqueue
+import textwrap
 import time
 import io
 import itertools
@@ -5760,6 +5761,35 @@ class TestSyncManagerTypes(unittest.TestCase):
         self.run_worker(self._test_namespace, o)
 
 
+class TestNamedResource(unittest.TestCase):
+    def test_global_named_resource_spawn(self):
+        #
+        # gh-90549: Check that global named resources in the main module
+        # will not be leaked by a subprocess in spawn context.
+        #
+        testfn = os_helper.TESTFN
+        self.addCleanup(os_helper.unlink, testfn)
+        with open(testfn, 'w', encoding='utf-8') as f:
+            f.write(textwrap.dedent('''\
+                import multiprocessing as mp
+
+                ctx = mp.get_context('spawn')
+
+                global_resource = ctx.Semaphore()
+
+                def submain(): pass
+
+                if __name__ == '__main__':
+                    p = ctx.Process(target=submain)
+                    p.start()
+                    p.join()
+            '''))
+        rc, out, err = test.support.script_helper.assert_python_ok(testfn)
+        # on error, err = 'UserWarning: resource_tracker: There appear to
+        # be 1 leaked semaphore objects to clean up at shutdown'
+        self.assertEqual(err, b'')
+
+
 class MiscTestCase(unittest.TestCase):
     def test__all__(self):
         # Just make sure names in not_exported are excluded
diff --git a/Misc/NEWS.d/next/Library/2022-06-07-14-53-46.gh-issue-90549.T4FMKY.rst b/Misc/NEWS.d/next/Library/2022-06-07-14-53-46.gh-issue-90549.T4FMKY.rst
new file mode 100644
index 00000000000..6ebdc394900
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2022-06-07-14-53-46.gh-issue-90549.T4FMKY.rst
@@ -0,0 +1,2 @@
+Fix a multiprocessing bug where a global named resource (such as a semaphore)
+could leak when a child process is spawned (as opposed to forked).
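Reviewer note (not part of the patch): below is a minimal standalone reproducer sketching the failure mode the new test covers. The file name repro_gh90549.py is made up for illustration, and the comments describe my reading of the mechanism (roughly: the spawned child re-executes the main module, then the old _bootstrap cleared util._finalizer_registry, dropping the finalizer that would have unlinked the child's copy of the semaphore); the script body mirrors the inline script used by test_global_named_resource_spawn above.

    # repro_gh90549.py -- hypothetical file name, not part of the patch.
    # On an unpatched interpreter this script should end with a resource_tracker
    # warning about a leaked semaphore at shutdown; with the patch applied,
    # it should exit silently.
    import multiprocessing as mp

    ctx = mp.get_context('spawn')

    # A named resource created at module level: the spawned child re-runs this
    # module, and before the fix its _bootstrap() cleared util._finalizer_registry,
    # so the finalizer that would unlink the child's copy of the semaphore was
    # discarded and the resource tracker reported it as leaked.
    global_resource = ctx.Semaphore()

    def submain():
        pass

    if __name__ == '__main__':
        p = ctx.Process(target=submain)
        p.start()
        p.join()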