cpython/Doc/includes/mp_synchronize.py

#
# A test file for the `multiprocessing` package
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
import time, sys, random
from queue import Empty

import multiprocessing # may get overwritten

#### TEST_VALUE
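# Worker processes decrement a shared counter under a lock while the parent
# polls it until it reaches zero.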
def value_func(running, mutex):
    random.seed()
    time.sleep(random.random()*4)

    mutex.acquire()
    print('\n\t\t\t' + str(multiprocessing.current_process()) + ' has finished')
    running.value -= 1
    mutex.release()

def test_value():
    TASKS = 10
    running = multiprocessing.Value('i', TASKS)
    mutex = multiprocessing.Lock()

    for i in range(TASKS):
        p = multiprocessing.Process(target=value_func, args=(running, mutex))
        p.start()

    while running.value > 0:
        time.sleep(0.08)
        mutex.acquire()
        print(running.value, end=' ')
        sys.stdout.flush()
        mutex.release()

    print()
    print('No more running processes')

#### TEST_QUEUE
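# A single producer feeds a queue; the parent consumes with a timeout and
# stops when it sees the 'STOP' sentinel.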
def queue_func(queue):
    for i in range(30):
        time.sleep(0.5 * random.random())
        queue.put(i*i)
    queue.put('STOP')

def test_queue():
    q = multiprocessing.Queue()

    p = multiprocessing.Process(target=queue_func, args=(q,))
    p.start()

    o = None
    while o != 'STOP':
        try:
            o = q.get(timeout=0.3)
            print(o, end=' ')
            sys.stdout.flush()
        except Empty:
            print('TIMEOUT')

    print()

#### TEST_CONDITION
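# The parent waits on a condition variable (holding its lock recursively)
# until the child acquires it and notifies.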
def condition_func(cond):
    cond.acquire()
    print('\t' + str(cond))
    time.sleep(2)
    print('\tchild is notifying')
    print('\t' + str(cond))
    cond.notify()
    cond.release()

def test_condition():
    cond = multiprocessing.Condition()

    p = multiprocessing.Process(target=condition_func, args=(cond,))
    print(cond)

    cond.acquire()
    print(cond)
    cond.acquire()
    print(cond)

    p.start()

    print('main is waiting')
    cond.wait()
    print('main has woken up')

    print(cond)
    cond.release()
    print(cond)
    cond.release()

    p.join()
    print(cond)

#### TEST_SEMAPHORE
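# A semaphore limits the workers to three running at a time; a shared value
# guarded by an RLock tracks how many are currently inside.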
def semaphore_func(sema, mutex, running):
    sema.acquire()

    mutex.acquire()
    running.value += 1
    print(running.value, 'tasks are running')
    mutex.release()

    random.seed()
    time.sleep(random.random()*2)

    mutex.acquire()
    running.value -= 1
    print('%s has finished' % multiprocessing.current_process())
    mutex.release()

    sema.release()

def test_semaphore():
    sema = multiprocessing.Semaphore(3)
    mutex = multiprocessing.RLock()
    running = multiprocessing.Value('i', 0)

    processes = [
        multiprocessing.Process(target=semaphore_func,
                                args=(sema, mutex, running))
        for i in range(10)
        ]

    for p in processes:
        p.start()

    for p in processes:
        p.join()

#### TEST_JOIN_TIMEOUT
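# join() is retried with a one-second timeout until the child has exited.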
def join_timeout_func():
    print('\tchild sleeping')
    time.sleep(5.5)
    print('\n\tchild terminating')

def test_join_timeout():
    p = multiprocessing.Process(target=join_timeout_func)
    p.start()

    print('waiting for process to finish')

    while 1:
        p.join(timeout=1)
        if not p.is_alive():
            break
        print('.', end=' ')
        sys.stdout.flush()

#### TEST_EVENT
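# Five workers block on an event until the parent sets it.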
def event_func(event):
    print('\t%r is waiting' % multiprocessing.current_process())
    event.wait()
    print('\t%r has woken up' % multiprocessing.current_process())

def test_event():
    event = multiprocessing.Event()

    processes = [multiprocessing.Process(target=event_func, args=(event,))
                 for i in range(5)]

    for p in processes:
        p.start()

    print('main is sleeping')
    time.sleep(2)

    print('main is setting event')
    event.set()

    for p in processes:
        p.join()

#### TEST_SHAREDVALUES
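# Shared Value/Array objects are compared against their plain-Python sources
# from inside a child process.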
def sharedvalues_func(values, arrays, shared_values, shared_arrays):
    for i in range(len(values)):
        v = values[i][1]
        sv = shared_values[i].value
        assert v == sv

    for i in range(len(values)):
        a = arrays[i][1]
        sa = list(shared_arrays[i][:])
        assert a == sa

    print('Tests passed')

def test_sharedvalues():
    values = [
        ('i', 10),
        ('h', -2),
        ('d', 1.25)
        ]
    arrays = [
        ('i', list(range(100))),
        ('d', [0.25 * i for i in range(100)]),
        ('H', list(range(1000)))
        ]

    shared_values = [multiprocessing.Value(id, v) for id, v in values]
    shared_arrays = [multiprocessing.Array(id, a) for id, a in arrays]

    p = multiprocessing.Process(
        target=sharedvalues_func,
        args=(values, arrays, shared_values, shared_arrays)
        )
    p.start()
    p.join()

    assert p.exitcode == 0

####
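# Driver: rebind the global `multiprocessing` name to the chosen namespace
# (multiprocessing itself, a Manager, or multiprocessing.dummy) and run every test.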
def test(namespace=multiprocessing):
    global multiprocessing

    multiprocessing = namespace

    for func in [ test_value, test_queue, test_condition,
                  test_semaphore, test_join_timeout, test_event,
                  test_sharedvalues ]:

        print('\n\t######## %s\n' % func.__name__)
        func()

    ignore = multiprocessing.active_children() # cleanup any old processes

    if hasattr(multiprocessing, '_debug_info'):
        info = multiprocessing._debug_info()
        if info:
            print(info)
            raise ValueError('there should be no positive refcounts left')

if __name__ == '__main__':
    multiprocessing.freeze_support()

    assert len(sys.argv) in (1, 2)

    if len(sys.argv) == 1 or sys.argv[1] == 'processes':
        print(' Using processes '.center(79, '-'))
        namespace = multiprocessing
    elif sys.argv[1] == 'manager':
        print(' Using processes and a manager '.center(79, '-'))
        namespace = multiprocessing.Manager()
        namespace.Process = multiprocessing.Process
        namespace.current_process = multiprocessing.current_process
        namespace.active_children = multiprocessing.active_children
    elif sys.argv[1] == 'threads':
        print(' Using threads '.center(79, '-'))
        import multiprocessing.dummy as namespace
    else:
        print('Usage:\n\t%s [processes | manager | threads]' % sys.argv[0])
        raise SystemExit(2)

    test(namespace)