"""Test suite for the profile module."""

import sys
import pstats
import unittest
from difflib import unified_diff
from io import StringIO
from test.support import run_unittest

import profile
from test.profilee import testfunc, timer


class ProfileTest(unittest.TestCase):
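    # These are class attributes so that a subclass (e.g. the cProfile
    # variant in Lib/test/test_cProfile.py) can swap in a different profiler
    # implementation and its expected report markers.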

    profilerclass = profile.Profile
    methodnames = ['print_stats', 'print_callers', 'print_callees']
    expected_max_output = ':0(max)'

    def get_expected_output(self):
        return _ProfileOutput

    @classmethod
    def do_profiling(cls):
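        # Profile profilee.testfunc() and collect, in order: the number of
        # timer ticks the run consumed, followed by the text of each pstats
        # report listed in cls.methodnames.  The timer comes from
        # test.profilee and is a deterministic tick counter, so the captured
        # figures are reproducible rather than wall-clock dependent.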
        results = []
        prof = cls.profilerclass(timer, 0.001)
        start_timer = timer()
        prof.runctx("testfunc()", globals(), locals())
        results.append(timer() - start_timer)
        for methodname in cls.methodnames:
            s = StringIO()
            stats = pstats.Stats(prof, stream=s)
            stats.strip_dirs().sort_stats("stdname")
            getattr(stats, methodname)()
            output = s.getvalue().splitlines()
            mod_name = testfunc.__module__.rsplit('.', 1)[1]
            # Only compare against stats originating from the test file.
            # Prevents outside code (e.g., the io module) from causing
            # unexpected output.
            output = [line.rstrip() for line in output if mod_name in line]
            results.append('\n'.join(output))
        return results

    def test_cprofile(self):
        results = self.do_profiling()
        expected = self.get_expected_output()
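        # results[0] is the raw tick count measured around the profiled run;
        # testfunc() in test.profilee is designed to consume exactly 1000
        # ticks, which is what the assertion below checks.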
        self.assertEqual(results[0], 1000)
        for i, method in enumerate(self.methodnames):
            if results[i+1] != expected[method]:
                print("Stats.%s output for %s doesn't fit expectation!" %
                      (method, self.profilerclass.__name__))
                print('\n'.join(unified_diff(
                    results[i+1].split('\n'),
                    expected[method].split('\n'))))

    def test_calling_conventions(self):
        # Issue #5330: profile and cProfile wouldn't report C functions called
        # with keyword arguments. We test all calling conventions.
        stmts = [
            "max([0])",
            "max([0], key=int)",
            "max([0], **dict(key=int))",
            "max(*([0],))",
            "max(*([0],), key=int)",
            "max(*([0],), **dict(key=int))",
        ]
        for stmt in stmts:
            s = StringIO()
            prof = self.profilerclass(timer, 0.001)
            prof.runctx(stmt, globals(), locals())
            stats = pstats.Stats(prof, stream=s)
            stats.print_stats()
            res = s.getvalue()
            self.assertIn(self.expected_max_output, res,
                "Profiling {0!r} didn't report max:\n{1}".format(stmt, res))


def regenerate_expected_output(filename, cls):
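    # Rewrite the auto-generated part of this file: keep everything up to and
    # including the "#--cut" marker, then append a fresh _ProfileOutput dict
    # built from a new profiling run.  The rstrip('co') turns a .pyc/.pyo
    # path back into the .py source path.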
    filename = filename.rstrip('co')
    print('Regenerating %s...' % filename)
    results = cls.do_profiling()

    newfile = []
    with open(filename, 'r') as f:
        for line in f:
            newfile.append(line)
            if line.startswith('#--cut'):
                break

    with open(filename, 'w') as f:
        f.writelines(newfile)
        f.write("_ProfileOutput = {}\n")
        for i, method in enumerate(cls.methodnames):
            f.write('_ProfileOutput[%r] = """\\\n%s"""\n' % (
                method, results[i+1]))
        f.write('\nif __name__ == "__main__":\n    main()\n')


def test_main():
    run_unittest(ProfileTest)

def main():
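    # Running this file directly runs the test; passing -r instead
    # regenerates the expected output block at the end of this file
    # (e.g. "python Lib/test/test_profile.py -r" from a source checkout).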
    if '-r' not in sys.argv:
        test_main()
    else:
        regenerate_expected_output(__file__, ProfileTest)


# Don't remove this comment. Everything below it is auto-generated.
#--cut--------------------------------------------------------------------------
_ProfileOutput = {}
_ProfileOutput['print_stats'] = """\
       28   27.972    0.999   27.972    0.999 profilee.py:110(__getattr__)
        1  269.996  269.996  999.769  999.769 profilee.py:25(testfunc)
     23/3  149.937    6.519  169.917   56.639 profilee.py:35(factorial)
       20   19.980    0.999   19.980    0.999 profilee.py:48(mul)
        2   39.986   19.993  599.830  299.915 profilee.py:55(helper)
        4  115.984   28.996  119.964   29.991 profilee.py:73(helper1)
        2   -0.006   -0.003  139.946   69.973 profilee.py:84(helper2_indirect)
        8  311.976   38.997  399.912   49.989 profilee.py:88(helper2)
        8   63.976    7.997   79.960    9.995 profilee.py:98(subhelper)"""
_ProfileOutput['print_callers'] = """\
:0(append)                        <- profilee.py:73(helper1)(4)  119.964
:0(exc_info)                      <- profilee.py:73(helper1)(4)  119.964
:0(hasattr)                       <- profilee.py:73(helper1)(4)  119.964
                                     profilee.py:88(helper2)(8)  399.912
profilee.py:110(__getattr__)      <- :0(hasattr)(12)   11.964
                                     profilee.py:98(subhelper)(16)   79.960
profilee.py:25(testfunc)          <- <string>:1(<module>)(1)  999.767
profilee.py:35(factorial)         <- profilee.py:25(testfunc)(1)  999.769
                                     profilee.py:35(factorial)(20)  169.917
                                     profilee.py:84(helper2_indirect)(2)  139.946
profilee.py:48(mul)               <- profilee.py:35(factorial)(20)  169.917
profilee.py:55(helper)            <- profilee.py:25(testfunc)(2)  999.769
profilee.py:73(helper1)           <- profilee.py:55(helper)(4)  599.830
profilee.py:84(helper2_indirect)  <- profilee.py:55(helper)(2)  599.830
profilee.py:88(helper2)           <- profilee.py:55(helper)(6)  599.830
                                     profilee.py:84(helper2_indirect)(2)  139.946
profilee.py:98(subhelper)         <- profilee.py:88(helper2)(8)  399.912"""
_ProfileOutput['print_callees'] = """\
:0(hasattr)                       -> profilee.py:110(__getattr__)(12)   27.972
<string>:1(<module>)              -> profilee.py:25(testfunc)(1)  999.769
profilee.py:110(__getattr__)      ->
profilee.py:25(testfunc)          -> profilee.py:35(factorial)(1)  169.917
                                     profilee.py:55(helper)(2)  599.830
profilee.py:35(factorial)         -> profilee.py:35(factorial)(20)  169.917
                                     profilee.py:48(mul)(20)   19.980
profilee.py:48(mul)               ->
profilee.py:55(helper)            -> profilee.py:73(helper1)(4)  119.964
                                     profilee.py:84(helper2_indirect)(2)  139.946
                                     profilee.py:88(helper2)(6)  399.912
profilee.py:73(helper1)           -> :0(append)(4)   -0.004
profilee.py:84(helper2_indirect)  -> profilee.py:35(factorial)(2)  169.917
                                     profilee.py:88(helper2)(2)  399.912
profilee.py:88(helper2)           -> :0(hasattr)(8)   11.964
                                     profilee.py:98(subhelper)(8)   79.960
profilee.py:98(subhelper)         -> profilee.py:110(__getattr__)(16)   27.972"""

if __name__ == "__main__":
    main()