Support specifying hooks in pyperformance (#361)
* Support specifying hooks in pyperformance

* Adjust formatting

* Add a test
mdboom authored Oct 15, 2024
1 parent 6a42ffc commit eb539c6
Showing 4 changed files with 34 additions and 0 deletions.
4 changes: 4 additions & 0 deletions doc/usage.rst
@@ -104,6 +104,7 @@ Usage::
     [--append FILENAME] [--manifest MANIFEST]
     [--timeout TIMEOUT] [-b BM_LIST]
     [--inherit-environ VAR_LIST] [-p PYTHON]
+    [--hook HOOK]
 
 options::
 
@@ -146,6 +147,9 @@ options::
     Use the same number of loops as a previous run
     (i.e., don't recalibrate). Should be a path to a
     .json file from a previous run.
+  --hook HOOK
+    Apply the given pyperf hook when running the
+    benchmarks.
 
 show
 ----
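With this change, the new flag can be passed on the command line. A hypothetical invocation (``pystats`` is the pyperf hook exercised by the new test below; ``results.json`` is a placeholder output path)::

    pyperformance run -b all -o results.json --hook pystats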
8 changes: 8 additions & 0 deletions pyperformance/cli.py
@@ -19,6 +19,8 @@
     cmd_compare,
 )
 
+from pyperf import _hooks
+
 
 def comma_separated(values):
     values = [value.strip() for value in values.split(',')]
@@ -93,6 +95,12 @@ def parse_args():
                      help="Specify a timeout in seconds for a single "
                           "benchmark run (default: disabled)",
                      type=check_positive)
+    hook_names = list(_hooks.get_hook_names())
+    cmd.add_argument("--hook",
+                     action="append",
+                     choices=hook_names,
+                     metavar=f"{', '.join(x for x in hook_names if not x.startswith('_'))}",
+                     help="Apply the given pyperf hook(s) when running each benchmark")
     filter_opts(cmd)
 
     # show
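Because the option is registered with ``action="append"``, each ``--hook`` occurrence is appended to a list on the parsed namespace, and the attribute is ``None`` when the flag is never given. A minimal standalone sketch of that argparse behavior (not pyperformance code; ``pystats`` is the only hook name confirmed elsewhere in this commit)::

    import argparse

    parser = argparse.ArgumentParser()
    # In cli.py the choices come from pyperf's _hooks.get_hook_names();
    # here we hard-code the one name this commit's test relies on.
    parser.add_argument("--hook", action="append", choices=["pystats"])

    args = parser.parse_args(["--hook", "pystats", "--hook", "pystats"])
    print(args.hook)                   # ['pystats', 'pystats'] -- repeats accumulate
    print(parser.parse_args([]).hook)  # None -- hence the `if options.hook:` guard in run.py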
3 changes: 3 additions & 0 deletions pyperformance/run.py
@@ -242,5 +242,8 @@ def get_pyperf_opts(options):
         opts.append('--min-time=%s' % options.min_time)
     if options.timeout:
         opts.append('--timeout=%s' % options.timeout)
+    if options.hook:
+        for hook in options.hook:
+            opts.append('--hook=%s' % hook)
 
     return opts
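Taken on its own, the new branch maps each collected hook name to a ``--hook=<name>`` option that is forwarded to pyperf. A small sketch with a stand-in for the parsed options (hypothetical object; the real one is an argparse namespace)::

    from types import SimpleNamespace

    options = SimpleNamespace(hook=["pystats"])  # would be None if --hook were never passed

    opts = []
    if options.hook:
        for hook in options.hook:
            opts.append('--hook=%s' % hook)
    print(opts)  # ['--hook=pystats']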
19 changes: 19 additions & 0 deletions pyperformance/tests/test_commands.py
@@ -173,6 +173,25 @@ def test_run_test_benchmarks(self):
             capture=None,
         )
 
+    def test_run_with_hook(self):
+        # We expect this to fail, since pystats requires a special build of Python
+        filename = self.resolve_tmp('bench-test-hook.json')
+
+        stdout = self.run_pyperformance(
+            'run',
+            '--manifest', os.path.join(tests.DATA_DIR, 'MANIFEST'),
+            '-b', 'all',
+            '-o', filename,
+            '--hook', 'pystats',
+            exitcode=1,
+            capture='combined'
+        )
+
+        self.assertIn(
+            "Can not collect pystats because python was not built with --enable-pystats",
+            stdout
+        )
+
     ###################################
     # compile
 
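The test drives roughly the following command line (``<DATA_DIR>`` standing in for ``tests.DATA_DIR``; the output file actually lives in a temporary directory) and, on a Python not built with ``--enable-pystats``, expects exit code 1 along with pyperf's error message::

    pyperformance run --manifest <DATA_DIR>/MANIFEST -b all -o bench-test-hook.json --hook pystats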
