Skip to content

Commit

Permalink
Make test_codegen.py testable in isolation
Browse files Browse the repository at this point in the history
Requires pytest and astpretty to be installed; then run python3 -m pytest tests/python/codegen/test_codegen.py
  • Loading branch information
ptheywood committed Nov 27, 2023
1 parent 3801865 commit be36fee
Show file tree
Hide file tree
Showing 2 changed files with 68 additions and 58 deletions.
18 changes: 12 additions & 6 deletions tests/python/codegen/test_codegen.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,13 @@
import pytest
import unittest
import ast
import pyflamegpu.codegen
try:
import pyflamegpu.codegen as codegen
except:
# If pyflamegpu is not in the current env, use a local import of just the codegen submodule instead.
import pathlib
sys.path.append(f"{pathlib.Path(__file__).parent}/../../../swig/python/")
import codegen
import astpretty


Expand Down Expand Up @@ -591,7 +597,7 @@ def _checkExpected(self, source, expected):
tree = ast.parse(source)
if DEBUG_OUT:
astpretty.pprint(tree)
code = pyflamegpu.codegen.codegen(tree)
code = codegen.codegen(tree)
# remove new lines
code = code.strip()
expected = expected.strip()
Expand All @@ -606,10 +612,10 @@ def _checkWarning(self, source, expected, warning_str):
assert warning_str in str(record[0].message)

def _checkException(self, source, exception_str):
with pytest.raises(pyflamegpu.codegen.CodeGenException) as e:
with pytest.raises(codegen.CodeGenException) as e:
tree = ast.parse(source.strip())
# code generate
code = pyflamegpu.codegen.codegen(tree)
code = codegen.codegen(tree)
if EXCEPTION_MSG_CHECKING:
assert exception_str in str(e.value)

Expand Down Expand Up @@ -901,7 +907,7 @@ def test_fgpu_agent_func_comments(self):
def test_fgpu_agent_func_input_types(self):
""" Try all the message input types by using a string replacement """
# try all correct types
for msg_type in pyflamegpu.codegen.CodeGenerator.fgpu_message_types:
for msg_type in codegen.CodeGenerator.fgpu_message_types:
py_func = py_fgpu_agent_func.replace("pyflamegpu.MessageNone", msg_type)
cpp_msg_type = msg_type.replace("pyflamegpu.", "flamegpu::")
cpp_output = cpp_fgpu_agent_func.replace("flamegpu::MessageNone", cpp_msg_type)
Expand All @@ -913,7 +919,7 @@ def test_fgpu_agent_func_input_types(self):
def test_fgpu_agent_func_output_types(self):
""" Try all the message output types by using a string replacement """
# try all correct types
for msg_type in pyflamegpu.codegen.CodeGenerator.fgpu_message_types:
for msg_type in codegen.CodeGenerator.fgpu_message_types:
py_func = py_fgpu_agent_func.replace("pyflamegpu.MessageBruteForce", msg_type)
cpp_msg_type = msg_type.replace("pyflamegpu.", "flamegpu::")
cpp_output = cpp_fgpu_agent_func.replace("flamegpu::MessageBruteForce", cpp_msg_type)
Expand Down
108 changes: 56 additions & 52 deletions tests/python/conftest.py
Original file line number Diff line number Diff line change
@@ -1,62 +1,66 @@
import pytest
import os
import sys
import pyflamegpu
try:
import pyflamegpu

"""
Use a pytest class fixture and a pytest sessionfinish hook to handle telemetry
The class fixture is executed per test class for all test classes within this directory or below.
It records the telemetry enabled state, disables telemetry, and then restores telemetry to the original value.
"""
Use a pytest class fixture and a pytest sessionfinish hook to handle telemetry
If telemetry is enabled and more than one test was executed, submit the test results to telemetry deck at session end.
The class fixture is executed per test class for all test classes within this directory or below.
It records the telemetry enabled state, disables telemetry, and then restores telemetry to the original value.
We cannot rely on sessionstart in case the pytest entry point is above this file, so disabling and restoring telemetry per test class is the more reliable option.
"""
If telemetry is enabled and more than one test was executed, submit the test results to telemetry deck at session end.
@pytest.fixture(scope="class", autouse=True)
def class_fixture():
"""Class scoped fixture to disable telemetry, ensuring this is done for all tests below this conftest.py file, regardless of whether the pytest entry point was above this (i.e. it isn't reliable to do in a session_start.)
We cannot rely on sessionstart in case the pytest entry point is above this file, so disabling and restoring telemetry per test class is the more reliable option.
"""
# Get the current value
was_enabled = pyflamegpu.Telemetry.isEnabled()
# Disable telemetry
pyflamegpu.Telemetry.disable()
# Disable the suppression notice
pyflamegpu.Telemetry.suppressNotice()
yield
# Set telemetry back to the original value, this avoids the need for the unreliable session_start call.
if was_enabled:
pyflamegpu.Telemetry.enable()

def pytest_sessionfinish(session, exitstatus):
"""Hook to execute code during session tear down, once all tests have been executed, and the final status is known.
If telemetry is enabled (fixture re-enables if required) submit test result telemetry as long as more than one test was executed (to avoid 3rd party tool test running spamming the API).
"""
# only submit telemetry if it was originally enabled
if pyflamegpu.Telemetry.isEnabled():
# get the terminal reporter to query pass and fails
terminalreporter = session.config.pluginmanager.get_plugin('terminalreporter')
# Exit if the terminalreporter plugin could not be found
if not terminalreporter:
return
outcome = "Passed" if exitstatus == 0 else f"Failed(code={exitstatus})"
passed = len(terminalreporter.stats.get('passed', []))
failed = len(terminalreporter.stats.get('failed', []))
skipped = len(terminalreporter.stats.get('skipped', []))
deselected = len(terminalreporter.stats.get('deselected', []))
total = passed + failed + skipped + deselected
selected = passed + failed
@pytest.fixture(scope="class", autouse=True)
def class_fixture():
"""Class scoped fixture to disable telemetry, ensuring this is done for all tests below this conftest.py file, regardless of whether the pytest entry point was above this (i.e. it isn't reliable to do in a session_start.)
"""
# Get the current value
was_enabled = pyflamegpu.Telemetry.isEnabled()
# Disable telemetry
pyflamegpu.Telemetry.disable()
# Disable the suppression notice
pyflamegpu.Telemetry.suppressNotice()
yield
# Set telemetry back to the original value, this avoids the need for the unreliable session_start call.
if was_enabled:
pyflamegpu.Telemetry.enable()

def pytest_sessionfinish(session, exitstatus):
"""Hook to execute code during session tear down, once all tests have been executed, and the final status is known.
If telemetry is enabled (fixture re-enables if required) submit test result telemetry as long as more than one test was executed (to avoid 3rd party tool test running spamming the API).
"""
# only submit telemetry if it was originally enabled
if pyflamegpu.Telemetry.isEnabled():
# get the terminal reporter to query pass and fails
terminalreporter = session.config.pluginmanager.get_plugin('terminalreporter')
# Exit if the terminalreporter plugin could not be found
if not terminalreporter:
return
outcome = "Passed" if exitstatus == 0 else f"Failed(code={exitstatus})"
passed = len(terminalreporter.stats.get('passed', []))
failed = len(terminalreporter.stats.get('failed', []))
skipped = len(terminalreporter.stats.get('skipped', []))
deselected = len(terminalreporter.stats.get('deselected', []))
total = passed + failed + skipped + deselected
selected = passed + failed

# If telemetry was enabled, and more than 1 test was executed
if selected > 1:
# Send the results to telemetry deck, using the wrapped but privatised method, silently fail if the curl request fails.
pyflamegpu._pyflamegpu.__TestSuiteTelemetry_sendResults("pytest-run"
, outcome
, total
, selected
, skipped
, passed
, failed
, session.config.getoption("verbose") > 0
, True) # True this was from Python
# If telemetry was enabled, and more than 1 test was executed
if selected > 1:
# Send the results to telemetry deck, using the wrapped but privatised method, silently fail if the curl request fails.
pyflamegpu._pyflamegpu.__TestSuiteTelemetry_sendResults("pytest-run"
, outcome
, total
, selected
, skipped
, passed
, failed
, session.config.getoption("verbose") > 0
, True) # True this was from Python

except:
pass

0 comments on commit be36fee

Please sign in to comment.