CI: run test_codegen.py #1154

Draft pull request: wants to merge 3 commits into base branch master.
57 changes: 57 additions & 0 deletions .github/workflows/test_codegen.yml
@@ -0,0 +1,57 @@
# Test codegen.py in isolation from CUDA.
name: test_codegen.py

on:
  # Branch pushes to codegen related files
  push:
    branches:
      - '**'
    paths:
      - ".github/workflows/test_codegen.yml"
      - "swig/python/codegen/**"
      - "tests/python/codegen/test_codegen.py"
  # Allow manual invocation.
  workflow_dispatch:

defaults:
  run:
    shell: bash

jobs:
  configure:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: false
      # Multiplicative build matrix
      matrix:
        python:
          - "3.8"
          - "3.9"
          - "3.10"
          - "3.11"
          - "3.12"

    # Name the job based on matrix/env options
    name: "test_codegen ${{ matrix.python }}"

    # Define job-wide env constants, and promote matrix elements to env constants for portable steps.
    env:
      PYTHON: ${{ matrix.python }}

    steps:
      - uses: actions/checkout@v3

      - name: Select Python
        if: ${{ env.PYTHON != '' }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ env.PYTHON }}

      - name: Install python dependencies
        if: ${{ env.PYTHON != '' }}
        run: |
          python3 -m pip install --upgrade pytest astpretty

      - name: Run pytest on codegen.py
        run: |
          python3 -m pytest tests/python/codegen/test_codegen.py -v
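The workflow above only needs pytest and astpretty, so the same check can be reproduced locally without CUDA or a built pyflamegpu. A minimal sketch, assuming the working directory is the repository root and both packages are installed:

# Hedged sketch: run the codegen tests the same way the workflow's final step does.
# Equivalent to: python3 -m pytest tests/python/codegen/test_codegen.py -v
import sys
import pytest

exit_code = pytest.main(["-v", "tests/python/codegen/test_codegen.py"])
sys.exit(exit_code)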
18 changes: 12 additions & 6 deletions tests/python/codegen/test_codegen.py
@@ -4,7 +4,13 @@
import pytest
import unittest
import ast
import pyflamegpu.codegen
try:
    import pyflamegpu.codegen as codegen
except:
    # If pyflamegpu is not in the current env, use a local import of just the codegen submodule instead.
    import pathlib
    sys.path.append(f"{pathlib.Path(__file__).parent}/../../../swig/python/")
    import codegen
import astpretty


@@ -591,7 +597,7 @@ def _checkExpected(self, source, expected):
        tree = ast.parse(source)
        if DEBUG_OUT:
            astpretty.pprint(tree)
        code = pyflamegpu.codegen.codegen(tree)
        code = codegen.codegen(tree)
        # remove new lines
        code = code.strip()
        expected = expected.strip()
@@ -606,10 +612,10 @@ def _checkWarning(self, source, expected, warning_str):
        assert warning_str in str(record[0].message)

    def _checkException(self, source, exception_str):
        with pytest.raises(pyflamegpu.codegen.CodeGenException) as e:
        with pytest.raises(codegen.CodeGenException) as e:
            tree = ast.parse(source.strip())
            # code generate
            code = pyflamegpu.codegen.codegen(tree)
            code = codegen.codegen(tree)
        if EXCEPTION_MSG_CHECKING:
            assert exception_str in str(e.value)

@@ -901,7 +907,7 @@ def test_fgpu_agent_func_comments(self):
    def test_fgpu_agent_func_input_types(self):
        """ Try all the message input types by using a string replacement """
        # try all correct types
        for msg_type in pyflamegpu.codegen.CodeGenerator.fgpu_message_types:
        for msg_type in codegen.CodeGenerator.fgpu_message_types:
            py_func = py_fgpu_agent_func.replace("pyflamegpu.MessageNone", msg_type)
            cpp_msg_type = msg_type.replace("pyflamegpu.", "flamegpu::")
            cpp_output = cpp_fgpu_agent_func.replace("flamegpu::MessageNone", cpp_msg_type)
@@ -913,7 +919,7 @@ def test_fgpu_agent_func_input_types(self):
    def test_fgpu_agent_func_output_types(self):
        """ Try all the message output types by using a string replacement """
        # try all correct types
        for msg_type in pyflamegpu.codegen.CodeGenerator.fgpu_message_types:
        for msg_type in codegen.CodeGenerator.fgpu_message_types:
            py_func = py_fgpu_agent_func.replace("pyflamegpu.MessageBruteForce", msg_type)
            cpp_msg_type = msg_type.replace("pyflamegpu.", "flamegpu::")
            cpp_output = cpp_fgpu_agent_func.replace("flamegpu::MessageBruteForce", cpp_msg_type)
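The try/except import at the top of test_codegen.py means the pure-Python codegen module can also be exercised directly from the SWIG source tree, without building pyflamegpu. A minimal sketch of standalone use, assuming it is run from the repository root; the example agent function is illustrative and follows the patterns the tests exercise:

# Hedged sketch: translate a Python agent function to C++ using only the codegen submodule.
import ast
import pathlib
import sys

# Mirror the test's fallback path so the codegen package resolves without pyflamegpu.
sys.path.append(str(pathlib.Path("swig/python")))
import codegen

# Illustrative agent function; unsupported constructs raise codegen.CodeGenException.
source = """
@pyflamegpu.agent_function
def example_func(message_in: pyflamegpu.MessageNone, message_out: pyflamegpu.MessageNone):
    return pyflamegpu.ALIVE
"""
tree = ast.parse(source)
print(codegen.codegen(tree))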
108 changes: 56 additions & 52 deletions tests/python/conftest.py
@@ -1,62 +1,66 @@
import pytest
import os
import sys
import pyflamegpu
try:
    import pyflamegpu

"""
Use pytest a pytest class fixture and a pytest sessionfinish hooks to handle telemetry

The class fixture is executed per test class for all test classes within this directory or below.
It records the telemetry enabled state, disables telemetry, and then restores telemetry to the original value.
"""
Use pytest a pytest class fixture and a pytest sessionfinish hooks to handle telemetry

If telemetry is enabled and more than one test was executed, submit the test results to telemetry deck at session end.
The class fixture is executed per test class for all test classes within this directory or below.
It records the telemetry enabled state, disables telemetry, and then restores telemetry to the original value.

We cannot rely on sessionstart incase the pytest entry point is above this file, so disabling and restoring telemetry per test class is the more reliable option.
"""
If telemetry is enabled and more than one test was executed, submit the test results to telemetry deck at session end.

@pytest.fixture(scope="class", autouse=True)
def class_fixture():
"""Class scoped fixture to disable telemetry, ensuring this is done for all tests below this conftest.py file, regardless of whether the pytest entry point was above this (i.e. it isn't reliable to do in a session_start.)
We cannot rely on sessionstart incase the pytest entry point is above this file, so disabling and restoring telemetry per test class is the more reliable option.
"""
# Get the current value
was_enabled = pyflamegpu.Telemetry.isEnabled()
# Disable telemetry
pyflamegpu.Telemetry.disable()
# Disable the suppression notice
pyflamegpu.Telemetry.suppressNotice()
yield
# Set telemetry back to the original value, this avoids the need for the unreliable session_start call.
if was_enabled:
pyflamegpu.Telemetry.enable()

def pytest_sessionfinish(session, exitstatus):
    """Hook to execute code during session tear down, once all tests have been executed, and the final status is known.
    If telemetry is enabled (fixture re-enables if required) submit test result telemetry as long as more than one test was executed (to avoid 3rd party test runners spamming the API).
    """
    # only submit telemetry if it was originally enabled
    if pyflamegpu.Telemetry.isEnabled():
        # get the terminal reporter to query pass and fails
        terminalreporter = session.config.pluginmanager.get_plugin('terminalreporter')
        # Exit if the terminalreporter plugin could not be found
        if not terminalreporter:
            return
        outcome = "Passed" if exitstatus == 0 else f"Failed(code={exitstatus})"
        passed = len(terminalreporter.stats.get('passed', []))
        failed = len(terminalreporter.stats.get('failed', []))
        skipped = len(terminalreporter.stats.get('skipped', []))
        deselected = len(terminalreporter.stats.get('deselected', []))
        total = passed + failed + skipped + deselected
        selected = passed + failed
    @pytest.fixture(scope="class", autouse=True)
    def class_fixture():
        """Class scoped fixture to disable telemetry, ensuring this is done for all tests below this conftest.py file, regardless of whether the pytest entry point was above this (i.e. it isn't reliable to do in a session_start.)
        """
        # Get the current value
        was_enabled = pyflamegpu.Telemetry.isEnabled()
        # Disable telemetry
        pyflamegpu.Telemetry.disable()
        # Suppress the telemetry notice
        pyflamegpu.Telemetry.suppressNotice()
        yield
        # Set telemetry back to the original value, this avoids the need for the unreliable session_start call.
        if was_enabled:
            pyflamegpu.Telemetry.enable()

    def pytest_sessionfinish(session, exitstatus):
        """Hook to execute code during session tear down, once all tests have been executed, and the final status is known.
        If telemetry is enabled (fixture re-enables if required) submit test result telemetry as long as more than one test was executed (to avoid 3rd party test runners spamming the API).
        """
        # only submit telemetry if it was originally enabled
        if pyflamegpu.Telemetry.isEnabled():
            # get the terminal reporter to query pass and fails
            terminalreporter = session.config.pluginmanager.get_plugin('terminalreporter')
            # Exit if the terminalreporter plugin could not be found
            if not terminalreporter:
                return
            outcome = "Passed" if exitstatus == 0 else f"Failed(code={exitstatus})"
            passed = len(terminalreporter.stats.get('passed', []))
            failed = len(terminalreporter.stats.get('failed', []))
            skipped = len(terminalreporter.stats.get('skipped', []))
            deselected = len(terminalreporter.stats.get('deselected', []))
            total = passed + failed + skipped + deselected
            selected = passed + failed

        # If telemetry was enabled, and more than 1 test was executed
        if selected > 1:
            # Send the results to telemetry deck, using the wrapped but privatised method, silently fail if the curl request fails.
            pyflamegpu._pyflamegpu.__TestSuiteTelemetry_sendResults("pytest-run"
                , outcome
                , total
                , selected
                , skipped
                , passed
                , failed
                , session.config.getoption("verbose") > 0
                , True) # True this was from Python
            # If telemetry was enabled, and more than 1 test was executed
            if selected > 1:
                # Send the results to telemetry deck, using the wrapped but privatised method, silently fail if the curl request fails.
                pyflamegpu._pyflamegpu.__TestSuiteTelemetry_sendResults("pytest-run"
                    , outcome
                    , total
                    , selected
                    , skipped
                    , passed
                    , failed
                    , session.config.getoption("verbose") > 0
                    , True) # True this was from Python

except:
    pass
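For reference, the conftest.py change boils down to one pattern: only register the telemetry fixture and hook when the optional pyflamegpu import succeeds, so collection of the CUDA-free codegen tests never fails. A condensed, hedged illustration of that pattern (the fixture name and the explicit ImportError catch are illustrative; the PR itself uses a bare except):

import pytest

try:
    import pyflamegpu  # only available in a full FLAMEGPU build

    @pytest.fixture(scope="class", autouse=True)
    def telemetry_guard():
        # Record, disable, then restore the telemetry state around each test class.
        was_enabled = pyflamegpu.Telemetry.isEnabled()
        pyflamegpu.Telemetry.disable()
        yield
        if was_enabled:
            pyflamegpu.Telemetry.enable()
except ImportError:
    # Without pyflamegpu there is no telemetry to manage, so no fixtures or hooks are defined.
    pass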