Commit f66a3b2

Merge branch 'main' into add-tests-for-qm-structure

chrishalcrow authored Jul 2, 2024
2 parents 6529bd3 + 6dbf7c6
Showing 264 changed files with 9,909 additions and 3,092 deletions.
23 changes: 0 additions & 23 deletions .github/actions/show-test-environment/action.yml

This file was deleted.

118 changes: 118 additions & 0 deletions .github/determine_testing_environment.py
@@ -0,0 +1,118 @@
from pathlib import Path
import argparse
import os


# We get the list of changed files as input
parser = argparse.ArgumentParser()
parser.add_argument("changed_files_in_the_pull_request", nargs="*", help="List of changed files")
args = parser.parse_args()

changed_files_in_the_pull_request = args.changed_files_in_the_pull_request
changed_files_in_the_pull_request_paths = [Path(file) for file in changed_files_in_the_pull_request]

# We assume nothing has been changed

core_changed = False
pyproject_toml_changed = False
neobaseextractor_changed = False
extractors_changed = False
plexon2_changed = False
preprocessing_changed = False
postprocessing_changed = False
qualitymetrics_changed = False
sorters_changed = False
sorters_external_changed = False
sorters_internal_changed = False
comparison_changed = False
curation_changed = False
widgets_changed = False
exporters_changed = False
sortingcomponents_changed = False
generation_changed = False


for changed_file in changed_files_in_the_pull_request_paths:

    file_is_in_src = changed_file.parts[0] == "src"

    if not file_is_in_src:

        if changed_file.name == "pyproject.toml":
            pyproject_toml_changed = True

    else:
        if changed_file.name == "neobaseextractor.py":
            neobaseextractor_changed = True
        elif changed_file.name == "plexon2.py":
            extractors_changed = True
            plexon2_changed = True
        elif "core" in changed_file.parts:
            core_changed = True
        elif "extractors" in changed_file.parts:
            extractors_changed = True
        elif "preprocessing" in changed_file.parts:
            preprocessing_changed = True
        elif "postprocessing" in changed_file.parts:
            postprocessing_changed = True
        elif "qualitymetrics" in changed_file.parts:
            qualitymetrics_changed = True
        elif "comparison" in changed_file.parts:
            comparison_changed = True
        elif "curation" in changed_file.parts:
            curation_changed = True
        elif "widgets" in changed_file.parts:
            widgets_changed = True
        elif "exporters" in changed_file.parts:
            exporters_changed = True
        elif "sortingcomponents" in changed_file.parts:
            sortingcomponents_changed = True
        elif "generation" in changed_file.parts:
            generation_changed = True
        elif "sorters" in changed_file.parts:
            if "external" in changed_file.parts:
                sorters_external_changed = True
            elif "internal" in changed_file.parts:
                sorters_internal_changed = True
            else:
                sorters_changed = True


run_everything = core_changed or pyproject_toml_changed or neobaseextractor_changed
run_generation_tests = run_everything or generation_changed
run_extractor_tests = run_everything or extractors_changed
run_preprocessing_tests = run_everything or preprocessing_changed
run_postprocessing_tests = run_everything or postprocessing_changed
run_qualitymetrics_tests = run_everything or qualitymetrics_changed
run_curation_tests = run_everything or curation_changed
run_sortingcomponents_tests = run_everything or sortingcomponents_changed

run_comparison_test = run_everything or run_generation_tests or comparison_changed
run_widgets_test = run_everything or run_qualitymetrics_tests or run_preprocessing_tests or widgets_changed
run_exporters_test = run_everything or run_widgets_test or exporters_changed

run_sorters_test = run_everything or sorters_changed
run_internal_sorters_test = run_everything or run_sortingcomponents_tests or sorters_internal_changed

install_plexon_dependencies = plexon2_changed

environment_variables_to_add = {
    "RUN_EXTRACTORS_TESTS": run_extractor_tests,
    "RUN_PREPROCESSING_TESTS": run_preprocessing_tests,
    "RUN_POSTPROCESSING_TESTS": run_postprocessing_tests,
    "RUN_QUALITYMETRICS_TESTS": run_qualitymetrics_tests,
    "RUN_CURATION_TESTS": run_curation_tests,
    "RUN_SORTINGCOMPONENTS_TESTS": run_sortingcomponents_tests,
    "RUN_GENERATION_TESTS": run_generation_tests,
    "RUN_COMPARISON_TESTS": run_comparison_test,
    "RUN_WIDGETS_TESTS": run_widgets_test,
    "RUN_EXPORTERS_TESTS": run_exporters_test,
    "RUN_SORTERS_TESTS": run_sorters_test,
    "RUN_INTERNAL_SORTERS_TESTS": run_internal_sorters_test,
    "INSTALL_PLEXON_DEPENDENCIES": install_plexon_dependencies,
}

# Write the conditions to the GITHUB_ENV file
env_file = os.getenv("GITHUB_ENV")
with open(env_file, "a") as f:
    for key, value in environment_variables_to_add.items():
        f.write(f"{key}={value}\n")
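
To make the flow concrete, here is a minimal sketch of how a workflow step could drive this script, assuming it runs from the repository root inside a GitHub Actions job (where GITHUB_ENV points at a writable file). The changed-file paths and expected output are illustrative, not taken from a real pull request:

```python
# Hypothetical driver: run the script on a made-up changed-file list and
# print what it appends to the GITHUB_ENV file.
import os
import subprocess
import tempfile

changed_files = [
    "src/spikeinterface/preprocessing/filter.py",  # maps to preprocessing
    "doc/how_to/analyse_neuropixels.rst",  # not under src/: ignored
]

env_path = os.path.join(tempfile.mkdtemp(), "github.env")

subprocess.run(
    ["python", ".github/determine_testing_environment.py", *changed_files],
    env={**os.environ, "GITHUB_ENV": env_path},
    check=True,
)

with open(env_path) as f:
    print(f.read())
# RUN_PREPROCESSING_TESTS=True, plus the flags that cascade from it
# (RUN_WIDGETS_TESTS=True, RUN_EXPORTERS_TESTS=True); the rest are False.
```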
39 changes: 27 additions & 12 deletions .github/import_test.py
@@ -18,37 +18,52 @@

 n_samples = 10
 # Note that the symbols at the end are for centering the table
-markdown_output = f"## \n\n| Imported Module ({n_samples=}) | Importing Time (seconds) | Standard Deviation (seconds) |\n| :--: | :--------------: | :------------------: |\n"
+markdown_output = f"## \n\n| Imported Module ({n_samples=}) | Importing Time (seconds) | Standard Deviation (seconds) | Times List (seconds) |\n| :--: | :--------------: | :------------------: | :-------------: |\n"

 exceptions = []

 for import_statement in import_statement_list:
     time_taken_list = []
     for _ in range(n_samples):
         script_to_execute = (
-        f"import timeit \n"
-        f"import_statement = '{import_statement}' \n"
-        f"time_taken = timeit.timeit(import_statement, number=1) \n"
-        f"print(time_taken) \n"
-        )
+            f"import timeit \n"
+            f"import_statement = '{import_statement}' \n"
+            f"time_taken = timeit.timeit(import_statement, number=1) \n"
+            f"print(time_taken) \n"
+        )

         result = subprocess.run(["python", "-c", script_to_execute], capture_output=True, text=True)

         if result.returncode != 0:
-            error_message = (
-                f"Error when running {import_statement} \n"
-                f"Error in subprocess: {result.stderr.strip()}\n"
-            )
+            error_message = (
+                f"Error when running {import_statement} \n" f"Error in subprocess: {result.stderr.strip()}\n"
+            )
             exceptions.append(error_message)
             break

         time_taken = float(result.stdout.strip())
         time_taken_list.append(time_taken)

-    for time in time_taken_list:
-        import_time_threshold = 2.0  # Most of the times is sub-second but there outliers
-        if time >= import_time_threshold:
-            exceptions.append(
-                f"Importing {import_statement} took: {time:.2f} s. Should be <: {import_time_threshold} s."
-            )
-            break

     if time_taken_list:
-        avg_time_taken = sum(time_taken_list) / len(time_taken_list)
-        std_dev_time_taken = math.sqrt(sum((x - avg_time_taken) ** 2 for x in time_taken_list) / len(time_taken_list))
-        markdown_output += f"| `{import_statement}` | {avg_time_taken:.2f} | {std_dev_time_taken:.2f} |\n"
+        avg_time = sum(time_taken_list) / len(time_taken_list)
+        std_time = math.sqrt(sum((x - avg_time) ** 2 for x in time_taken_list) / len(time_taken_list))
+        times_list_str = ", ".join(f"{time:.2f}" for time in time_taken_list)
+        markdown_output += f"| `{import_statement}` | {avg_time:.2f} | {std_time:.2f} | {times_list_str} |\n"
+
+        import_time_threshold = 1.0
+        if avg_time > import_time_threshold:
+            exceptions.append(
+                f"Importing {import_statement} took: {avg_time:.2f} s in average. Should be <: {import_time_threshold} s."
+            )

 if exceptions:
     raise Exception("\n".join(exceptions))
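
The subprocess-per-sample structure is the important detail here: timing the import with `timeit` in a single interpreter would only measure a cold import once, because every later `import` is served from the `sys.modules` cache. A minimal standalone sketch of the same measurement (the timed module is a stand-in, not one the script actually checks):

```python
# Standalone sketch of cold-import timing: each sample runs in a fresh
# interpreter, so sys.modules caching cannot make later imports look free.
import math
import subprocess

import_statement = "import json"  # stand-in for the spikeinterface modules
n_samples = 10

samples = []
for _ in range(n_samples):
    script = f"import timeit\nprint(timeit.timeit({import_statement!r}, number=1))"
    result = subprocess.run(["python", "-c", script], capture_output=True, text=True, check=True)
    samples.append(float(result.stdout.strip()))

avg = sum(samples) / len(samples)
std = math.sqrt(sum((x - avg) ** 2 for x in samples) / len(samples))  # population std dev
print(f"avg={avg:.3f}s std={std:.3f}s")
```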
7 changes: 6 additions & 1 deletion .github/run_tests.sh
@@ -1,8 +1,13 @@
 #!/bin/bash

 MARKER=$1
+NOVIRTUALENV=$2

-source $GITHUB_WORKSPACE/test_env/bin/activate
+# Check if the second argument is provided and if it is equal to --no-virtual-env
+if [ -z "$NOVIRTUALENV" ] || [ "$NOVIRTUALENV" != "--no-virtual-env" ]; then
+    source $GITHUB_WORKSPACE/test_env/bin/activate
+fi
+
 pytest -m "$MARKER" -vv -ra --durations=0 --durations-min=0.001 | tee report.txt; test ${PIPESTATUS[0]} -eq 0 || exit 1
 echo "# Timing profile of ${MARKER}" >> $GITHUB_STEP_SUMMARY
 python $GITHUB_WORKSPACE/.github/build_job_summary.py report.txt >> $GITHUB_STEP_SUMMARY
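
Usage stays backward compatible: `bash .github/run_tests.sh <marker>` activates the `test_env` virtual environment as before, while `bash .github/run_tests.sh <marker> --no-virtual-env` skips the activation for workflow steps that already manage their own Python environment.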