Removed calls to suppress_tf_logging as cellfinder now uses PyTorch (… #324
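# Lint, check the manifest, run the test matrix and check benchmarks on pushes to main,
# tag pushes, pull requests and manual dispatch; build and publish to PyPI on tag pushes only.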
name: tests

on:
  push:
    branches:
      - "main"
    tags:
      - "*"
  pull_request:
  workflow_dispatch:

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
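  # Lint the codebase using the shared neuroinformatics-unit lint action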
  linting:
    name: Linting
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/lint@v2
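  # Verify that the package manifest is complete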
  manifest:
    name: Check manifest
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/check_manifest@v2
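  # Run the test suite across the OS / Python matrix; linting and manifest checks must pass first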
  test:
    needs: [linting, manifest]
    name: ${{ matrix.os }} py${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        # Run tests on Ubuntu across all supported Python versions
        python-version: ["3.9", "3.10"]
        os: [ubuntu-latest]
        # Include a Windows test and old/new Mac runs
        include:
          - os: macos-13
            python-version: "3.10"
          - os: macos-latest
            python-version: "3.10"
          - os: windows-latest
            python-version: "3.10"
    steps:
      # Cache atlases
      - name: Cache brainglobe directory
        uses: actions/cache@v3
        with:
          path: | # ensure we don't cache any interrupted atlas download and extraction, if e.g. we cancel the workflow manually
            ~/.brainglobe
            !~/.brainglobe/atlas.tar.gz
          key: atlases-models
          fail-on-cache-miss: true
          enableCrossOsArchive: true
      # Install additional macOS dependencies
      - name: Install HDF5 libraries (needed on M1 Macs only)
        if: matrix.os == 'macos-latest'
        run: |
          brew install hdf5
      # Cache cellfinder workflow data
      - name: Cache data for cellfinder workflow tests
        uses: actions/cache@v3
        with:
          path: "~/.brainglobe-tests"
          key: cellfinder-test-data
          fail-on-cache-miss: true
          enableCrossOsArchive: true
      # Run tests
      - uses: neuroinformatics-unit/actions/test@v2
        with:
          python-version: ${{ matrix.python-version }}
          secret-codecov-token: ${{ secrets.CODECOV_TOKEN }}
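  # Check that the asv benchmarks can run against the installed package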
  benchmarks:
    name: Check benchmarks
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.10"
      - name: Install asv
        shell: bash
        run: |
          python -m pip install --upgrade pip
          # We install the project to benchmark because we run `asv check` with the `existing` flag.
          python -m pip install .
          python -m pip install asv
      - name: Run asv check
        shell: bash
        run: |
          cd benchmarks
          # With `existing`, the benchmarked project must already be installed, including all dependencies.
          # See https://asv.readthedocs.io/en/v0.6.3/commands.html#asv-check
          asv check -v -E existing
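  # Build the source distribution and wheels, only for tag pushes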
  build_sdist_wheels:
    name: Build source distribution and wheels
    needs: [test]
    if: github.event_name == 'push' && github.ref_type == 'tag'
    runs-on: ubuntu-latest
    steps:
      - uses: neuroinformatics-unit/actions/build_sdist_wheels@v2
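  # Publish the built distributions to PyPI, only for tag pushes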
  upload_all:
    name: Publish build distributions
    needs: [build_sdist_wheels]
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && github.ref_type == 'tag'
    steps:
      - uses: neuroinformatics-unit/actions/upload_pypi@v2
        with:
          secret-pypi-key: ${{ secrets.TWINE_API_KEY }}