Multi-Plane and Multi-Channel Support for ScanImage #157

Workflow file for this run

name: Full Tests
on:
  schedule:
    - cron: "0 0 * * *"  # daily
  pull_request:
  workflow_dispatch:

jobs:
  run:
    name: Full tests on ${{ matrix.os }} with Python ${{ matrix.python-version }}
    runs-on: ${{ matrix.os }}
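    # fail-fast is disabled so every OS/Python combination runs to completion
    # even if one job fails; "3.10" is quoted because unquoted YAML would parse
    # it as the float 3.1.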
    strategy:
      fail-fast: false
      matrix:
        python-version: [3.8, 3.9, "3.10", 3.11]
        os: [ubuntu-latest, windows-latest, macos-latest]
    steps:
      - uses: s-weigand/setup-conda@v1
      - uses: actions/checkout@v2
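      # checkout@v2 creates a shallow clone; unshallowing and fetching tags is
      # presumably needed so the package version can be derived from git history.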
      - run: git fetch --prune --unshallow --tags
      - name: Setup Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Global Setup
        run: |
          pip install -U pip
          pip install pytest-xdist
          git config --global user.email "[email protected]"
          git config --global user.name "CI Almighty"
          pip install wheel  # needed for scanimage
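      # The suite runs twice: first against the minimal [test] install below,
      # then against the [full] extras, so optional-dependency gaps surface early.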
      - name: Install roiextractors with minimal requirements
        run: pip install .[test]
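      # -n auto spreads tests across all available cores via pytest-xdist;
      # --dist loadscope keeps tests from the same module on a single worker.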
      - name: Run minimal tests
        run: pytest tests/test_internals -n auto --dist loadscope
      - name: Test full installation (-e needed for codecov report)
        run: pip install -e .[full]
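      # The remote HEAD hash of the GIN data repository serves as the cache key,
      # so cached test data is invalidated whenever the dataset changes upstream.
      # (The ::set-output command used below has since been deprecated by GitHub
      # in favor of writing to $GITHUB_OUTPUT.)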
      - name: Get ophys_testing_data current head hash
        id: ophys
        run: echo "::set-output name=HASH_OPHYS_DATASET::$(git ls-remote https://gin.g-node.org/CatalystNeuro/ophys_testing_data.git HEAD | cut -f1)"
      - name: Cache ophys dataset - ${{ steps.ophys.outputs.HASH_OPHYS_DATASET }}
        uses: actions/cache@v2
        id: cache-ophys-datasets
        with:
          path: ./ophys_testing_data
          key: ophys-datasets-042023-${{ matrix.os }}-${{ steps.ophys.outputs.HASH_OPHYS_DATASET }}
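      # The two steps below run only on a cache miss. cache-hit is a string
      # output ('true' or empty), so the conventional guard is
      # cache-hit != 'true'; the == false comparison works via GitHub's loose
      # type coercion.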
      - if: steps.cache-ophys-datasets.outputs.cache-hit == false
        name: Install and configure AWS CLI
        run: |
          pip install awscli==1.25.27
          aws configure set aws_access_key_id ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws configure set aws_secret_access_key ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      - if: steps.cache-ophys-datasets.outputs.cache-hit == false
        name: Download data from S3
        run: aws s3 cp --recursive s3://${{ secrets.S3_GIN_BUCKET }}/ophys_testing_data ./ophys_testing_data
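      # The full suite runs again with coverage enabled; codecov.xml lands in
      # the repository root for the upload step below.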
      - name: Run full pytest with coverage
        run: pytest -n auto --dist loadscope --cov=./ --cov-report xml:./codecov.xml
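      # Coverage is uploaded from a single matrix cell (Python 3.9 on
      # ubuntu-latest) to avoid duplicate reports from the other combinations.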
      - if: ${{ matrix.python-version == '3.9' && matrix.os == 'ubuntu-latest' }}
        name: Upload full coverage to Codecov
        uses: codecov/codecov-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ./codecov.xml
          flags: unittests
          name: codecov-umbrella
          yml: ./codecov.yml