name: Full spikeinterface tests codecov
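# This workflow only runs when triggered manually (from the Actions tab or through the GitHub API).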
on: workflow_dispatch
jobs:
build-and-test:
name: Codecov
runs-on: "ubuntu-latest"
steps:
      - uses: actions/checkout@v2
      # python-version is an input of actions/setup-python, not actions/checkout,
      # so Python is set up in its own step
      - uses: actions/setup-python@v2
        with:
          python-version: 3.8
- name: Get current year-month
id: date
run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
- uses: actions/cache@v2
id: cache-venv
with:
path: ~/test_env
          key: ${{ runner.os }}-venv-${{ hashFiles('**/requirements.txt') }}-${{ hashFiles('**/requirements_full.txt') }}-${{ hashFiles('**/requirements_extractors.txt') }}-${{ hashFiles('**/requirements_test.txt') }}-${{ steps.date.outputs.date }}
- name: Install dependencies
run: |
sudo apt update
          # git is needed for DataLad and for downloading the test datasets
sudo apt install git
# needed for correct operation of git/git-annex/DataLad
git config --global user.email "[email protected]"
git config --global user.name "CI Almighty"
# this is for spyking circus
sudo apt install mpich libmpich-dev
          # create a virtual environment (better for caching)
python -m venv ~/test_env
source ~/test_env/bin/activate
python -m pip install --upgrade pip
pip install setuptools wheel twine
          # clear some cached wheels to avoid reusing stale builds
pip cache remove numpy
pip cache remove hdbscan
pip cache remove numba
          # TODO: remove this pin after the next neo release,
          # because it forces some versions (numpy/numba/hdbscan...)
pip install numpy==1.20
pip install -r requirements_test.txt
pip install -r requirements.txt
pip install -r requirements_full.txt
pip install -r requirements_extractors.txt
pip install -e .
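      # git-annex (standalone build) is required by DataLad to fetch the large test datasets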
- name: git-annex install
run: |
wget https://downloads.kitenet.net/git-annex/linux/current/git-annex-standalone-amd64.tar.gz
tar xvzf git-annex-standalone-amd64.tar.gz
echo "$(pwd)/git-annex.linux" >> $GITHUB_PATH
- name: git-annex version
run: |
git-annex version
- name: pip list
run: |
source ~/test_env/bin/activate
pip list
- name: Get ephy_testing_data current head hash
        # the cache key depends on the last commit of https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
id: vars
run: |
echo "HASH_EPHY_DATASET=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
- uses: actions/cache@v2
id: cache-datasets
        # the cache key depends on the last commit of https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git,
        # captured by the vars step above (env values are not shell-evaluated, so the hash cannot be computed here)
with:
path: ~/spikeinterface_datasets
key: ${{ runner.os }}-datasets-${{ steps.vars.outputs.HASH_EPHY_DATASET }}
restore-keys: |
${{ runner.os }}-datasets
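      # run the full test suite with coverage; coverage.xml is uploaded by the codecov step below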
- name: run tests
run: |
source ~/test_env/bin/activate
pytest --cov=./ --cov-report xml:./coverage.xml
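      # upload the coverage report to Codecov; fail_ci_if_error makes a failed upload fail the job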
- uses: codecov/codecov-action@v2
with:
token: ${{ secrets.CODECOV_TOKEN }}
version: "v0.1.15"
fail_ci_if_error: true
file: ./coverage.xml
flags: unittests
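# A minimal way to trigger this workflow from the command line, assuming the file lives at
# .github/workflows/codecov.yml and the GitHub CLI is installed and authenticated:
#
#   gh workflow run codecov.yml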