# installation_test.yml
name: unit tests
on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master
  workflow_dispatch:
jobs:
  build-linux:
    defaults:
      run:
        shell: bash -l {0}
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 5
    steps:
      - uses: actions/checkout@v3
      - uses: conda-incubator/setup-miniconda@v2
        with:
          python-version: "3.10"
          mamba-version: "*"
          channels: conda-forge,alchem0x2a,defaults
          channel-priority: true
          activate-environment: sparc-api-test
      - name: Install dependencies
        run: |
          # mamba install -c conda-forge ase>=3.22 pymatgen flake8 pytest
          mamba install -c conda-forge sparc-x
          # pip install pyfakefs
      - name: Install package
        run: |
          pip install -e ".[test]"
          # Download the external psp data
          python -m sparc.download_data
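      # Note: sparc.download_data fetches the external pseudopotential (psp)
      # files the test suite relies on; presumably they land inside the
      # editable install, which is why this runs after `pip install -e`.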
      - name: Download SPARC reference output files into SPARC-master
        run: |
          # Pin the current version of SPARC to versions before MLFF
          # wget https://github.com/SPARC-X/SPARC/archive/refs/heads/master.zip
          # unzip master.zip
          wget -O SPARC-master.zip https://codeload.github.com/SPARC-X/SPARC/zip/3371b4401e4ebca0921fb77a02587f578f3bf3f7
          unzip SPARC-master.zip
          mv SPARC-33* SPARC-master
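      # The pinned commit extracts to SPARC-<full hash>, so the `mv SPARC-33*`
      # glob matches it via the hash prefix; update the glob if the pin changes.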
      - name: Test with pytest
        run: |
          # python -m pytest -svv tests/ --cov=sparc --cov-report=json --cov-report=html
          export SPARC_TESTS_DIR="./SPARC-master/tests"
          export ASE_SPARC_COMMAND="mpirun -n 1 sparc"
          export SPARC_DOC_PATH="./SPARC-master/doc/.LaTeX"
          coverage run -a -m pytest -svv tests/
          coverage json --omit="tests/*.py"
          coverage html --omit="tests/*.py"
          COVERAGE=$(jq .totals.percent_covered coverage.json | xargs printf '%.*f' 0)
          echo "Current coverage is $COVERAGE"
          echo "COVPERCENT=$COVERAGE" >> $GITHUB_ENV
      - name: Lint with flake8
        run: |
          echo $CONDA_PREFIX
          conda info
          flake8 sparc/ --count --select=E9,F63,F7,F82 --show-source --statistics
          flake8 sparc/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: SPARC API version
        run: |
          python -c "from sparc.api import SparcAPI; import os; ver=SparcAPI().sparc_version; os.system(f'echo API_VERSION={ver} >> $GITHUB_ENV')"
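      # Note: $GITHUB_ENV is expanded by the outer shell (double quotes), so
      # the os.system echo inside Python appends API_VERSION=<ver> to the
      # workflow environment file for the badge step below.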
      - name: Create badges
        run: |
          mkdir -p badges/
          # A coverage test badge
          anybadge --value=$COVPERCENT --file=badges/coverage.svg coverage
          # A version badge
          PIPVER=$(pip show sparc-x-api | grep Version | cut -d ' ' -f2)
          anybadge --value=$PIPVER --file=badges/package.svg -l sparc-x-api
          # An api version badge
          anybadge --value=${API_VERSION} --file=badges/api_version.svg -l "json-api version"
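      # anybadge is assumed to be provided by the ".[test]" extras installed
      # earlier; if not, it needs to be added to the test dependencies.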
      - name: Manually update badges branch
        run: |
          # Assuming a badges branch already exists!
          rm -rf /tmp/*.svg && cp badges/*.svg /tmp/
          git fetch
          git switch badges || { echo "Could not check out badges branch. Have you created it on remote?"; exit 1; }
          git pull
          cp /tmp/*.svg badges/ && git add -f badges/*.svg && rm -rf /tmp/*.svg
          git config --global user.email "[email protected]"
          git config --global user.name "Github Action Bot (badges only)"
          git commit -m "Update badges from run ${RID}" || true
          git push -u origin badges || true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          RID: ${{ github.run_id }}
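      # One-time setup if the badges branch is missing (sketch, run locally):
      #   git switch --orphan badges
      #   git commit --allow-empty -m "Init badges branch"
      #   git push -u origin badges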
  test-socket:
    defaults:
      run:
        shell: bash -l {0}
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 5
    steps:
      - uses: actions/checkout@v3
      - uses: conda-incubator/setup-miniconda@v2
        with:
          python-version: "3.10"
          mamba-version: "*"
          channels: conda-forge,alchem0x2a,defaults
          channel-priority: true
          activate-environment: sparc-api-test
      - name: Install dependencies
        run: |
          # mamba install -c conda-forge ase>=3.22 pymatgen flake8 pytest
          mamba install -c conda-forge make compilers openmpi fftw scalapack openblas
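      # Unlike build-linux, this job compiles SPARC from source, so it needs
      # the toolchain (make, compilers) plus the numerical libraries
      # (openmpi, fftw, scalapack, openblas) rather than the prebuilt
      # sparc-x package.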
      - name: Install package
        run: |
          pip install -e ".[test]"
          # Download the external psp data
          python -m sparc.download_data
      - name: Download SPARC source code (socket branch)
        run: |
          # TODO: merge to master
          wget -O SPARC-socket.zip https://codeload.github.com/alchem0x2A/SPARC/zip/refs/heads/socket
          unzip SPARC-socket.zip
      - name: Compile SPARC with socket
        run: |
          cd SPARC-socket/src
          make clean
          make -j2 USE_SOCKET=1 USE_MKL=0 USE_SCALAPACK=1 DEBUG_MODE=1
          ls ../lib
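      # USE_SOCKET=1 enables the socket communication layer under test;
      # USE_MKL=0 with USE_SCALAPACK=1 is presumably chosen to match the
      # conda-forge scalapack/openblas stack installed above. The compiled
      # binary ends up at SPARC-socket/lib/sparc.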
      - name: Test with pytest
        run: |
          ls ./SPARC-socket/lib/sparc
          # Use CWD instead of overwriting the shell's builtin PWD variable
          CWD=$(pwd)
          export SPARC_TESTS_DIR="${CWD}/SPARC-socket/tests"
          export ASE_SPARC_COMMAND="mpirun -n 1 ${CWD}/SPARC-socket/lib/sparc"
          export SPARC_DOC_PATH="${CWD}/SPARC-socket/doc/.LaTeX"
          coverage run -a -m pytest -svv tests/
          coverage json --omit="tests/*.py"
          coverage html --omit="tests/*.py"
          COVERAGE=$(jq .totals.percent_covered coverage.json | xargs printf '%.*f' 0)
          echo "Current coverage is $COVERAGE"
          echo "COVPERCENT=$COVERAGE" >> $GITHUB_ENV
      - name: Lint with flake8
        run: |
          echo $CONDA_PREFIX
          conda info
          flake8 sparc/ --count --select=E9,F63,F7,F82 --show-source --statistics
          flake8 sparc/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
  # To be deleted once the 1.0 release is done
  build-linux-ase-3-22:
    defaults:
      run:
        shell: bash -l {0}
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 5
    steps:
      - uses: actions/checkout@v3
      - uses: conda-incubator/setup-miniconda@v2
        with:
          python-version: "3.10"
          mamba-version: "*"
          channels: conda-forge,alchem0x2a,defaults
          channel-priority: true
          activate-environment: sparc-api-test
      - name: Install dependencies
        run: |
          # mamba install -c conda-forge ase>=3.22 pymatgen flake8 pytest
          mamba install -c conda-forge make compilers openmpi fftw scalapack openblas
      - name: Install package
        run: |
          # Manually downgrade ase and its numpy/scipy dependencies
          pip install -e ".[test]" ase==3.22 numpy==1.24 scipy==1.10
          # Download the external psp data
          python -m sparc.download_data
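      # The numpy/scipy pins are assumed to be the newest versions still
      # compatible with ase 3.22 (which predates numpy 2.x); this job only
      # guards backwards compatibility and goes away after the 1.0 release.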
      - name: Download SPARC source code (socket branch)
        run: |
          # TODO: merge to master
          wget -O SPARC-socket.zip https://codeload.github.com/alchem0x2A/SPARC/zip/refs/heads/socket
          unzip SPARC-socket.zip
      - name: Compile SPARC with socket
        run: |
          cd SPARC-socket/src
          make clean
          make -j2 USE_SOCKET=1 USE_MKL=0 USE_SCALAPACK=1 DEBUG_MODE=1
          ls ../lib
      - name: Test with pytest
        run: |
          ls ./SPARC-socket/lib/sparc
          # Use CWD instead of overwriting the shell's builtin PWD variable
          CWD=$(pwd)
          export SPARC_TESTS_DIR="${CWD}/SPARC-socket/tests"
          export ASE_SPARC_COMMAND="mpirun -n 1 ${CWD}/SPARC-socket/lib/sparc"
          export SPARC_DOC_PATH="${CWD}/SPARC-socket/doc/.LaTeX"
          coverage run -a -m pytest -svv tests/
          coverage json --omit="tests/*.py"
          coverage html --omit="tests/*.py"
          COVERAGE=$(jq .totals.percent_covered coverage.json | xargs printf '%.*f' 0)
          echo "Current coverage is $COVERAGE"
          echo "COVPERCENT=$COVERAGE" >> $GITHUB_ENV
      - name: Lint with flake8
        run: |
          echo $CONDA_PREFIX
          conda info
          flake8 sparc/ --count --select=E9,F63,F7,F82 --show-source --statistics
          flake8 sparc/ --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics