Dropping DAE/ODE labels from many things. #199

Workflow file for this run

name: Test and release
on:
push:
jobs:
test:
name: Run unit tests on the ontology
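# Skip this job when the commit message contains "[Skip CI]" (used by the automated documentation commit further below)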
if: "!contains(github.event.head_commit.message, '[Skip CI]')"
runs-on: ubuntu-latest
steps:
# Check out the repository
- name: Checkout repo
uses: actions/checkout@master
with:
fetch-depth: 1
# Install the requirements for the tests
- name: Install Python
uses: actions/setup-python@v2
with:
python-version: '3.9'
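# Cache the hosted-toolcache Python directory so previously installed packages can be reused;
# the key changes whenever either requirements file changes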
- name: Setup pip cache
uses: actions/cache@v2
with:
path: /opt/hostedtoolcache/Python
key: ${{ runner.os }}-pip-${{ hashFiles('libkisao/python/requirements.txt') }}-${{ hashFiles('libkisao/python/tests/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install pip and setuptools
run: |
python -m pip install --upgrade pip==23.0.0
python -m pip install --upgrade setuptools
- name: Install the Python KiSAO package
working-directory: libkisao/python
run: |
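# Copy the ontology into the package directory so it is bundled with the installed package,
# then install the package with all optional extras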
cp ../../kisao.owl kisao/
python -m pip install .[all]
# Lint the package
- name: Install flake8
run: python -m pip install flake8
- name: Lint the package
working-directory: libkisao/python
run: python -m flake8
# Run the tests
- name: Install the requirements for the tests
working-directory: libkisao/python
run: |
python -m pip install pytest pytest-cov
python -m pip install -r tests/requirements.txt
- name: Run the tests
working-directory: libkisao/python
run: python -m pytest tests/ --cov
# Compile documentation
- name: Install the requirements for compiling the documentation
working-directory: libkisao/python
run: python -m pip install -r docs-src/requirements.txt
- name: Compile the documentation
working-directory: libkisao/python
run: |
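# Generate API stubs with sphinx-apidoc (excluding setup.py), then build the documentation from docs-src into docs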
sphinx-apidoc . setup.py --output-dir docs-src/source --force --module-first --no-toc
mkdir -p docs-src/_static
sphinx-build docs-src docs
- name: Compile a report of the substitutability among algorithms
working-directory: libkisao/python
shell: python
run: |
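# Build the algorithm substitutability matrix and save it alongside the compiled documentation as a CSV file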
import kisao.utils
import os
report = kisao.utils.get_algorithm_substitution_matrix()
report.to_csv(os.path.join('docs', 'algorithm-substitutability.csv'))
- name: Upload the coverage report to Codecov
if: false
uses: codecov/codecov-action@v2
with:
token: ${{ secrets.CODECOV_TOKEN }}
flags: unittests
file: ./libkisao/python/coverage.xml
getMainBranchHeadRevision:
name: Get the head revision of the main branch
if: startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
outputs:
mainBranchHeadRevision: ${{ steps.get-main-branch.outputs.mainBranchHeadRevision }}
steps:
- name: Clone repository
run: |
git clone https://${{ github.actor }}:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }} .
- id: get-main-branch
name: Determine main branch
run: |
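# Read the default branch name from origin/HEAD and expose the commit it currently points to as a job output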
mainBranch=$(git symbolic-ref refs/remotes/origin/HEAD | cut -d '/' -f 4)
mainBranchHeadRevision=$(git rev-parse refs/remotes/origin/${mainBranch})
echo "mainBranchHeadRevision=$mainBranchHeadRevision" >> $GITHUB_OUTPUT
determineIfReleaseNeeded:
name: Determine if the package should be released
needs:
- getMainBranchHeadRevision
if: startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
outputs:
release: ${{ steps.determine-if-release-needed.outputs.release }}
steps:
# Check out the repository
- name: Checkout repo
uses: actions/checkout@master
with:
fetch-depth: 1
- id: determine-if-release-needed
name: Determine if a release should be made
run: |
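# Release only when the pushed tag points at the same commit as the head of the main branch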
release="0"
if [[ "${{ github.ref }}" =~ ^refs/tags/ ]]; then
echo "First check passed"
tag_hash=$(git rev-parse "${{ github.ref }}")
echo "tag_hash: $tag_hash"
if [ "$tag_hash" == "${{ needs.getMainBranchHeadRevision.outputs.mainBranchHeadRevision }}" ]; then
echo "Second check passed"
release="1"
fi
fi
echo "release=$release" >> $GITHUB_OUTPUT
echo "release=$release"
commitCompiledDocumentation:
name: Commit and push compiled documentation to GitHub
needs:
- test
- determineIfReleaseNeeded
if: startsWith(github.ref, 'refs/tags/') && needs.determineIfReleaseNeeded.outputs.release == '1'
runs-on: ubuntu-latest
steps:
# Check out the repository
- name: Checkout repo
uses: actions/checkout@master
with:
fetch-depth: 1
# ref: dev
# token: ${{ secrets.ADMIN_GITHUB_TOKEN }}
# Install the requirements for the tests
- name: Install Python
uses: actions/setup-python@v2
with:
python-version: '3.9'
- name: Setup pip cache
uses: actions/cache@v2
with:
path: /opt/hostedtoolcache/Python
key: ${{ runner.os }}-pip-${{ hashFiles('libkisao/python/requirements.txt') }}-${{ hashFiles('libkisao/python/tests/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install pip and setuptools
run: |
python -m pip install --upgrade pip==23.0.0
python -m pip install --upgrade setuptools
- name: Install the Python KiSAO package
working-directory: libkisao/python
run: |
cp ../../kisao.owl kisao/
python -m pip install .[all]
- name: Install the requirements for compiling the documentation
working-directory: libkisao/python
run: python -m pip install -r docs-src/requirements.txt
- name: Compile the documentation
working-directory: libkisao/python
run: |
sphinx-apidoc . setup.py --output-dir docs-src/source --force --module-first --no-toc
mkdir -p docs-src/_static
sphinx-build docs-src docs
- name: Compile a report of the substitutability among algorithms
working-directory: libkisao/python
shell: python
run: |
import kisao.utils
import os
report = kisao.utils.get_algorithm_substitution_matrix()
report.to_csv(os.path.join('docs', 'algorithm-substitutability.csv'))
# If new tag, commit and push documentation
- name: Configure Git
run: |
git config --local user.email "[email protected]"
git config --local user.name "SED-ML bot"
git config pull.rebase false
- id: commit-docs
name: Commit the compiled documentation
working-directory: libkisao/python
run: |
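# Stash the freshly compiled docs, pull the latest changes, re-apply the stash, and commit the docs;
# record whether the commit succeeded (i.e. whether the docs actually changed)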
git stash
git pull
set +e
git stash pop
git add docs
git commit -m "Updated compiled documentation [Skip CI]"
# Capture the commit status before the clean-up commands below overwrite $?
commitSucceeded=$?
git checkout .
git clean -f -d
if [[ $commitSucceeded = 0 ]]; then
docsChanged=1
else
docsChanged=0
fi
echo "docsChanged=$docsChanged" >> $GITHUB_OUTPUT
- name: Push the compiled documentation
if: startsWith(github.ref, 'refs/tags/') && steps.commit-docs.outputs.docsChanged == '1'
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.ADMIN_GITHUB_TOKEN }}
branch: ${{ steps.get-main-branch.outputs.mainBranch }}
createGitHubRelease:
name: Create GitHub release
needs:
- test
- determineIfReleaseNeeded
if: startsWith(github.ref, 'refs/tags/') && needs.determineIfReleaseNeeded.outputs.release == '1'
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@master
with:
fetch-depth: 1
# ref: dev
# token: ${{ secrets.ADMIN_GITHUB_TOKEN }}
- name: Merge dev branch into deploy branch
uses: devmasx/[email protected]
with:
type: now
target_branch: deploy
github_token: ${{ secrets.ADMIN_GITHUB_TOKEN }}
- id: get-version
name: Get version number
env:
TAG: ${{ github.ref }}
run: |
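# Strip the "refs/tags/" prefix from the ref to obtain the bare version number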
version="${TAG/refs\/tags\//}"
echo "version=$version" >> $GITHUB_OUTPUT
- name: Create GitHub release
uses: "marvinpinto/action-automatic-releases@latest"
with:
repo_token: "${{ secrets.ADMIN_GITHUB_TOKEN }}"
draft: false
prerelease: false
automatic_release_tag: ${{ steps.get-version.outputs.version }}
releaseToBioPortal:
name: Release version to BioPortal
needs:
- test
- determineIfReleaseNeeded
if: startsWith(github.ref, 'refs/tags/') && needs.determineIfReleaseNeeded.outputs.release == '1'
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@master
- name: Install jq
run: |
sudo apt-get update -y
sudo apt-get install -y --no-install-recommends jq moreutils
- name: Prepare BioPortal submission
env:
TAG: ${{ github.ref }}
run: |
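# Fill in a copy of the BioPortal submission template: point pullLocation at the tagged kisao.owl
# and set the version and release timestamp (jq + sponge edit the JSON in place)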
version="${TAG/refs\/tags\//}"
released=$(date --iso-8601=second)
cp .github/workflows/BioPortal-submission.json .github/workflows/BioPortal-submission-version.json
jq \
".pullLocation = \"https://raw.githubusercontent.com/SED-ML/KiSAO/${version}/kisao.owl\"" \
.github/workflows/BioPortal-submission-version.json | sponge .github/workflows/BioPortal-submission-version.json
jq \
".version = \"${version}\"" \
.github/workflows/BioPortal-submission-version.json | sponge .github/workflows/BioPortal-submission-version.json
jq \
".released = \"${released}\"" \
.github/workflows/BioPortal-submission-version.json | sponge .github/workflows/BioPortal-submission-version.json
- name: Submit to BioPortal
run: |
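# POST the prepared submission JSON to the BioPortal submissions endpoint, authenticated with the API key secret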
curl -X POST \
-H "Authorization: apikey token=${{ secrets.BIOPORTAL_API_KEY }}" \
-H "Content-Type: application/json" \
-H "Accept: application/json" \
-d @.github/workflows/BioPortal-submission-version.json \
http://data.bioontology.org/ontologies/KISAO/submissions
echo
releaseToPyPI:
name: Release version to PyPI
needs:
- test
- determineIfReleaseNeeded
if: startsWith(github.ref, 'refs/tags/') && needs.determineIfReleaseNeeded.outputs.release == '1'
runs-on: ubuntu-latest
steps:
# Check out the repository
- name: Checkout repo
uses: actions/checkout@master
with:
fetch-depth: 1
# Install the requirements for the tests
- name: Install Python
uses: actions/setup-python@v2
with:
python-version: '3.9'
- name: Setup pip cache
uses: actions/cache@v2
with:
path: /opt/hostedtoolcache/Python
key: ${{ runner.os }}-pip-${{ hashFiles('libkisao/python/requirements.txt') }}-${{ hashFiles('libkisao/python/tests/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install pip
run: |
python -m pip install --upgrade pip==23.0.0
# Create PyPI release
- name: Create PyPI release
working-directory: libkisao/python
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
run: |
# Install pandoc
sudo apt-get update -y
sudo apt-get install -y --no-install-recommends wget
wget "https://github.com/jgm/pandoc/releases/download/3.1.3/pandoc-3.1.3-1-amd64.deb" -O /tmp/pandoc.deb
sudo dpkg -i /tmp/pandoc.deb
rm /tmp/pandoc.deb
# Copy OWL file and LICENSE so it can be bundled into the Python package
cp ../../kisao.owl kisao/
cp ../../LICENSE .
# Convert README to .rst format
pandoc --from=gfm --output=README.rst --to=rst README.md
# Install twine
python -m pip install wheel twine
# Create packages to upload to PyPI
python setup.py sdist
python setup.py bdist_wheel
# Upload packages to PyPI
twine upload dist/*