Introduce DataSetImplements #542
Workflow file for this run
name: Python package

on: [pull_request]

jobs:
  build:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    strategy:
      matrix:
        python-version: ["3.9", "3.10", "3.11"]

    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-java@v3
        with:
          distribution: 'temurin'
          java-version: 11
      - uses: vemonet/setup-spark@v1
        with:
          spark-version: '3.4.1'
          hadoop-version: '3'
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
          pip install -r requirements-dev.txt
      - name: Linting
        run: |
          flake8
          pylint typedspark
          mypy .
          pyright .
          bandit typedspark/**/*.py
          black --check .
          isort --check .
          docformatter --black -c **/*.py
      - name: Testing
        run: |
          # we run this test separately, to ensure that it is run without an active Spark session
          python -m pytest -m no_spark_session
          coverage run -m pytest
          coverage report -m --fail-under 100
      - name: Run notebooks
        run: |
          for FILE in docs/source/*.ipynb; do papermill $FILE output.json -k python3; done
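
The Testing step runs `pytest -m no_spark_session` on its own before the coverage run so that those tests execute while no Spark session is active. A minimal sketch of how such a test could be marked and selected, assuming the `no_spark_session` marker is registered in the project's pytest configuration (the test body and file names here are hypothetical, not taken from the repository):

```python
# pyproject.toml (or pytest.ini) would register the custom marker, e.g.:
# [tool.pytest.ini_options]
# markers = ["no_spark_session: tests that must run without an active Spark session"]

import pytest
from pyspark.sql import SparkSession


@pytest.mark.no_spark_session
def test_behaviour_without_spark_session():
    # Hypothetical check: this test depends on no Spark session having been
    # started yet, which is why the workflow selects it with
    # `pytest -m no_spark_session` before the coverage run creates a session.
    assert SparkSession.getActiveSession() is None
```

Running the marked tests in a separate pytest invocation keeps them isolated from the rest of the suite, where a shared Spark session is typically created by a fixture.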