
Bump min version of python-dateutil for pandas 2.0 compatibility #192


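# Tests workflow for woodwork: runs the unit test suite on pull requests,
# pushes to main, and manual dispatch, fanning out across Python 3.9-3.11
# and five test "directories" (buckets).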
name: Tests
on:
  pull_request:
    types: [opened, synchronize]
  push:
    branches:
      - main
  workflow_dispatch:
env:
  PYARROW_IGNORE_TIMEZONE: 1
  JAVA_HOME: "/usr/lib/jvm/java-11-openjdk-amd64"
  ALTERYX_OPEN_SRC_UPDATE_CHECKER: "False"
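# PYARROW_IGNORE_TIMEZONE and JAVA_HOME above support the Spark tests;
# ALTERYX_OPEN_SRC_UPDATE_CHECKER presumably disables the library's
# update-check network call during CI.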
jobs:
  unit_latest_tests:
    name: ${{ matrix.python_version }} ${{ matrix.directories }} unit tests
    runs-on: ubuntu-latest
    strategy:
      fail-fast: true
      matrix:
        python_version: ["3.9", "3.10", "3.11"]
        directories: ["Core", "Dask/Spark - All Other Tests", "Dask/Spark - Testing Table Accessor", "Dask/Spark - Testing to Disk with LatLong", "Dask/Spark - All other Serialization"]
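    # The "Core" bucket runs against core requirements only; the four
    # "Dask/Spark" buckets split the slowest test files so they can run in parallel.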
    steps:
      - name: Set up python ${{ matrix.python_version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python_version }}
      - name: Checkout repository
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.ref }}
          repository: ${{ github.event.pull_request.head.repo.full_name }}
          fetch-depth: 2
      - name: Build woodwork package
        run: make package
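      # `make package` is expected to produce the unpacked_sdist/ directory
      # that the remaining steps install from and run tests inside.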
      - name: Set up pip
        run: |
          pip config --site set global.progress_bar off
      - name: Install woodwork with test requirements
        run: |
          python -m pip install -e unpacked_sdist/[test]
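      # Only the Dask/Spark buckets need a JVM (Java 11, matching JAVA_HOME
      # above) and the spark/dask extras, so the next step is conditional.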
      - if: ${{ startsWith(matrix.directories, 'Dask/Spark') }}
        name: Install Dask and Spark Requirements
        run: |
          sudo apt update
          sudo apt install -y openjdk-11-jre-headless
          python -m pip install unpacked_sdist/[spark]
          python -m pip install unpacked_sdist/[dask]
          cd unpacked_sdist
          coverage erase
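      # Each Dask/Spark bucket runs one of two variants below: without
      # coverage on Python 3.10/3.11, or with coverage on 3.9 only.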
      - if: ${{ matrix.python_version != '3.9' && matrix.directories == 'Dask/Spark - Testing to Disk with LatLong' }}
        name: Run Testing to Disk with LatLong Unit Tests (no code coverage)
        run: |
          cd unpacked_sdist
          pytest woodwork/tests/accessor/test_serialization.py::test_to_disk_with_latlong -n 2 --durations 0
      - if: ${{ matrix.python_version != '3.9' && matrix.directories == 'Dask/Spark - All other Serialization' }}
        name: Run all other Serialization Unit Tests (no code coverage)
        run: |
          cd unpacked_sdist
          pytest woodwork/tests/accessor/test_serialization.py --deselect woodwork/tests/accessor/test_serialization.py::test_to_disk_with_latlong -n 2 --durations 0
      - if: ${{ matrix.python_version != '3.9' && matrix.directories == 'Dask/Spark - Testing Table Accessor' }}
        name: Run Table Accessor Unit Tests (no code coverage)
        run: |
          cd unpacked_sdist
          pytest woodwork/tests/accessor/test_table_accessor.py -n 2 --durations 0
      - if: ${{ matrix.python_version != '3.9' && matrix.directories == 'Dask/Spark - All Other Tests' }}
        name: Run all other Unit Tests (no code coverage)
        run: |
          cd unpacked_sdist
          pytest woodwork/ -n 2 --ignore=woodwork/tests/accessor/test_serialization.py --ignore=woodwork/tests/accessor/test_table_accessor.py --durations 0
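      # The 3.9 variants repeat the same four buckets with pytest-cov enabled,
      # writing coverage.xml one directory up (the workspace root).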
      - if: ${{ matrix.python_version == '3.9' && matrix.directories == 'Dask/Spark - Testing to Disk with LatLong' }}
        name: Run Testing to Disk with LatLong Unit Tests with code coverage
        run: |
          cd unpacked_sdist
          pytest woodwork/tests/accessor/test_serialization.py::test_to_disk_with_latlong -n 2 --durations 0 --cov=woodwork --cov-config=../pyproject.toml --cov-report=xml:../coverage.xml
      - if: ${{ matrix.python_version == '3.9' && matrix.directories == 'Dask/Spark - All other Serialization' }}
        name: Run all other Serialization Unit Tests with code coverage
        run: |
          cd unpacked_sdist
          pytest woodwork/tests/accessor/test_serialization.py --deselect woodwork/tests/accessor/test_serialization.py::test_to_disk_with_latlong -n 2 --durations 0 --cov=woodwork --cov-config=../pyproject.toml --cov-report=xml:../coverage.xml
      - if: ${{ matrix.python_version == '3.9' && matrix.directories == 'Dask/Spark - Testing Table Accessor' }}
        name: Run Table Accessor Unit Tests with code coverage
        run: |
          cd unpacked_sdist
          pytest woodwork/tests/accessor/test_table_accessor.py -n 2 --durations 0 --cov=woodwork --cov-config=../pyproject.toml --cov-report=xml:../coverage.xml
      - if: ${{ matrix.python_version == '3.9' && matrix.directories == 'Dask/Spark - All Other Tests' }}
        name: Run all other Unit Tests with code coverage
        run: |
          cd unpacked_sdist
          pytest woodwork/ -n 2 --ignore=woodwork/tests/accessor/test_serialization.py --ignore=woodwork/tests/accessor/test_table_accessor.py --durations 0 --cov=woodwork --cov-config=../pyproject.toml --cov-report=xml:../coverage.xml
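      # The Core bucket runs the full suite against core requirements only
      # (no dask/spark extras installed) and without coverage.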
      - if: ${{ matrix.directories == 'Core' }}
        name: Run Unit Tests with core requirements only (no code coverage)
        run: |
          cd unpacked_sdist
          pytest woodwork/ -n 2
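      # Coverage is collected only on Python 3.9, so only the 3.9 Dask/Spark
      # jobs have a coverage.xml to upload.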
      - if: ${{ matrix.python_version == '3.9' && matrix.directories != 'Core' }}
        name: Upload coverage to Codecov
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          fail_ci_if_error: true
          files: ${{ github.workspace }}/coverage.xml
          verbose: true
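
To reproduce a single matrix cell locally, the commands below are a minimal sketch assembled from the workflow's own steps. It mirrors the "Dask/Spark - Testing Table Accessor" bucket and assumes a Unix shell in a clone of the repository with make, pip, and Java 11 already available (the apt and pip-config steps are CI-only housekeeping):

    export PYARROW_IGNORE_TIMEZONE=1   # matches the workflow env for Spark
    make package                       # assumed to produce unpacked_sdist/
    python -m pip install -e "unpacked_sdist/[test]"
    python -m pip install "unpacked_sdist/[spark]" "unpacked_sdist/[dask]"
    cd unpacked_sdist
    coverage erase
    pytest woodwork/tests/accessor/test_table_accessor.py -n 2 --durations 0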