diff --git a/.github/workflows/caches_cron_job.yml b/.github/workflows/caches_cron_job.yml
index 2fe1ffdcf4..8a3a606def 100644
--- a/.github/workflows/caches_cron_job.yml
+++ b/.github/workflows/caches_cron_job.yml
@@ -2,10 +2,14 @@ name: Create caches for gin ecephys data and virtual env
 
 on:
   workflow_dispatch:
+  push: # When something is pushed into main, this checks whether caches need to be re-created
+    branches:
+      - main
   schedule:
     - cron: "0 12 * * *" # Daily at noon UTC
 
 jobs:
+
   create-virtual-env-cache-if-missing:
     name: Caching virtual env
     runs-on: "ubuntu-latest"
diff --git a/.github/workflows/core-test.yml b/.github/workflows/core-test.yml
index 979c1975d9..b17fd4ed49 100644
--- a/.github/workflows/core-test.yml
+++ b/.github/workflows/core-test.yml
@@ -3,7 +3,7 @@ name: Testing core
 on:
   pull_request:
     branches: [master]
-    types: [synchronize, opened, reopened, ready_for_review]
+    types: [synchronize, opened, reopened]
 
 concurrency: # Cancel previous workflows on the same pull request
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -21,7 +21,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: actions/setup-python@v4
         with:
-          python-version: 3.9
+          python-version: '3.9'
       - name: Install dependencies
         run: |
           python -m pip install -U pip # Official recommended way
diff --git a/.github/workflows/full-test-with-codecov.yml b/.github/workflows/full-test-with-codecov.yml
index d2d6397e7f..3d1a01053b 100644
--- a/.github/workflows/full-test-with-codecov.yml
+++ b/.github/workflows/full-test-with-codecov.yml
@@ -3,7 +3,7 @@ name: Full spikeinterface tests codecov
 on:
   workflow_dispatch:
   schedule:
-    - cron: "0 12 * * *" # Daily at noon UCT
+    - cron: "0 12 * * *" # Daily at noon UTC
 
 env:
   KACHERY_CLOUD_CLIENT_ID: ${{ secrets.KACHERY_CLOUD_CLIENT_ID }}
@@ -21,7 +21,7 @@ jobs:
       - name: Get current year-month
         id: date
         run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
-      - uses: actions/cache@v3
+      - uses: actions/cache/restore@v3
         id: cache-venv
         with:
           path: ~/test_env
@@ -34,16 +34,15 @@ jobs:
           sudo apt update
           # this is for datalad and download testing datasets
           sudo apt install git
+          # this is for spyking circus
+          # sudo apt install mpich libmpich-dev
           # needed for correct operation of git/git-annex/DataLad
           git config --global user.email "CI@example.com"
           git config --global user.name "CI Almighty"
-          # this is for spyking circus
-          # sudo apt install mpich libmpich-dev
-          # create an environement (better for caching)
           python -m venv ~/test_env
-          source ~/test_env/bin/activate
           python -m pip install -U pip # Official recommended way
-          pip install setuptools wheel twine
+          source ~/test_env/bin/activate
+          pip install tabulate # This produces summaries at the end
           ## clean some cache to avoid using old cache
           pip cache remove numpy
           pip cache remove hdbscan
@@ -51,7 +50,6 @@ jobs:
           # herdingspikes need numpy to installed first, this numpy pre install will be removed when HS remove from testing
           pip install numpy==1.22
           pip install -e .[test,extractors,full]
-          pip install tabulate
       - name: git-annex install
         run: |
           wget https://downloads.kitenet.net/git-annex/linux/current/git-annex-standalone-amd64.tar.gz
@@ -69,7 +67,7 @@ jobs:
         id: vars
         run: |
           echo "HASH_EPHY_DATASET=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
-      - uses: actions/cache@v3
+      - uses: actions/cache/restore@v3
         id: cache-datasets
         env:
           # the key depend on the last comit repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
diff --git a/.github/workflows/full-test.yml b/.github/workflows/full-test.yml
index 671f982cc4..e372b609c4 100644
--- a/.github/workflows/full-test.yml
+++ b/.github/workflows/full-test.yml
@@ -2,13 +2,13 @@ name: Full spikeinterface tests
 
 on:
   pull_request:
-    types: [synchronize, opened, reopened, ready_for_review]
+    types: [synchronize, opened, reopened]
 
 concurrency: # Cancel previous workflows on the same pull request
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
 
-env:
+env: # For the sortingview backend
   KACHERY_CLOUD_CLIENT_ID: ${{ secrets.KACHERY_CLOUD_CLIENT_ID }}
   KACHERY_CLOUD_PRIVATE_KEY: ${{ secrets.KACHERY_CLOUD_PRIVATE_KEY }}
 
@@ -29,11 +29,13 @@ jobs:
       - name: Get current year-month
         id: date
         run: echo "date=$(date +'%Y-%m')" >> $GITHUB_OUTPUT
-      - uses: actions/cache@v3
+      - uses: actions/cache/restore@v3
         id: cache-venv
         with:
           path: ~/test_env
           key: ${{ runner.os }}-venv-${{ hashFiles('**/pyproject.toml') }}-${{ steps.date.outputs.date }}
+          restore-keys: |
+            ${{ runner.os }}-venv-
       - name: Python version
         run: |
           python --version
@@ -42,16 +44,14 @@ jobs:
      - name: Install dependencies
        run: |
          sudo apt update
          # this is for datalad and download testing datasets
          sudo apt install git
+          # this is for spyking circus
+          # sudo apt install mpich libmpich-dev
          # needed for correct operation of git/git-annex/DataLad
          git config --global user.email "CI@example.com"
          git config --global user.name "CI Almighty"
-          # this is for spyking circus
-          # sudo apt install mpich libmpich-dev
-          # create an environement (better for caching)
          python -m venv ~/test_env
-          source ~/test_env/bin/activate
          python -m pip install -U pip # Official recommended way
-          pip install setuptools wheel twine
+          source ~/test_env/bin/activate
          pip install tabulate # This produces summaries at the end
          ## clean some cache to avoid using old cache
          pip cache remove numpy
@@ -77,7 +77,7 @@ jobs:
        id: vars
        run: |
          echo "HASH_EPHY_DATASET=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT
-      - uses: actions/cache@v3
+      - uses: actions/cache/restore@v3
        id: cache-datasets
        env:
          # the key depend on the last comit repo https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git
diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml
index f03696943f..10d680d91d 100644
--- a/.github/workflows/publish-to-pypi.yml
+++ b/.github/workflows/publish-to-pypi.yml
@@ -11,13 +11,14 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python 3.8
-        uses: actions/setup-python@v2
+      - uses: actions/checkout@v3
+      - name: Set up Python 3.9
+        uses: actions/setup-python@v4
         with:
-          python-version: 3.8
+          python-version: '3.9'
       - name: Install dependencies for testing
         run: |
+          python -m pip install -U pip # Official recommended way
           pip install pytest
           pip install zarr
           pip install setuptools wheel twine build
diff --git a/.github/workflows/s3-nwb-test.yml b/.github/workflows/s3-nwb-test.yml
index f58626838e..08583a7ef7 100644
--- a/.github/workflows/s3-nwb-test.yml
+++ b/.github/workflows/s3-nwb-test.yml
@@ -2,7 +2,7 @@ name: S3 NWB Test
 on:
   pull_request:
     branches: [master]
-    types: [synchronize, opened, reopened, ready_for_review]
+    types: [synchronize, opened, reopened]
 
 concurrency: # Cancel previous workflows on the same pull request
   group: ${{ github.workflow }}-${{ github.ref }}