diff --git a/.github/workflows/add-to-catalystneuro-dashboard.yml b/.github/workflows/add_to_catalystneuro_dashboard.yml similarity index 100% rename from .github/workflows/add-to-catalystneuro-dashboard.yml rename to .github/workflows/add_to_catalystneuro_dashboard.yml diff --git a/.github/workflows/project_action.yml b/.github/workflows/add_to_nwb_dashboard.yml similarity index 100% rename from .github/workflows/project_action.yml rename to .github/workflows/add_to_nwb_dashboard.yml diff --git a/.github/workflows/Build-and-deploy-mac.yml b/.github/workflows/build_and_deploy_mac.yml similarity index 100% rename from .github/workflows/Build-and-deploy-mac.yml rename to .github/workflows/build_and_deploy_mac.yml diff --git a/.github/workflows/Build-and-deploy-win.yml b/.github/workflows/build_and_deploy_win.yml similarity index 100% rename from .github/workflows/Build-and-deploy-win.yml rename to .github/workflows/build_and_deploy_win.yml diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml deleted file mode 100644 index 243ba8ce5f..0000000000 --- a/.github/workflows/codespell.yml +++ /dev/null @@ -1,19 +0,0 @@ ---- -name: Codespell - -on: - push: - branches: [main] - pull_request: - branches: [main] - -jobs: - codespell: - name: Check for spelling errors - runs-on: ubuntu-latest - - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Codespell - uses: codespell-project/actions-codespell@v1 diff --git a/.github/workflows/daily_tests.yml b/.github/workflows/daily_tests.yml new file mode 100644 index 0000000000..5fe653197a --- /dev/null +++ b/.github/workflows/daily_tests.yml @@ -0,0 +1,37 @@ +name: Daily Tests + +on: + workflow_dispatch: + schedule: + - cron: "0 14 * * *" # Daily at 10am EST + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + + DevTests: + uses: ./.github/workflows/testing_dev.yml + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + LiveServices: + uses: 
./.github/workflows/testing_dev_with_live_services.yml + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + DANDI_STAGING_API_KEY: ${{ secrets.DANDI_STAGING_API_KEY }} + + BuildTests: + uses: ./.github/workflows/testing_flask_build_and_dist.yml + + ExampleDataCache: + uses: ./.github/workflows/example_data_cache.yml + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_GIN_BUCKET: ${{ secrets.S3_GIN_BUCKET }} + + ExampleDataTests: + needs: ExampleDataCache + uses: ./.github/workflows/testing_pipelines.yml diff --git a/.github/workflows/deploy_tests_on_pull_request.yml b/.github/workflows/deploy_tests_on_pull_request.yml new file mode 100644 index 0000000000..f006b1d2b3 --- /dev/null +++ b/.github/workflows/deploy_tests_on_pull_request.yml @@ -0,0 +1,35 @@ +name: Deploy + +on: + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + + DevTests: + uses: ./.github/workflows/testing_dev.yml + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + LiveServices: + uses: ./.github/workflows/testing_dev_with_live_services.yml + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + DANDI_STAGING_API_KEY: ${{ secrets.DANDI_STAGING_API_KEY }} + + BuildTests: + uses: ./.github/workflows/testing_flask_build_and_dist.yml + + ExampleDataCache: + uses: ./.github/workflows/example_data_cache.yml + secrets: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + S3_GIN_BUCKET: ${{ secrets.S3_GIN_BUCKET }} + + ExampleDataTests: + needs: ExampleDataCache + uses: ./.github/workflows/testing_pipelines.yml diff --git a/.github/workflows/example_data_cache.yml b/.github/workflows/example_data_cache.yml new file mode 100644 index 0000000000..e27070078c --- /dev/null +++ b/.github/workflows/example_data_cache.yml @@ -0,0 +1,74 @@ +name: Example data cache +on: + workflow_call: + 
secrets: + AWS_ACCESS_KEY_ID: + required: true + AWS_SECRET_ACCESS_KEY: + required: true + S3_GIN_BUCKET: + required: true + +jobs: + + run: + # Will read on PR dashboard as 'Deploy / ExampleDataCache / {os}' + # Action dashboard identified by 'Deploy' + # Requirement settings identified as 'ExampleDataCache / {os}' + name: ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + python-version: ["3.12"] + os: [ubuntu-latest, macos-latest, macos-13] #, windows-latest] + + steps: + + - name: Get ephy_testing_data current head hash + id: ephys + run: echo "HASH_EPHY_DATASET=$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT + - name: Get cached ephys example data - ${{ steps.ephys.outputs.HASH_EPHY_DATASET }} + uses: actions/cache@v4 + id: cache-ephys-datasets + with: + path: ./ephy_testing_data + key: ephys-datasets-${{ matrix.os }}-${{ steps.ephys.outputs.HASH_EPHY_DATASET }} + - name: Get ophys_testing_data current head hash + id: ophys + run: echo "HASH_OPHYS_DATASET=$(git ls-remote https://gin.g-node.org/CatalystNeuro/ophys_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT + - name: Get cached ophys example data - ${{ steps.ophys.outputs.HASH_OPHYS_DATASET }} + uses: actions/cache@v4 + id: cache-ophys-datasets + with: + path: ./ophys_testing_data + key: ophys-datasets-${{ matrix.os }}-${{ steps.ophys.outputs.HASH_OPHYS_DATASET }} + - name: Get behavior_testing_data current head hash + id: behavior + run: echo "HASH_BEHAVIOR_DATASET=$(git ls-remote https://gin.g-node.org/CatalystNeuro/behavior_testing_data.git HEAD | cut -f1)" >> $GITHUB_OUTPUT + - name: Get cached behavior example data - ${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }} + uses: actions/cache@v4 + id: cache-behavior-datasets + with: + path: ./behavior_testing_data + key: behavior-datasets-${{ matrix.os }}-${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }} + + - if: 
steps.cache-ephys-datasets.outputs.cache-hit != 'true' || steps.cache-ophys-datasets.outputs.cache-hit != 'true' || steps.cache-behavior-datasets.outputs.cache-hit != 'true' + name: Install and configure AWS CLI + run: | + pip install awscli + aws configure set aws_access_key_id ${{ secrets.AWS_ACCESS_KEY_ID }} + aws configure set aws_secret_access_key ${{ secrets.AWS_SECRET_ACCESS_KEY }} + + - if: steps.cache-ephys-datasets.outputs.cache-hit != 'true' + name: Download ephys dataset from S3 + run: | + aws s3 cp --region=us-east-2 ${{ secrets.S3_GIN_BUCKET }}/ephy_testing_data ./ephy_testing_data --recursive + - if: steps.cache-ophys-datasets.outputs.cache-hit != 'true' + name: Download ophys dataset from S3 + run: | + aws s3 cp --region=us-east-2 ${{ secrets.S3_GIN_BUCKET }}/ophys_testing_data ./ophys_testing_data --recursive + - if: steps.cache-behavior-datasets.outputs.cache-hit != 'true' + name: Download behavior dataset from S3 + run: | + aws s3 cp --region=us-east-2 ${{ secrets.S3_GIN_BUCKET }}/behavior_testing_data ./behavior_testing_data --recursive diff --git a/.github/workflows/testing-pipelines.yml b/.github/workflows/testing-pipelines.yml deleted file mode 100644 index 18b3655481..0000000000 --- a/.github/workflows/testing-pipelines.yml +++ /dev/null @@ -1,83 +0,0 @@ -name: Example Pipeline Tests -on: - schedule: - - cron: "0 16 * * *" # Daily at noon EST - pull_request: - -concurrency: # Cancel previous workflows on the same pull request - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -env: - CACHE_NUMBER: 2 # increase to reset cache manually - -jobs: - testing: - name: Pipelines on ${{ matrix.os }} - runs-on: ${{ matrix.os }} - defaults: - run: - shell: bash -l {0} - - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, macos-latest, windows-latest] - include: - - os: ubuntu-latest - label: environments/environment-Linux.yml - - - os: macos-latest # Mac arm64 runner - label: 
environments/environment-MAC-apple-silicon.yml - - - os: macos-13 # Mac x64 runner - label: environments/environment-MAC-intel.yml - - - os: windows-latest - label: environments/environment-Windows.yml - - - steps: - - uses: actions/checkout@v4 - - run: git fetch --prune --unshallow --tags - - # see https://github.com/conda-incubator/setup-miniconda#caching-environments - - name: Setup Mambaforge - uses: conda-incubator/setup-miniconda@v3 - with: - miniforge-variant: Mambaforge - miniforge-version: latest - activate-environment: nwb-guide - use-mamba: true - - - name: Set cache date - id: get-date - run: echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT - shell: bash - - - name: Cache Conda env - uses: actions/cache@v4 - with: - path: ${{ env.CONDA }}/envs - key: conda-${{ runner.os }}-${{ runner.arch }}-${{steps.get-date.outputs.today }}-${{ hashFiles(matrix.label) }}-${{ env.CACHE_NUMBER }} - id: cache - - - if: steps.cache.outputs.cache-hit != 'true' - name: Create and activate environment - run: mamba env update --name nwb-guide --file ${{ matrix.label }} - - - name: Use Node.js 20 - uses: actions/setup-node@v4 - with: - node-version: 20 - - - name: Install GUIDE - run: npm ci --verbose - - - if: matrix.os != 'ubuntu-latest' - name: Run tests - run: npm run test:pipelines - - - if: matrix.os == 'ubuntu-latest' - name: Run tests with xvfb - run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run test:pipelines diff --git a/.github/workflows/testing.yml b/.github/workflows/testing_dev.yml similarity index 81% rename from .github/workflows/testing.yml rename to .github/workflows/testing_dev.yml index 4dbace72d7..72bd3d2444 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing_dev.yml @@ -1,28 +1,24 @@ -name: Dev Tests +name: Dev tests on: - schedule: - - cron: "0 16 * * *" # Daily at noon EST - pull_request: - -concurrency: # Cancel previous workflows on the same pull request - group: ${{ github.workflow }}-${{ 
github.ref }} - cancel-in-progress: true - -env: - CACHE_NUMBER: 2 # increase to reset cache manually + workflow_call: + secrets: + CODECOV_TOKEN: + required: true jobs: - testing: - name: ${{ matrix.os }} # Will read on the dashboard as 'Dev Tests / {os}' + + run: + # Will read on PR dashboard as 'Deploy / DevTests / {os}' + # Action dashboard identified by 'Deploy' + # Requirement settings identified as 'DevTests / {os}' + name: ${{ matrix.os }} runs-on: ${{ matrix.os }} defaults: run: shell: bash -l {0} - strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest, windows-latest] include: - os: ubuntu-latest label: environments/environment-Linux.yml @@ -33,9 +29,8 @@ jobs: - os: macos-13 # Mac x64 runner label: environments/environment-MAC-intel.yml - - os: windows-latest - label: environments/environment-Windows.yml - +# - os: windows-latest +# label: environments/environment-Windows.yml steps: - uses: actions/checkout@v4 @@ -89,7 +84,7 @@ jobs: name: test-screenshots-${{ matrix.os }} path: docs/assets/tutorials retention-days: 1 - + overwrite: true - name: Upload coverage reports to Codecov diff --git a/.github/workflows/testing-live-services.yml b/.github/workflows/testing_dev_with_live_services.yml similarity index 79% rename from .github/workflows/testing-live-services.yml rename to .github/workflows/testing_dev_with_live_services.yml index dcf8fff649..1d4be248d5 100644 --- a/.github/workflows/testing-live-services.yml +++ b/.github/workflows/testing_dev_with_live_services.yml @@ -1,28 +1,26 @@ -name: Dev Tests (Live) +name: Dev tests with live services on: - schedule: - - cron: "0 16 * * *" # Daily at noon EST - pull_request: - -concurrency: # Cancel previous workflows on the same pull request - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -env: - CACHE_NUMBER: 2 # increase to reset cache manually + workflow_call: + secrets: + CODECOV_TOKEN: + required: true + DANDI_STAGING_API_KEY: + required: true jobs: - 
testing: - name: Services on ${{ matrix.os }} # Will read on the dashboard as 'Dev Tests (Live) / Services on {os}' + + run: + # Will read on PR dashboard as 'Deploy / LiveServices / {os}' + # Action dashboard identified by 'Deploy' + # Requirement settings identified as 'LiveServices / {os}' + name: ${{ matrix.os }} runs-on: ${{ matrix.os }} defaults: run: shell: bash -l {0} - strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest, windows-latest] include: - os: ubuntu-latest label: environments/environment-Linux.yml @@ -33,9 +31,8 @@ jobs: - os: macos-13 # Mac x64 runner label: environments/environment-MAC-intel.yml - - os: windows-latest - label: environments/environment-Windows.yml - +# - os: windows-latest +# label: environments/environment-Windows.yml steps: - uses: actions/checkout@v4 @@ -87,7 +84,6 @@ jobs: name: Run tests with xvfb run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run coverage:app - - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v4 env: diff --git a/.github/workflows/pyflask-build-and-dist-tests.yml b/.github/workflows/testing_flask_build_and_dist.yml similarity index 82% rename from .github/workflows/pyflask-build-and-dist-tests.yml rename to .github/workflows/testing_flask_build_and_dist.yml index 3c4d609719..bc7390ad22 100644 --- a/.github/workflows/pyflask-build-and-dist-tests.yml +++ b/.github/workflows/testing_flask_build_and_dist.yml @@ -1,31 +1,23 @@ -name: Build Tests +name: Test Flask build and dev tests on Flask distributable on: - schedule: - - cron: "0 16 * * *" # Daily at noon EST - pull_request: - workflow_dispatch: - -# Cancel previous workflows on the same pull request -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -env: - CACHE_NUMBER: 1 # increase to reset cache manually + workflow_call: jobs: - testing: - name: PyInstaller on ${{ matrix.os }} # Will read on the dashboard as 'Build Tests / PyInstaller on {os}' 
+ + run: + # Will read on PR dashboard as 'Deploy / BuildTests / {os}' + # Action dashboard identified by 'Deploy' + # Requirement settings identified as 'BuildTests / {os}' + name: ${{ matrix.os }} runs-on: ${{ matrix.os }} defaults: run: shell: bash -l {0} - strategy: fail-fast: false matrix: include: - # current linux installation instructions use dev mode instead of distributable + # linux installation instructions use dev mode instead of distributable # - python-version: "3.9" # os: ubuntu-latest # label: environments/environment-Linux.yml @@ -88,7 +80,7 @@ jobs: - run: npm ci --verbose - # fix for macos build + # Fix for macos build - remove bad sonpy file - if: matrix.os == 'macos-latest' || matrix.os == 'macos-13' run: rm -f /Users/runner/miniconda3/envs/nwb-guide/lib/python3.9/site-packages/sonpy/linux/sonpy.so diff --git a/.github/workflows/testing_pipelines.yml b/.github/workflows/testing_pipelines.yml new file mode 100644 index 0000000000..1a4ca41d6f --- /dev/null +++ b/.github/workflows/testing_pipelines.yml @@ -0,0 +1,133 @@ +name: Example data pipeline Tests +on: + workflow_call: + +jobs: + + run: + # Will read on PR dashboard as 'Deploy / ExampleDataTests / {os}' + # Action dashboard identified by 'Deploy' + # Requirement settings identified as 'ExampleDataTests / {os}' + name: ${{ matrix.os }} + runs-on: ${{ matrix.os }} + defaults: + run: + shell: bash -l {0} + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + label: environments/environment-Linux.yml + + - os: macos-latest # Mac arm64 runner + label: environments/environment-MAC-apple-silicon.yml + + - os: macos-13 # Mac x64 runner + label: environments/environment-MAC-intel.yml + +# - os: windows-latest +# label: environments/environment-Windows.yml + + + steps: + - uses: actions/checkout@v4 + - run: git fetch --prune --unshallow --tags + + # see https://github.com/conda-incubator/setup-miniconda#caching-environments + - name: Setup Mambaforge + uses: 
conda-incubator/setup-miniconda@v3 + with: + miniforge-variant: Mambaforge + miniforge-version: latest + activate-environment: nwb-guide + use-mamba: true + + # Setup conda environment from cache + - name: Set environment cache date + id: get-date + run: echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT + shell: bash + - name: Cache Conda env + uses: actions/cache@v4 + with: + path: ${{ env.CONDA }}/envs + key: conda-${{ runner.os }}-${{ runner.arch }}-${{steps.get-date.outputs.today }}-${{ hashFiles(matrix.label) }}-${{ env.CACHE_NUMBER }} + id: cache + - if: steps.cache.outputs.cache-hit != 'true' + name: Create and activate environment + run: mamba env update --name nwb-guide --file ${{ matrix.label }} + + - name: Use Node.js 20 + uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install GUIDE + run: npm ci --verbose + + # Load example data caches + - name: Get ephy_testing_data current head hash + id: ephys + run: echo "::set-output name=HASH_EPHY_DATASET::$(git ls-remote https://gin.g-node.org/NeuralEnsemble/ephy_testing_data.git HEAD | cut -f1)" + - name: Cache ephys dataset - ${{ steps.ephys.outputs.HASH_EPHY_DATASET }} + uses: actions/cache@v4 + id: cache-ephys-datasets + with: + path: ./ephy_testing_data + key: ephys-datasets-${{ matrix.os }}-${{ steps.ephys.outputs.HASH_EPHY_DATASET }} + - name: Get ophys_testing_data current head hash + id: ophys + run: echo "::set-output name=HASH_OPHYS_DATASET::$(git ls-remote https://gin.g-node.org/CatalystNeuro/ophys_testing_data.git HEAD | cut -f1)" + - name: Cache ophys dataset - ${{ steps.ophys.outputs.HASH_OPHYS_DATASET }} + uses: actions/cache@v4 + id: cache-ophys-datasets + with: + path: ./ophys_testing_data + key: ophys-datasets-${{ matrix.os }}-${{ steps.ophys.outputs.HASH_OPHYS_DATASET }} + - name: Get behavior_testing_data current head hash + id: behavior + run: echo "::set-output name=HASH_BEHAVIOR_DATASET::$(git ls-remote 
https://gin.g-node.org/CatalystNeuro/behavior_testing_data.git HEAD | cut -f1)" + - name: Cache behavior dataset - ${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }} + uses: actions/cache@v4 + id: cache-behavior-datasets + with: + path: ./behavior_testing_data + key: behavior-datasets-${{ matrix.os }}-${{ steps.behavior.outputs.HASH_BEHAVIOR_DATASET }} + + - name: Save working directory to environment file + run: echo "GIN_DATA_DIR=$(pwd)" >> .env + if: runner.os != 'Windows' + + - name: Save working directory to environment file (Windows) + run: echo GIN_DATA_DIR=%cd% >> .env + shell: cmd + if: runner.os == 'Windows' + + # Display environment file for debugging + - name: Print environment file + run: cat .env + if: runner.os != 'Windows' + + - name: Print environment file + run: type .env + shell: cmd + if: runner.os == 'Windows' + + # Run pipeline tests + - if: matrix.os != 'ubuntu-latest' + name: Run tests + run: npm run test:pipelines + + - if: matrix.os == 'ubuntu-latest' + name: Run tests with xvfb + run: xvfb-run --auto-servernum --server-args="-screen 0 1280x960x24" -- npm run test:pipelines + + - name: Archive Pipeline Test Screenshots + if: always() + uses: actions/upload-artifact@v4 + with: + name: test-pipelines-screenshots-${{ matrix.os }} + path: docs/assets/tutorials/test-pipelines + retention-days: 1 + overwrite: true diff --git a/.github/workflows/update-package.yml b/.github/workflows/update-package.yml deleted file mode 100644 index bbc041ca95..0000000000 --- a/.github/workflows/update-package.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Update package.lock - -on: - workflow_dispatch: - -jobs: - deploy: - runs-on: macos-latest - steps: - - uses: actions/checkout@v4 - - name: Checkout New Branch - run: git checkout -b update_package_lock - - name: Update package.lock file - run: npm install --ignore-scripts --verbose - - name: Commit Changes and Create Pull Request - run: | - git config --global user.email 
41898282+github-actions[bot]@users.noreply.github.com - git config --global user.name github-actions[bot] - git commit . -m "Update package.lock file" - git push origin update_package_lock - gh pr create --title "[Github.CI] Update package.lock file" --body "Updated package.lock file to match package.json" - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 865e043ba7..a6393f3dad 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,7 @@ _build dist out tests/screenshots +docs/assets/tutorials/test-pipelines coverage diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 65e3078722..169e416899 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,3 +23,10 @@ repos: hooks: - id: prettier types_or: [css, javascript] + +- repo: https://github.com/codespell-project/codespell + rev: v2.2.6 + hooks: + - id: codespell + additional_dependencies: + - tomli diff --git a/docs/assets/tutorials/pipelines/created.png b/docs/assets/tutorials/pipelines/created.png new file mode 100644 index 0000000000..02f57c293d Binary files /dev/null and b/docs/assets/tutorials/pipelines/created.png differ diff --git a/docs/assets/tutorials/pipelines/list.png b/docs/assets/tutorials/pipelines/list.png new file mode 100644 index 0000000000..0393f05ced Binary files /dev/null and b/docs/assets/tutorials/pipelines/list.png differ diff --git a/src/electron/frontend/core/components/pages/guided-mode/options/utils.js b/src/electron/frontend/core/components/pages/guided-mode/options/utils.js index 6a2764cf20..259c49c74b 100644 --- a/src/electron/frontend/core/components/pages/guided-mode/options/utils.js +++ b/src/electron/frontend/core/components/pages/guided-mode/options/utils.js @@ -75,7 +75,6 @@ export const run = async (pathname, payload, options = {}) => { // Clear private keys from being passed payload = sanitize(structuredClone(payload)); - console.warn("FETCH?"); const results = await fetch(new URL(pathname, baseUrl), { 
method: "POST", headers: { "Content-Type": "application/json" }, @@ -84,7 +83,6 @@ export const run = async (pathname, payload, options = {}) => { }) .then(async (res) => { const json = await res.json(); - console.warn("FETCH!", json); if (!res.ok) { const message = json.message; @@ -95,7 +93,6 @@ export const run = async (pathname, payload, options = {}) => { return json; }) .finally(() => { - console.warn("CANCEL?"); if (internalSwal) Swal.close(); }); diff --git a/src/electron/frontend/core/components/pages/settings/SettingsPage.js b/src/electron/frontend/core/components/pages/settings/SettingsPage.js index 74e8501f49..732ba905de 100644 --- a/src/electron/frontend/core/components/pages/settings/SettingsPage.js +++ b/src/electron/frontend/core/components/pages/settings/SettingsPage.js @@ -348,20 +348,20 @@ export class SettingsPage extends Page { const resolved = pipelineNames.reverse().map((name) => { try { saveNewPipelineFromYaml(name, examplePipelines[name], testing_data_folder); - return true; + return { name, success: true }; } catch (e) { console.error(e); - return name; + return { name, error: e.message }; } }); - const nSuccessful = resolved.reduce((acc, v) => (acc += v === true ? 1 : 0), 0); + const nSuccessful = resolved.reduce((acc, v) => (acc += v.success === true ? 1 : 0), 0); const nFailed = resolved.length - nSuccessful; if (nFailed) { const failDisplay = nFailed === 1 - ? `the ${resolved.find((v) => typeof v === "string")} pipeline` + ? `the ${resolved.find((v) => !v.success).name} pipeline` : `${nFailed} pipelines`; this.#openNotyf( `

Generated ${nSuccessful} test pipelines.

Could not find source data for ${failDisplay}.`, @@ -373,6 +373,8 @@ export class SettingsPage extends Page { `

Pipeline Generation Failed

Could not find source data for any pipelines.`, "error" ); + + return resolved; }, }); diff --git a/src/example_pipelines.yml b/src/example_pipelines.yml index 2830d37572..ae05a3a0c7 100644 --- a/src/example_pipelines.yml +++ b/src/example_pipelines.yml @@ -16,10 +16,15 @@ # NOTE: Can also just provide interfaces directly below pipeline name SpikeGLX-Phy: - SpikeGLXRecordingInterface: - file_path: ephy_testing_data/spikeglx/Noise4Sam_g0/Noise4Sam_g0_imec0/Noise4Sam_g0_t0.imec0.ap.bin - PhySortingInterface: - folder_path: ephy_testing_data/phy/phy_example_0 + metadata: + NWBFile: + session_start_time: 2024-05-09T00:00 # NOTE: For some reason, required for the CI to pass... + + interfaces: + SpikeGLXRecordingInterface: + file_path: ephy_testing_data/spikeglx/Noise4Sam_g0/Noise4Sam_g0_imec0/Noise4Sam_g0_t0.imec0.ap.bin + PhySortingInterface: + folder_path: ephy_testing_data/phy/phy_example_0 SpikeGLX_v1_SingleProbe_AP: diff --git a/tests/e2e/pipelines.test.ts b/tests/e2e/pipelines.test.ts index ec6220bb36..3c0f41ea88 100644 --- a/tests/e2e/pipelines.test.ts +++ b/tests/e2e/pipelines.test.ts @@ -1,32 +1,34 @@ -import { describe, expect, test, skip, beforeAll } from 'vitest' -import { connect } from '../puppeteer' +import { describe, expect, test, beforeAll } from 'vitest' +import { join } from 'node:path' import { mkdirSync, existsSync } from 'node:fs' -import { join, dirname } from 'node:path' -import { fileURLToPath } from 'node:url' import { homedir } from 'node:os' -const __dirname = dirname(fileURLToPath(import.meta.url)); -const screenshotPath = join( __dirname, 'screenshots') -mkdirSync(screenshotPath, { recursive: true }) - import examplePipelines from "../../src/example_pipelines.yml"; import paths from "../../src/paths.config.json" assert { type: "json" }; -import { evaluate, initTests } from './utils' + +import { evaluate, initTests, takeScreenshot } from './utils' import { header } from '../../src/electron/frontend/core/components/forms/utils' +import { 
sleep } from '../puppeteer'; + -// NOTE: We assume the user has put the GIN data in ~/NWB_GUIDE/test-data -const guideRootPath = join(homedir(), paths.root) -const testGINPath = join(guideRootPath, 'test-data', 'GIN') -const hasGINPath = existsSync(testGINPath) -const pipelineDescribeFn = hasGINPath ? describe : describe.skip +// NOTE: We assume the user has put the GIN data in ~/NWB_GUIDE/test-data/GIN +const testGINPath = process.env.GIN_DATA_DIR ?? join(homedir(), paths.root, 'test-data', 'GIN') +console.log('Using test GIN data at:', testGINPath) + +const pipelineDescribeFn = existsSync(testGINPath) ? describe : describe.skip beforeAll(() => initTests({ screenshots: false, data: false })) describe('Run example pipelines', () => { + test('Ensure test data is present', () => { + expect(existsSync(testGINPath)).toBe(true) + }) + + test('Ensure number of example pipelines starts at zero', async () => { const nPipelines = await evaluate(() => document.getElementById('guided-div-resume-progress-cards').children.length) expect(nPipelines).toBe(0) @@ -37,31 +39,44 @@ describe('Run example pipelines', () => { test('All example pipelines are created', async ( ) => { - await evaluate(async (testGINPath) => { + const result = await evaluate(async (testGINPath) => { // Transition to settings page const dashboard = document.querySelector('nwb-dashboard') dashboard.sidebar.select('settings') await new Promise(resolve => setTimeout(resolve, 200)) - // Generate example pipelines const page = dashboard.page - const folderInput = page.form.getFormElement(["developer", "testing_data_folder"]) - folderInput.updateData(testGINPath) // Open relevant accordion const accordion = page.form.accordions['developer'] accordion.toggle(true) + // Generate example pipelines + const folderInput = page.form.getFormElement(["developer", "testing_data_folder"]) + folderInput.updateData(testGINPath) const button = folderInput.nextSibling - await button.onClick() + const results = await 
button.onClick() page.save() + page.dismiss() // Dismiss any notifications + + return results + }, testGINPath) + await sleep(500) // Wait for notification to dismiss + + const allPipelineNames = Object.keys(examplePipelines).reverse() + + expect(result).toEqual(allPipelineNames.map(name => { return {name, success: true} })) + + await takeScreenshot(join('test-pipelines', 'created')) + + // Transition back to the conversions page and count pipelines - const pipelineNames = await evaluate(async () => { + const renderedPipelineNames = await evaluate(async () => { const dashboard = document.querySelector('nwb-dashboard') dashboard.sidebar.select('/') await new Promise(resolve => setTimeout(resolve, 200)) @@ -69,8 +84,11 @@ describe('Run example pipelines', () => { return Array.from(pipelineCards).map(card => card.info.project.name) }) + await takeScreenshot(join('test-pipelines', 'list'), 500) + + // Assert all the pipelines are present - expect(pipelineNames.sort()).toEqual(Object.keys(examplePipelines).map(header).sort()) + expect(renderedPipelineNames.sort()).toEqual(allPipelineNames.map(header).sort()) })