diff --git a/.github/workflows/Build-and-deploy-mac.yml b/.github/workflows/Build-and-deploy-mac.yml
index e4ee051bf..dd7a77988 100644
--- a/.github/workflows/Build-and-deploy-mac.yml
+++ b/.github/workflows/Build-and-deploy-mac.yml
@@ -33,6 +33,9 @@ jobs:
       - name: Install package.json modules and their dependencies
         run: npm install --ignore-scripts

+      - name: Remove bad sonpy file (might make Spike2 format unusable on Mac - should exclude from selection)
+        run: rm -f /usr/local/miniconda/envs/nwb-guide/lib/python3.9/site-packages/sonpy/linux/sonpy.so
+
       - uses: apple-actions/import-codesign-certs@v1
         with:
           # https://developer.apple.com/account/resources/certificates/add
@@ -56,5 +59,4 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           appleId: ${{ secrets.APPLE_ID }} # currently this is set to Ryan's Apple ID and password
           appleIdPassword: ${{ secrets.APPLE_PASSWORD }}
-        run: |
-          npm run deploy:mac
+        run: npm run deploy:mac
diff --git a/.github/workflows/pyflask-build-and-dist-tests.yml b/.github/workflows/pyflask-build-and-dist-tests.yml
index e2e983010..5fbea858d 100644
--- a/.github/workflows/pyflask-build-and-dist-tests.yml
+++ b/.github/workflows/pyflask-build-and-dist-tests.yml
@@ -83,6 +83,10 @@ jobs:
       - run: npm ci

+      # fix for macos build
+      - if: matrix.os == 'macos-latest'
+        run: rm -f /Users/runner/miniconda3/envs/nwb-guide/lib/python3.9/site-packages/sonpy/linux/sonpy.so
+
       - name: Build PyFlask distribution
         run: npm run build:flask:${{ matrix.shorthand }}
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d3f9b3e7f..5cbf5fcc8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -6,7 +6,7 @@ repos:
       - id: end-of-file-fixer
       - id: trailing-whitespace
   - repo: https://github.com/psf/black
-    rev: 23.7.0
+    rev: 23.9.1
    hooks:
      - id: black
        exclude: ^docs/
diff --git a/docs/format_support.rst b/docs/format_support.rst
new file mode 100644
index 000000000..d2f35083e
--- /dev/null
+++ b/docs/format_support.rst
@@ -0,0 +1,7 @@
+Ecosystem Format Support
+=======================================
+The following is a live record of all the supported formats in the NWB GUIDE and underlying ecosystem.
+
+.. raw:: html
+
+
diff --git a/docs/index.rst b/docs/index.rst
index 57fa8202f..432034626 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -19,3 +19,4 @@ The resulting files are fully compliant with the best practices expected of the
   :maxdepth: 2

   developer_guide
+  format_support
diff --git a/environments/environment-Linux.yml b/environments/environment-Linux.yml
index 87a4fe4a1..d55ea40bf 100644
--- a/environments/environment-Linux.yml
+++ b/environments/environment-Linux.yml
@@ -1,31 +1,22 @@
 name: nwb-guide
 channels:
-  - defaults
   - conda-forge
+  - defaults
 dependencies:
-  - python = 3.9.17
-  - PyInstaller = 4.7
-  - nodejs = 16.14.2
-  - libgcc = 7.2.0
-  - git = 2.20.1
+  - python = 3.9.18
+  - PyInstaller = 5.13.0
+  - nodejs = 18.16.1
+  - numcodecs = 0.11.0
+  # install these from conda-forge so that dependent packages get included in the distributable
+  - jsonschema = 4.18.0 # installs jsonschema-specifications
+  - pydantic[email] = 2.0.2 # installs email-validator
   - pip
   - pip:
-    - chardet == 4.0.0
-    - pandas == 1.4.2
-    - openpyxl == 3.0.3
-    - pennsieve == 6.1.2
-    - configparser == 4.0.2
-    - python-docx == 0.8.10
-    - xlrd == 1.2.0
-    - biopython == 1.79
-    - flask == 2.0.2
-    - flask_restx
-    - protobuf == 3.20.0
-    - gevent == 21.12.0
-    - werkzeug == 2.0.2
-    - opencv-python == 4.5.3.56
+    - chardet == 5.1.0
+    - configparser == 6.0.0
+    - flask == 2.3.2
+    - flask-cors == 4.0.0
+    - flask_restx == 1.1.0
     - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main#neuroconv[full]
-    - hdmf >= 3.7.0
-    - pytest == 7.2.2
+    - pytest == 7.4.0
     - pytest-cov == 4.1.0
-    - flask-cors === 3.0.10
diff --git a/environments/environment-MAC-arm64.yml b/environments/environment-MAC-arm64.yml
index 4c0ca3ae3..6e4508a61 100644
--- a/environments/environment-MAC-arm64.yml
+++ b/environments/environment-MAC-arm64.yml
@@ -3,21 +3,23 @@ channels:
   - conda-forge
   - defaults
 dependencies:
-  - python = 3.9.17
+  - python = 3.9.18
   - PyInstaller = 5.13.0
   - nodejs = 18.16.1
   - numcodecs = 0.11.0
   - lxml = 4.9.3 # pypi build fails due to x64/arm64 mismatch so install from conda-forge
   - pyedflib = 0.1.32 # pypi build fails due to x64/arm64 mismatch so install from conda-forge
-  - jsonschema = 4.18.0 # this installs jsonschema-specifications which is needed for validation
-  - pydantic[email] = 2.0.2 # email validator is used by dandi
   - numpy # may have x64/arm64 mismatch issues so install from conda-forge
+  # install these from conda-forge so that dependent packages get included in the distributable
+  - jsonschema = 4.18.0 # installs jsonschema-specifications
+  - pydantic[email] = 2.0.2 # installs email-validator
   - pip
   - pip:
     - chardet == 5.1.0
     - configparser == 6.0.0
     - flask == 2.3.2
-    - flask_restx == 1.1.0
     - flask-cors == 4.0.0
-    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main#neuroconv[ecephys]
+    - flask_restx == 1.1.0
+    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main#neuroconv[full]
     - pytest == 7.4.0
+    - pytest-cov == 4.1.0
diff --git a/environments/environment-MAC.yml b/environments/environment-MAC.yml
index cd76d4596..d55ea40bf 100644
--- a/environments/environment-MAC.yml
+++ b/environments/environment-MAC.yml
@@ -1,22 +1,22 @@
 name: nwb-guide
 channels:
-  - defaults
   - conda-forge
+  - defaults
 dependencies:
-  - python = 3.9.17
-  - nodejs = 18.16.1
+  - python = 3.9.18
   - PyInstaller = 5.13.0
+  - nodejs = 18.16.1
   - numcodecs = 0.11.0
-  - jsonschema = 4.18.0 # this installs jsonschema-specifications which is needed for validation
-  - pydantic[email] = 2.0.2 # email validator is used by dandi
+  # install these from conda-forge so that dependent packages get included in the distributable
+  - jsonschema = 4.18.0 # installs jsonschema-specifications
+  - pydantic[email] = 2.0.2 # installs email-validator
   - pip
   - pip:
     - chardet == 5.1.0
     - configparser == 6.0.0
     - flask == 2.3.2
-    - flask-cors === 3.0.10
+    - flask-cors == 4.0.0
     - flask_restx == 1.1.0
     - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main#neuroconv[full]
-    - hdmf >= 3.7.0
-    - pytest == 7.2.2
+    - pytest == 7.4.0
     - pytest-cov == 4.1.0
diff --git a/environments/environment-Windows.yml b/environments/environment-Windows.yml
index 7faeba0ee..eb846dee2 100644
--- a/environments/environment-Windows.yml
+++ b/environments/environment-Windows.yml
@@ -20,3 +20,4 @@ dependencies:
     - hdmf >= 3.7.0
     - pytest == 7.2.2
     - pytest-cov == 4.1.0
+    - email-validator == 2.0.0
diff --git a/package.json b/package.json
index bad2cfe3e..3ea6741de 100644
--- a/package.json
+++ b/package.json
@@ -1,7 +1,7 @@
 {
   "name": "nwb-guide",
   "productName": "NWB GUIDE",
-  "version": "0.0.4",
+  "version": "0.0.5",
   "description": "",
   "main": "./build/main/main.js",
   "scripts": {
@@ -15,16 +15,17 @@
     "build:win": "npm run build && npm run build:flask:win && npm run build:electron:win",
     "build:mac": "npm run build && npm run build:flask:unix && npm run build:electron:mac",
     "build:linux": "npm run build && npm run build:flask:unix && npm run build:electron:linux",
-    "build:flask:base": "python -m PyInstaller --name nwb-guide --onedir --clean --noconfirm ./pyflask/app.py --distpath ./build/flask --collect-data jsonschema_specifications --collect-all nwbinspector --collect-all neuroconv --collect-all pynwb --collect-all hdmf --collect-all ci_info --hidden-import scipy._distributor_init --hidden-import scipy._lib.messagestream --hidden-import scipy._lib._ccallback --hidden-import scipy._lib._testutils",
+    "build:flask:base": "python -m PyInstaller --log-level DEBUG --name nwb-guide --onedir --clean --noconfirm ./pyflask/app.py --distpath ./build/flask --collect-data jsonschema_specifications --collect-all nwbinspector --collect-all neuroconv --collect-all pynwb --collect-all hdmf --collect-all ci_info --collect-all ndx_dandi_icephys --hidden-import scipy._distributor_init --hidden-import scipy._lib.messagestream --hidden-import scipy._lib._ccallback --hidden-import scipy._lib._testutils --hidden-import email_validator",
     "build:flask:win": "npm run build:flask:base -- --add-data ./paths.config.json;. --add-data ./package.json;.",
-    "build:flask:unix": "npm run build:flask:base -- --add-data ./paths.config.json:. --add-data ./package.json:. --collect-all ndx_dandi_icephys",
+    "build:flask:unix": "npm run build:flask:base -- --add-data ./paths.config.json:. --add-data ./package.json:.",
     "build:electron:win": "electron-builder build --win --publish never",
     "build:electron:mac": "electron-builder build --mac --publish never",
     "build:electron:linux": "electron-builder build --linux --publish never",
     "test": "npm run test:app && npm run test:server",
     "test:app": "vitest run",
     "test:server": "pytest pyflask/tests/ -s",
-    "test:executable": "concurrently -n EXE,TEST --kill-others \"node tests/testPyinstallerExecutable.js --port 3434 --forever\" \"pytest pyflask/tests/ -s --target http://localhost:3434\"",
+    "wait3s": "node -e \"setTimeout(() => process.exit(0),3000)\"",
+    "test:executable": "concurrently -n EXE,TEST --kill-others --success first \"node tests/testPyinstallerExecutable.js --port 3434 --forever\" \"npm run wait3s && pytest pyflask/tests/ -s --target http://localhost:3434\"",
     "test:coverage": "npm run coverage:app && npm run coverage:server",
     "coverage:app": "vitest run --coverage",
     "coverage:server": "pytest pyflask/tests/ -s --cov=pyflask --cov-report=xml",
diff --git a/pyflask/apis/startup.py b/pyflask/apis/startup.py
index 3824a9465..d72ae3cdc 100644
--- a/pyflask/apis/startup.py
+++ b/pyflask/apis/startup.py
@@ -1,6 +1,8 @@
 """API endpoint definitions for startup operations."""
 from flask_restx import Namespace, Resource

+from errorHandlers import notBadRequestException
+
 startup_api = Namespace("startup", description="API for startup commands related to the NWB GUIDE.")

 parser = startup_api.parser()
@@ -19,3 +21,25 @@ class Echo(Resource):
     def get(self):
         args = parser.parse_args()
         return args["arg"]
+
+
+@startup_api.route("/preload-imports")
+class PreloadImports(Resource):
+    """
+    Preload various imports on startup instead of waiting for them later on.
+
+    Python caches all modules that have been imported at least once in the same kernel,
+    even if their namespace is not always exposed to a given scope. This means that later imports
+    simply expose the cached namespaces to their scope instead of retriggering the entire import.
+    """
+
+    @startup_api.doc(responses={200: "Success", 400: "Bad Request", 500: "Internal server error"})
+    def get(self):
+        try:
+            import neuroconv
+
+            return True
+        except Exception as exception:
+            if notBadRequestException(exception=exception):
+                startup_api.abort(500, str(exception))
+            raise exception
diff --git a/pyflask/tests/test_neuroconv.py b/pyflask/tests/test_neuroconv.py
index 8c363c004..4f0e793f5 100644
--- a/pyflask/tests/test_neuroconv.py
+++ b/pyflask/tests/test_neuroconv.py
@@ -2,9 +2,8 @@
 from utils import get, post, get_converter_output_schema


-# --------------------- Tests ---------------------
-# Accesses the dictionary of all interfaces and their metadata
 def test_get_all_interfaces(client):
+    """Accesses the dictionary of all interfaces and their metadata."""
     validate(
         get("neuroconv", client),
         schema={
@@ -23,14 +22,14 @@
     )


-# Test single interface schema request
 def test_single_schema_request(client):
+    """Test single interface schema request."""
     interfaces = {"myname": "SpikeGLXRecordingInterface"}
     validate(post("neuroconv/schema", interfaces, client), schema=get_converter_output_schema(interfaces))


-# Uses the NWBConverter Class to combine multiple interfaces
 def test_multiple_schema_request(client):
+    """Uses the NWBConverter Class to combine multiple interfaces."""
     interfaces = {"myname": "SpikeGLXRecordingInterface", "myphyinterface": "PhySortingInterface"}
     data = post("/neuroconv/schema", interfaces, client)
     validate(data, schema=get_converter_output_schema(interfaces))
diff --git a/pyflask/tests/test_startup.py b/pyflask/tests/test_startup.py
new file mode 100644
index 000000000..42cead8c7
--- /dev/null
+++ b/pyflask/tests/test_startup.py
@@ -0,0 +1,7 @@
+from utils import get, post, get_converter_output_schema
+
+
+def test_preload_imports(client):
+    """Verify that the preload import endpoint returned good status."""
+    result = get("startup/preload-imports", client)
+    assert result == True
diff --git a/src/main/main.ts b/src/main/main.ts
index d00c1219b..da68ea02b 100755
--- a/src/main/main.ts
+++ b/src/main/main.ts
@@ -383,7 +383,7 @@ function makeSingleInstance() {
   else app.on("second-instance", () => restoreWindow());
 }

-if (process.platform === 'darwin') initialize();
+initialize();

 app.on('activate', () => {
   if (BrowserWindow.getAllWindows().length === 0) initialize()
diff --git a/src/renderer/assets/css/global.css b/src/renderer/assets/css/global.css
index 69b046c46..252a2e657 100755
--- a/src/renderer/assets/css/global.css
+++ b/src/renderer/assets/css/global.css
@@ -8,6 +8,15 @@
     /* src: local('Source Code Pro'), local('SourceCodePro'), url(fonts/SourceCodePro-Regular.ttf) format('truetype'); */
 }

+/* Notfy */
+.notyf__toast {
+    max-width: clamp(300px, 40vw, 500px) !important;
+}
+
+.notyf__message {
+    word-wrap: break-word;
+}
+
 /* Global ---------------------------- */

 * {
diff --git a/src/renderer/src/dependencies/simple.js b/src/renderer/src/dependencies/simple.js
index 35c3adafd..97f8d2148 100644
--- a/src/renderer/src/dependencies/simple.js
+++ b/src/renderer/src/dependencies/simple.js
@@ -12,4 +12,11 @@ export const homeDirectory = app?.getPath("home") ?? "";
 export const appDirectory = homeDirectory ? joinPath(homeDirectory, paths.root) : "";
 export const guidedProgressFilePath = homeDirectory ? joinPath(appDirectory, ...paths.subfolders.progress) : "";

+export const stubSaveFolderPath = homeDirectory
+    ? joinPath(homeDirectory, paths["root"], ...paths.subfolders.stubs)
+    : "";
+export const conversionSaveFolderPath = homeDirectory
+    ? joinPath(homeDirectory, paths["root"], ...paths.subfolders.conversions)
+    : "";
+
 export const isStorybook = window.location.href.includes("iframe.html");
diff --git a/src/renderer/src/index.ts b/src/renderer/src/index.ts
index efbc4b093..c13b6f258 100644
--- a/src/renderer/src/index.ts
+++ b/src/renderer/src/index.ts
@@ -94,18 +94,22 @@ async function checkInternetConnection() {
   return hasInternet
 };

-// Check if the Pysoda server is live
+// Check if the Flask server is live
 const serverIsLiveStartup = async () => {
   const echoResponse = await fetch(`${baseUrl}/startup/echo?arg=server ready`).then(res => res.json()).catch(e => e)
   return echoResponse === "server ready" ? true : false;
 };

+// Preload Flask imports for faster on-demand operations
+const preloadFlaskImports = async () => await fetch(`${baseUrl}/startup/preload-imports`).then(res => res.json()).catch(e => e)
+
 let openPythonStatusNotyf: undefined | any;

 async function pythonServerOpened() {

   // Confirm requests are actually received by the server
   const isLive = await serverIsLiveStartup()
+  if (isLive) await preloadFlaskImports() // initiate preload of Flask imports
   if (!isLive) return pythonServerClosed()

   // Update server status and throw a notification
diff --git a/src/renderer/src/progress/index.js b/src/renderer/src/progress/index.js
index b5c7c2121..b3dfb9ea8 100644
--- a/src/renderer/src/progress/index.js
+++ b/src/renderer/src/progress/index.js
@@ -1,6 +1,13 @@
 import Swal from "sweetalert2";

-import { guidedProgressFilePath, reloadPageToHome, isStorybook, appDirectory } from "../dependencies/simple.js";
+import {
+    guidedProgressFilePath,
+    reloadPageToHome,
+    isStorybook,
+    appDirectory,
+    stubSaveFolderPath,
+    conversionSaveFolderPath,
+} from "../dependencies/simple.js";
 import { fs } from "../electron/index.js";
 import { joinPath, runOnLoad } from "../globals.js";
 import { merge } from "../stories/pages/utils.js";
@@ -102,14 +109,14 @@ export function resume(name) {

 export const remove = async (name) => {
     const result = await Swal.fire({
-        title: `Are you sure you would like to delete NWB GUIDE progress made on the dataset: ${name}?`,
-        text: "Your progress file will be deleted permanently, and all existing progress will be lost.",
+        title: `Are you sure you would like to delete this conversion pipeline?`,
+        html: `All related files will be deleted permanently, and existing progress will be lost.`,
         icon: "warning",
         heightAuto: false,
         showCancelButton: true,
         confirmButtonColor: "#3085d6",
         cancelButtonColor: "#d33",
-        confirmButtonText: "Delete progress file",
+        confirmButtonText: `Delete ${name}`,
         cancelButtonText: "Cancel",
         focusCancel: true,
     });
@@ -119,9 +126,17 @@
         const progressFilePathToDelete = joinPath(guidedProgressFilePath, name + ".json");

         //delete the progress file
-        if (fs) fs.unlinkSync(progressFilePathToDelete, (err) => console.log(err));
+        if (fs) fs.unlinkSync(progressFilePathToDelete);
         else localStorage.removeItem(progressFilePathToDelete);

+        if (fs) {
+            // delete default stub location
+            fs.rmSync(joinPath(stubSaveFolderPath, name), { recursive: true, force: true });
+
+            // delete default conversion location
+            fs.rmSync(joinPath(conversionSaveFolderPath, name), { recursive: true, force: true });
+        }
+
         return true;
     }
diff --git a/src/renderer/src/stories/DandiResults.js b/src/renderer/src/stories/DandiResults.js
new file mode 100644
index 000000000..86cb1e29b
--- /dev/null
+++ b/src/renderer/src/stories/DandiResults.js
@@ -0,0 +1,107 @@
+import { LitElement, css, html } from "lit";
+
+import { get } from "dandi";
+import { isStaging } from "./pages/uploads/UploadsPage.js";
+
+export class DandiResults extends LitElement {
+    static get styles() {
+        return css`
+            :host {
+                display: block;
+            }
+        `;
+    }
+
+    constructor(props) {
+        super();
+        Object.assign(this, props);
+    }
+
+    async updated() {
+        const handleId = (str, info) => {
+            let value = info[str];
+            if (str === "modified") value = new Date(value).toString();
+
+            const el = this.shadowRoot.querySelector(`#${str}`);
+            el.innerText = value;
+
+            if (el.tagName === "A") {
+                if (str === "doi") value = `http://doi.org/${value}`;
+                el.href = value;
+                el.target = "_blank";
+            }
+        };
+
+        const elIds = ["name", "modified"];
+
+        const otherElIds = ["embargo_status"];
+
+        const liveId = this.id; // '000552' // From Huszar
+        const dandiset = await get(liveId, isStaging(this.id) ? "staging" : undefined);
+
+        otherElIds.forEach((str) => handleId(str, dandiset));
+        elIds.forEach((str) => handleId(str, dandiset.draft_version));
+
+        const info = await dandiset.getInfo({ version: dandiset.draft_version.version });
+
+        const secondElIds = ["description", "url"];
+        secondElIds.forEach((str) => handleId(str, info));
+
+        const publicationEl = this.shadowRoot.querySelector(`#publication`);
+        publicationEl.innerHTML = "";
+        const publications = (info.relatedResource ?? []).filter((o) => o.relation === "dcite:IsDescribedBy");
+
+        if (publications.length)
+            publicationEl.append(
+                ...(await Promise.all(
+                    publications.map(async (o) => {
+                        const li = document.createElement("li");
+                        const { message } = await fetch(
+                            `http://api.crossref.org/works${new URL(o.identifier).pathname}`
+                        ).then((res) => res.json());
+                        li.innerHTML = `${message.author.map((o) => `${o.family}, ${o.given[0]}.`).join(", ")} (${
+                            message.created["date-parts"][0][0]
+                        }). ${message.title[0]}. ${message["container-title"]}, ${message.volume}(${
+                            message.issue
+                        }), ${message.page}. doi:${message.DOI}`;
+                        return li;
+                    })
+                ))
+            );
+        else publicationEl.innerText = "N/A";
+    }
+
+    render() {
+        return html`
Your conversion failed. Please try again.