From de7d01157c113310ce71b7aa4db4785f96d7d546 Mon Sep 17 00:00:00 2001 From: Xiao Gui Date: Thu, 22 Aug 2024 13:27:09 +0200 Subject: [PATCH] fix: siibra-api parcellation map to use siibra-python v2 --- .helm/siibra-api/Chart.yaml | 4 +- .../templates/deployment-worker-v4.yaml | 91 ++++++++ .helm/siibra-api/values.yaml | 4 +- api/common/data_handlers/__init__.py | 1 - api/common/data_handlers/volumes/__init__.py | 1 - .../data_handlers/volumes/parcellationmap.py | 203 ------------------ api/server/volumes/parcellationmap.py | 19 +- new_api/common/__init__.py | 1 + new_api/common/decorators.py | 2 + new_api/common/storage.py | 19 ++ new_api/data_handlers/__init__.py | 0 new_api/data_handlers/maps.py | 124 +++++++++++ new_api/v3/data_handlers/map/__init__.py | 75 ++++++- new_api/v3/models/volumes/volume.py | 4 +- new_api/v3/serialization/__init__.py | 1 + new_api/v3/serialization/_common.py | 2 +- new_api/v3/serialization/map.py | 89 ++++++++ worker-v4.dockerfile | 2 +- 18 files changed, 417 insertions(+), 225 deletions(-) create mode 100644 .helm/siibra-api/templates/deployment-worker-v4.yaml delete mode 100644 api/common/data_handlers/volumes/__init__.py delete mode 100644 api/common/data_handlers/volumes/parcellationmap.py create mode 100644 new_api/common/__init__.py create mode 100644 new_api/common/storage.py create mode 100644 new_api/data_handlers/__init__.py create mode 100644 new_api/data_handlers/maps.py create mode 100644 new_api/v3/serialization/map.py diff --git a/.helm/siibra-api/Chart.yaml b/.helm/siibra-api/Chart.yaml index 6fc9e433..91011adc 100644 --- a/.helm/siibra-api/Chart.yaml +++ b/.helm/siibra-api/Chart.yaml @@ -15,10 +15,10 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.1.3 +version: 0.1.4 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "0.3.17" +appVersion: "0.3.18" diff --git a/.helm/siibra-api/templates/deployment-worker-v4.yaml b/.helm/siibra-api/templates/deployment-worker-v4.yaml new file mode 100644 index 00000000..e3247091 --- /dev/null +++ b/.helm/siibra-api/templates/deployment-worker-v4.yaml @@ -0,0 +1,91 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "siibra-api.fullname" $ }}-worker-v4 + labels: + queuename: {{ . }} + {{- include "siibra-api.labels" $ | nindent 4 }} +spec: + strategy: + type: RollingUpdate + # This rolling update setup allow k8s to scale down before bringing up new pods. + # This should be more resource conservative, whilst sacrificing some availability during update. + rollingUpdate: + maxSurge: 0 + maxUnavailable: 1 + + {{- if not $.Values.autoscaling.enabled }} + replicas: {{ $.Values.replicaCount }} + {{- end }} + selector: + matchLabels: + role: worker + queuename: v4-all + sapiFlavor: {{ $.Values.sapiFlavor }} + {{- include "siibra-api.selectorLabels" $ | nindent 6 }} + template: + metadata: + {{- with $.Values.podAnnotations }} + annotations: + {{- toYaml . 
| nindent 8 }} + {{- end }} + labels: + role: worker + queuename: v4-all + sapiFlavor: {{ $.Values.sapiFlavor }} + {{- include "siibra-api.labels" $ | nindent 8 }} + {{- with $.Values.podLabels }} + {{- toYaml . | nindent 8 }} + {{- end }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "siibra-api.serviceAccountName" $ }} + securityContext: + {{- toYaml $.Values.podSecurityContext | nindent 8 }} + containers: + - name: {{ $.Chart.Name }} + securityContext: + {{- toYaml $.Values.securityContext | nindent 12 }} + image: "{{ $.Values.image.repository }}:{{ include "siibra-api.root-img" $ }}-worker-v4" + imagePullPolicy: {{ $.Values.image.pullPolicy }} + livenessProbe: + # each pod has 20 seconds to become lively (which by definition is ready) + initialDelaySeconds: 2 + periodSeconds: 10 + timeoutSeconds: 4 + failureThreshold: 12 + exec: + command: ["/bin/bash", "-c", "cd /worker && python worker_health_v4.py"] + envFrom: + - configMapRef: + name: siibra-api-common + env: + - name: SIIBRA_CACHEDIR + value: "/siibra-api-volume/{{- include "siibra-api.cache-dir" $ -}}" + - name: SIIBRA_API_NAMESPACE + value: {{ $.Values.sapiFlavor }} + resources: + {{- toYaml $.Values.resourcesWorkerPod | nindent 12 }} + {{- with $.Values.volumeMounts }} + volumeMounts: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- with $.Values.volumes }} + volumes: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $.Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $.Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $.Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} diff --git a/.helm/siibra-api/values.yaml b/.helm/siibra-api/values.yaml index 678596e4..2c6fe6ca 100644 --- a/.helm/siibra-api/values.yaml +++ b/.helm/siibra-api/values.yaml @@ -4,8 +4,8 @@ replicaCount: 1 -sapiVersion: "0.3.15" # "latest" or "0.3.15" -sapiWorkerQueues: ["core", "features", "volumes", "compounds", "vocabularies"] +sapiVersion: "0.3.18" # "latest" or "0.3.15" +sapiWorkerQueues: ["core", "features", "compounds", "vocabularies"] sapiFlavor: "prod" # could be prod, rc, latest, etc image: diff --git a/api/common/data_handlers/__init__.py b/api/common/data_handlers/__init__.py index 58988c6a..9f628446 100644 --- a/api/common/data_handlers/__init__.py +++ b/api/common/data_handlers/__init__.py @@ -1,6 +1,5 @@ from . import core from . import features -from . import volumes from . import compounds from ...siibra_api_config import ROLE diff --git a/api/common/data_handlers/volumes/__init__.py b/api/common/data_handlers/volumes/__init__.py deleted file mode 100644 index 962bc166..00000000 --- a/api/common/data_handlers/volumes/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from . 
import parcellationmap \ No newline at end of file diff --git a/api/common/data_handlers/volumes/parcellationmap.py b/api/common/data_handlers/volumes/parcellationmap.py deleted file mode 100644 index dfcad64d..00000000 --- a/api/common/data_handlers/volumes/parcellationmap.py +++ /dev/null @@ -1,203 +0,0 @@ -from api.common import data_decorator, get_filename, NotFound -from api.models.volumes.volume import MapType -from api.siibra_api_config import ROLE -from typing import Union, Dict, Tuple - -@data_decorator(ROLE) -def get_map(parcellation_id: str, space_id: str, maptype: Union[MapType, str]) -> Dict: - """Get a map instance, based on specification - - Args: - parcellation_id: lookup id of the parcellation of the map - space_id: lookup id of the space of the map - maptype: maptype, either LABELLED or STATISTICAL - - Returns: - Requested map instance, serialized into dict - - Raises: - AssertionError: if the supplied maptype is invalid type - NotFound: Map with the specification not found - """ - import siibra - from api.serialization.util import instance_to_model - - maptype_string = None - # check maptype name and value both matches - if isinstance(maptype, MapType): - assert maptype.name == maptype.value, f"str enum, expecting .name and .value to equal" - maptype_string = maptype.name - if isinstance(maptype, str): - maptype_string = maptype - - assert maptype_string is not None, f"maptype is neither MapType nor str" - - siibra_maptype = siibra.MapType[maptype_string] - assert siibra_maptype.name == maptype_string, f"Expecting maptype.name to match" - - returned_map = siibra.get_map(parcellation_id, space_id, siibra_maptype) - - if returned_map is None: - raise NotFound - - return instance_to_model( - returned_map - ).dict() - - -def cache_region_statistic_map(parcellation_id: str, region_id: str, space_id: str) -> Tuple[str, bool]: - """Retrieve and save regional statistical map (if necessary), and then return the path of the map. - - Args: - parcellation_id: lookup id of the parcellation of the map - region_id: lookup id of the region of the map - space_id: lookup id of the space of the map - - Returns: - path to statistical map, if a cached file is returned - """ - import os - full_filename = get_filename("statistical_map", parcellation_id, region_id, space_id, ext=".nii.gz") - if os.path.isfile(full_filename): - return full_filename, True - - import siibra - import nibabel as nib - error_text = f"Map with parc id '{parcellation_id}', space id '{space_id}'" - - stat_map = siibra.get_map(parcellation_id, space_id, siibra.MapType.STATISTICAL) - assert stat_map is not None, f"{error_text} returns None" - - volume_data = stat_map.fetch(region=region_id) - - error_text = f"{error_text}, with region_id '{region_id}'" - assert isinstance(volume_data, nib.Nifti1Image), f"{error_text}, volume provided is not of type Nifti1Image" - - nib.save(volume_data, full_filename) - import json - import time - with open(f"{full_filename}.{str(int(time.time()))}.json", "w") as fp: - json.dump({ - "prefix": "statistical_map", - "parcellation_id": parcellation_id, - "region_id": region_id, - "space_id": space_id, - }, fp=fp, indent="\t") - return full_filename, False - -@data_decorator(ROLE) -def get_region_statistic_map(parcellation_id: str, region_id: str, space_id: str): - """Retrieve and save regional statistical map (if necessary), and then return the path of the map. 
- - Args: - parcellation_id: lookup id of the parcellation of the map - region_id: lookup id of the region of the map - space_id: lookup id of the space of the map - - Returns: - path to statistical map, if a cached file is returned - """ - return cache_region_statistic_map(parcellation_id, region_id, space_id) - -@data_decorator(ROLE) -def get_region_statistic_map_info(parcellation_id: str, region_id: str, space_id: str): - """Retrieve and save regional statistical map (if necessary), and then return the path of the map. - - Args: - parcellation_id: lookup id of the parcellation of the map - region_id: lookup id of the region of the map - space_id: lookup id of the space of the map - - Returns: - dict of min an max of the statistical map - """ - full_filename, _cache_flag = cache_region_statistic_map(parcellation_id, region_id, space_id) - - import nibabel as nib - import numpy as np - - nii = nib.load(full_filename) - data = nii.get_fdata() - return { - "min": np.min(data), - "max": np.max(data), - } - -@data_decorator(ROLE) -def get_parcellation_labelled_map(parcellation_id: str, space_id: str, region_id:str=None): - """Retrieve and save labelled map / regional mask (if necessary), and then return the path of the map. - - Args: - parcellation_id: lookup id of the parcellation of the map - region_id: lookup id of the region of the map - space_id: lookup id of the space of the map - - Returns: - path to labelled map/regional mask, if a cached file is returned - """ - import os - full_filename = get_filename("labelled_map", parcellation_id, space_id, region_id if region_id else "", ext=".nii.gz") - if os.path.isfile(full_filename): - return full_filename, True - - import siibra - import nibabel as nib - error_text = f"Map with parc id '{parcellation_id}', space id '{space_id}'" - - volume_data = None - if region_id is not None: - region = siibra.get_region(parcellation_id, region_id) - volume_data = region.fetch_regional_map(space_id, siibra.MapType.LABELLED) - else: - labelled_map = siibra.get_map(parcellation_id, space_id, siibra.MapType.LABELLED) - assert labelled_map is not None, f"{error_text} returns None" - volume_data = labelled_map.fetch() - - assert isinstance(volume_data, nib.Nifti1Image), f"{error_text}, volume provided is not of type Nifti1Image" - - nib.save(volume_data, full_filename) - import json - import time - with open(f"{full_filename}.{str(int(time.time()))}.json", "w") as fp: - json.dump({ - "prefix": "labelled_map", - "parcellation_id": parcellation_id, - "space_id": space_id, - "region_id": region_id, - }, fp=fp, indent="\t") - return full_filename, False - -@data_decorator(ROLE) -def get_resampled_map(parcellation_id: str, space_id: str): - """Retrieve and save a labelled map, resampled in space (if necessary), and then return the path of the map. - - Args: - parcellation_id: lookup id of the parcellation of the map - space_id: lookup id of the target space of the sampled map - - Returns: - path to statistical map, if a cached file is returned - """ - import os - full_filename = get_filename("resampled_map", parcellation_id, space_id, ext=".nii.gz") - if os.path.isfile(full_filename): - return full_filename, True - - import siibra - import nibabel as nib - parcellation: siibra.core.parcellation.Parcellation = siibra.parcellations[parcellation_id] - parcellation_map = parcellation.get_map(siibra.spaces[space_id], siibra.MapType.LABELLED) - nii = parcellation_map.get_resampled_template() - - assert isinstance(nii, nib.Nifti1Image), f"resample failed... 
returned not of type nii" - - import time - import json - nib.save(nii, full_filename) - with open(f"{full_filename}.{str(int(time.time()))}.json", "w") as fp: - json.dump({ - "prefix": "resampled_map", - "parcellation_id": parcellation_id, - "space_id": space_id, - }, indent="\t", fp=fp) - return full_filename, False diff --git a/api/server/volumes/parcellationmap.py b/api/server/volumes/parcellationmap.py index dca4050b..88c9baea 100644 --- a/api/server/volumes/parcellationmap.py +++ b/api/server/volumes/parcellationmap.py @@ -10,8 +10,7 @@ from api.models.volumes.volume import MapType from api.models._commons import DataFrameModel from api.common import router_decorator, get_filename, logger, NotFound -from api.common.data_handlers.volumes.parcellationmap import get_map, get_region_statistic_map, get_region_statistic_map_info, get_parcellation_labelled_map, get_resampled_map -from new_api.v3.data_handlers.map import assign +from new_api.v3.data_handlers.map import assign, get_map, statistical_map_info_json, statistical_map_nii_gz, labelled_map_nii_gz, resampled_template from api.server.util import SapiCustomRoute import os @@ -24,22 +23,22 @@ @router.get("", response_model=MapModel) @version(*FASTAPI_VERSION) @router_decorator(ROLE, func=get_map) -def get_siibra_map(parcellation_id: str, space_id: str, map_type: MapType, *, func): +def get_siibra_map(parcellation_id: str, space_id: str, map_type: MapType, extra_spec: str= "", *, func): """Get map according to specification""" if func is None: raise HTTPException(500, f"func: None passsed") - return func(parcellation_id, space_id, map_type) + return func(parcellation_id, space_id, map_type, extra_spec) @router.get("/resampled_template", response_class=FileResponse, tags=TAGS, description=""" Return a resampled template volume, based on labelled parcellation map. """) @version(*FASTAPI_VERSION) -@router_decorator(ROLE, func=get_resampled_map) +@router_decorator(ROLE, func=resampled_template) def get_resampled_map(parcellation_id: str, space_id: str, *, func): """Get resampled map according to specification""" if func is None: raise HTTPException(500, f"func: None passsed") - + raise HTTPException(501, "Not yet implemented. If you are using this endpoint, please contact us.") headers={ "content-type": "application/octet-stream", "content-disposition": f'attachment; filename="labelled_map.nii.gz"' @@ -60,7 +59,7 @@ def get_resampled_map(parcellation_id: str, space_id: str, *, func): region_id MAY refer to ANY region on the region hierarchy, and a combined mask will be returned. """) @version(*FASTAPI_VERSION) -@router_decorator(ROLE, func=get_parcellation_labelled_map) +@router_decorator(ROLE, func=labelled_map_nii_gz) def get_parcellation_labelled_map(parcellation_id: str, space_id: str, region_id: str=None, *, func): """Get labelled map according to specification""" if func is None: @@ -84,7 +83,7 @@ def get_parcellation_labelled_map(parcellation_id: str, space_id: str, region_id region_id MUST refer to leaf region on the region hierarchy. 
""") @version(*FASTAPI_VERSION) -@router_decorator(ROLE, func=get_region_statistic_map) +@router_decorator(ROLE, func=statistical_map_nii_gz) def get_region_statistical_map(parcellation_id: str, space_id: str, region_id: str, *, func): """Get statistical map according to specification""" if func is None: @@ -107,7 +106,7 @@ class StatisticModelInfo(BaseModel): @router.get("/statistical_map.info.json", response_model=StatisticModelInfo, tags=TAGS) @version(*FASTAPI_VERSION) -@router_decorator(ROLE, func=get_region_statistic_map_info) +@router_decorator(ROLE, func=statistical_map_info_json) def get_region_statistical_map_metadata(parcellation_id: str, space_id: str, region_id: str, *, func): """Get metadata of statistical map according to specification""" if func is None: @@ -116,7 +115,7 @@ def get_region_statistical_map_metadata(parcellation_id: str, space_id: str, reg data = func(parcellation_id, region_id, space_id) return StatisticModelInfo(**data) -@router.get("/assign", response_model=DataFrameModel, tags=[TAGS]) +@router.get("/assign", response_model=DataFrameModel, tags=TAGS) @version(*FASTAPI_VERSION) @router_decorator(ROLE, func=assign) def get_assign_point(parcellation_id: str, space_id: str, point: str, assignment_type: str="statistical", sigma_mm: float=0., *, func): diff --git a/new_api/common/__init__.py b/new_api/common/__init__.py new file mode 100644 index 00000000..87c79f07 --- /dev/null +++ b/new_api/common/__init__.py @@ -0,0 +1 @@ +from .storage import get_filename diff --git a/new_api/common/decorators.py b/new_api/common/decorators.py index f3817963..819d8c6d 100644 --- a/new_api/common/decorators.py +++ b/new_api/common/decorators.py @@ -19,6 +19,8 @@ def data_decorator(role: ROLE_TYPE): Raises: ImportError: Celery not installed, but role is set to either `worker` or `server` """ + if role != "worker": + logger.warning(f"Role was set to be {role}, calls to map/* endpoints will fail. See https://github.com/FZJ-INM1-BDA/siibra-api/issues/151") def outer_wrapper(fn): if role == "all": return fn diff --git a/new_api/common/storage.py b/new_api/common/storage.py new file mode 100644 index 00000000..1e60bdcf --- /dev/null +++ b/new_api/common/storage.py @@ -0,0 +1,19 @@ +from new_api.siibra_api_config import SIIBRA_API_SHARED_DIR +import hashlib +import os +from typing import List + +def get_filename(*args: List[str], ext:str=None) -> str: + """Get a hashed filename based on positional arguments. + + Will also honor `SIIBRA_API_SHARED_DIR` in config, if defined. 
+ + Args: + args: positional arguments + ext: extension + + Returns: + hashed path, in the form of `{SIIBRA_API_SHARED_DIR}/{hash(*args)} + ('.{ext}' if ext else '')` + """ + assert all(isinstance(arg, str) for arg in args), f"all args to get_filename must be str" + return os.path.join(SIIBRA_API_SHARED_DIR, hashlib.md5("".join(args).encode("utf-8")).hexdigest() + (f".{ext.lstrip('.')}") if ext else "") diff --git a/new_api/data_handlers/__init__.py b/new_api/data_handlers/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/new_api/data_handlers/maps.py b/new_api/data_handlers/maps.py new file mode 100644 index 00000000..61df7a77 --- /dev/null +++ b/new_api/data_handlers/maps.py @@ -0,0 +1,124 @@ +from typing import Tuple, Union +from pathlib import Path +import json +import time +import os + +from new_api.common import get_filename + +def cache_region_statistic_map(parcellation_id: str, region_id: str, space_id: str, extra_spec: str= "", *, no_cache=False) -> Tuple[str, bool, str]: + """Retrieve and save regional statistical map (if necessary), and then return the path of the map. + + Args: + parcellation_id: lookup id of the parcellation of the map + region_id: lookup id of the region of the map + space_id: lookup id of the space of the map + + Returns: + path to statistical map, if a cached file is returned, warning text, if any + """ + import siibra + import nibabel as nib + + full_filename = get_filename("statistical_map", parcellation_id, region_id, space_id, ext=".nii.gz") + warning_texts = None + if not no_cache and os.path.isfile(full_filename): + try: + warning_texts = Path(f"{full_filename}.warning.txt").read_text() + except: + ... + return full_filename, True, warning_texts + + error_text = f"Map with parc id '{parcellation_id}', space id '{space_id}'" + + maps = siibra.find_maps(parcellation_id, space_id, "statistical", extra_spec) + assert len(maps) > 0, f"{error_text} returns None" + + if len(maps) > 1: + warning_texts = f"Multiple ({len(maps)}) maps found" + + volume_data = maps[0].fetch(region=region_id) + + error_text = f"{error_text}, with region_id '{region_id}'" + assert isinstance(volume_data, nib.Nifti1Image), f"{error_text}, volume provided is not of type Nifti1Image" + + nib.save(volume_data, full_filename) + + if warning_texts: + with open(f"{full_filename}.warning.txt", "w") as fp: + fp.write(warning_texts) + + with open(f"{full_filename}.{str(int(time.time()))}.json", "w") as fp: + json.dump({ + "prefix": "statistical_map", + "parcellation_id": parcellation_id, + "region_id": region_id, + "space_id": space_id, + }, fp=fp, indent="\t") + return full_filename, False, warning_texts + + +def cache_parcellation_labelled_map(parcellation_id: str, space_id: str, region_id:Union[str, None]=None, *, no_cache=False) -> Tuple[str, bool, str]: + """Retrieve and save labelled map / regional mask (if necessary), and then return the path of the map. 
+ + Args: + parcellation_id: lookup id of the parcellation of the map + region_id: lookup id of the region of the map + space_id: lookup id of the space of the map + + Returns: + path to labelled map/regional mask, if a cached file is returned, warning text, if any + """ + full_filename = get_filename("labelled_map", parcellation_id, space_id, region_id if region_id else "", ext=".nii.gz") + if not no_cache and os.path.isfile(full_filename): + return full_filename, True, None + + import siibra + import nibabel as nib + error_text = f"Map with parc id '{parcellation_id}', space id '{space_id}'" + + volume_data = None + if region_id is not None: + region = siibra.get_region(parcellation_id, region_id) + volume_data = region.fetch_regional_map(space_id, "labelled") + else: + labelled_map = siibra.get_map(parcellation_id, space_id, "labelled") + assert labelled_map is not None, f"{error_text} returns None" + volume_data = labelled_map.fetch() + + assert isinstance(volume_data, nib.Nifti1Image), f"{error_text}, volume provided is not of type Nifti1Image" + + nib.save(volume_data, full_filename) + with open(f"{full_filename}.{str(int(time.time()))}.json", "w") as fp: + json.dump({ + "prefix": "labelled_map", + "parcellation_id": parcellation_id, + "space_id": space_id, + "region_id": region_id, + }, fp=fp, indent="\t") + return full_filename, False, None + + +def cache_resampled_map(parcellation_id: str, space_id: str, *, no_cache: bool): + full_filename = get_filename("resampled_map", parcellation_id, space_id, ext=".nii.gz") + if not no_cache and os.path.isfile(full_filename): + return full_filename, True, None + + import nibabel as nib + import siibra + from siibra.commons.maps import resample_img_to_img + + mp = siibra.get_map(parcellation_id, space_id, "labelled") + nii = mp.fetch() + space = siibra.get_space(space_id) + tmpl_nii = space.fetch_template() + resampled = resample_img_to_img(nii, tmpl_nii) + + nib.save(resampled, full_filename) + with open(f"{full_filename}.{str(int(time.time()))}.json", "w") as fp: + json.dump({ + "prefix": "resampled_map", + "parcellation_id": parcellation_id, + "space_id": space_id, + }, fp=fp, indent="\t") + return full_filename, False, None diff --git a/new_api/v3/data_handlers/map/__init__.py b/new_api/v3/data_handlers/map/__init__.py index dee7a072..eef55aed 100644 --- a/new_api/v3/data_handlers/map/__init__.py +++ b/new_api/v3/data_handlers/map/__init__.py @@ -1,6 +1,8 @@ -from typing import Literal, List +from typing import Literal, List, Union +from new_api.data_handlers.maps import cache_region_statistic_map, cache_parcellation_labelled_map, cache_resampled_map from new_api.v3.serialization import instance_to_model +from new_api.v3.models.volumes.volume import MapType from new_api.common.exceptions import NotFound from new_api.common.decorators import data_decorator from new_api.siibra_api_config import ROLE @@ -21,6 +23,76 @@ def assign(parcellation_id: str, space_id: str, point: str, assignment_type: str result = mp.lookup_points(point) return instance_to_model(result, detail=True).dict() +@data_decorator(ROLE) +def get_map(parcellation_id: str, space_id: str, maptype: Union[MapType, str], extra_spec: str): + """Get a map instance, based on specification + + Args: + parcellation_id: lookup id of the parcellation of the map + space_id: lookup id of the space of the map + maptype: maptype, either LABELLED or STATISTICAL + + Returns: + Requested map instance, serialized into dict + + Raises: + AssertionError: if the supplied maptype is invalid type + 
NotFound: Map with the specification not found + """ + import siibra + + # check maptype name and value both matches + if isinstance(maptype, MapType): + assert maptype.name == maptype.value, f"str enum, expecting .name and .value to equal" + maptype = maptype.name + + assert maptype is not None, f"maptype is neither MapType nor str" + + returned_maps = siibra.find_maps(parcellation_id, space_id, maptype=maptype, extra_spec=extra_spec) + + if len(returned_maps) == 0: + raise NotFound(f"get_map with spec {parcellation_id=!r}, {space_id=!r}, {maptype=!r}, {extra_spec=!r} found no map.") + return instance_to_model(returned_maps[0], detail=True) + +@data_decorator(ROLE) +def statistical_map_nii_gz(parcellation_id: str, space_id: str, region_id: str, extra_spec: str, *, no_cache: bool): + filename, return_cached, warningtext = cache_region_statistic_map(parcellation_id, region_id, space_id, extra_spec, no_cache=no_cache) + return filename, return_cached + +@data_decorator(ROLE) +def statistical_map_info_json(parcellation_id: str, space_id: str, region_id: str, extra_spec: str, *, no_cache: bool): + filename, return_cached, warningtext = cache_region_statistic_map(parcellation_id, region_id, space_id, extra_spec, no_cache=no_cache) + + import nibabel as nib + import numpy as np + nii = nib.load(filename) + data = nii.get_fdata() + return { + "min": np.min(data), + "max": np.max(data), + } + +@data_decorator(ROLE) +def labelled_map_nii_gz(parcellation_id: str, space_id: str, region_id: str=None): + """Retrieve and save labelled map / regional mask (if necessary), and then return the path of the map. + + Args: + parcellation_id: lookup id of the parcellation of the map + region_id: lookup id of the region of the map + space_id: lookup id of the space of the map + + Returns: + path to labelled map/regional mask, if a cached file is returned + """ + + full_filename, return_cached, warningtext = cache_parcellation_labelled_map(parcellation_id, space_id, region_id) + return full_filename, return_cached + +@data_decorator(ROLE) +def resampled_template(parcellation_id: str, space_id: str): + full_filename, return_cached, warningtext = cache_resampled_map(parcellation_id, space_id) + return full_filename, return_cached + @register_warmup_fn() def warmup_maps(): import siibra @@ -31,4 +103,3 @@ def warmup_maps(): map._get_readable_sparseindex(warmup=True) except Exception as e: logger.warning(f"Failed to save sparseindex: {str(e)}") - diff --git a/new_api/v3/models/volumes/volume.py b/new_api/v3/models/volumes/volume.py index 83903ef6..ed4a1eaf 100644 --- a/new_api/v3/models/volumes/volume.py +++ b/new_api/v3/models/volumes/volume.py @@ -29,5 +29,5 @@ class MapType(str, Enum): """MapType Exact match to MapType in siibra, to avoid dependency on siibra""" - LABELLED = "LABELLED" - STATISTICAL = "STATISTICAL" + labelled = "labelled" + statistical = "statistical" diff --git a/new_api/v3/serialization/__init__.py b/new_api/v3/serialization/__init__.py index 964bb8d4..659828ee 100644 --- a/new_api/v3/serialization/__init__.py +++ b/new_api/v3/serialization/__init__.py @@ -6,6 +6,7 @@ def instance_to_model(instance: Any, **kwargs): from . import _common + from . import map if instance is None: return None diff --git a/new_api/v3/serialization/_common.py b/new_api/v3/serialization/_common.py index 2c9dd58d..c1228181 100644 --- a/new_api/v3/serialization/_common.py +++ b/new_api/v3/serialization/_common.py @@ -4,7 +4,7 @@ from new_api.v3.models._commons import SeriesModel, DataFrameModel -from . 
import instance_to_model +from . import instance_to_model, serialize # will affect how dtype attribute is serialised serializable_dtype = ( diff --git a/new_api/v3/serialization/map.py b/new_api/v3/serialization/map.py new file mode 100644 index 00000000..8600a558 --- /dev/null +++ b/new_api/v3/serialization/map.py @@ -0,0 +1,89 @@ +from typing import Union +from collections import defaultdict + +from . import serialize + +from new_api.v3.models.volumes.volume import VolumeModel +from new_api.v3.models.volumes.parcellationmap import MapModel +from new_api.v3.models.core._concept import SiibraPublication +from new_api.v3.models._retrieval.datasets import EbrainsDatasetModel, EbrainsDsPerson + +from siibra.atlases.parcellationmap import Map +from siibra.attributes.descriptions import Name, EbrainsRef +from siibra.attributes.dataitems.base import Archive +from siibra.attributes.dataitems.volume.base import Volume, MESH_FORMATS, IMAGE_FORMATS +from siibra.factory.livequery.ebrains import EbrainsQuery + +def parse_archive_options(archive: Union[Archive, None]): + if archive is None: + return "", "" + return archive["format"], f" {archive['file']}" + +@serialize(Map) +def map_to_model(mp: Map, **kwargs): + + # technically works for all atlas concepts + id = mp.ID + name_attr = mp._get(Name) + name = name_attr.value + shortname = name_attr.shortform + description = mp.description + publications = [SiibraPublication(citation=pub.text, url=pub.value) + for pub in mp.publications] + EbrainsDatasetModel(id="", name="", urls=[]) + + + got_dsv = [ EbrainsQuery.get_dsv(dsv) + for ref in mp._find(EbrainsRef) + for dsv in ref._dataset_verion_ids] + + # TODO check for any non empty entry of custodian and transform properly + datasets = [EbrainsDatasetModel(id=dsv["id"], + name=dsv["fullName"], + urls=[{"url": dsv["homepage"]}], + description=dsv["description"], + contributors=[EbrainsDsPerson(id=author["id"], + identifier=author["id"], + shortName=author["shortName"], + name=author["fullName"]) for author in dsv["author"]], + custodians=[]) for dsv in got_dsv] + + # specific to map model + species = mp.species + + # TODO fix datasets + all_volumes = mp._find(Volume) + volumes = [VolumeModel(name="", + formats=[vol.format], + provides_mesh=vol.format in MESH_FORMATS, + provides_image=vol.format in IMAGE_FORMATS, + fragments={}, + variant=None, + provided_volumes={ + f"{parse_archive_options(vol.archive_options)[0]}{vol.format}": f"{vol.url}{parse_archive_options(vol.archive_options)[0]}" + }, + space={ + "@id": vol.space_id + }, + datasets=[]) for vol in all_volumes] + + indices = defaultdict(list) + for idx, vol in enumerate(all_volumes): + for regionname, value in vol.mapping.items(): + new_index = { + "volume": idx + } + if value.get("label"): + new_index["label"] = value.get("label") + indices[regionname].append(new_index) + return MapModel( + id=id, + name=name, + shortname=shortname, + description=description, + publications=publications, + datasets=datasets, + species=species, + indices=indices, + volumes=volumes, + ) diff --git a/worker-v4.dockerfile b/worker-v4.dockerfile index dbd4a0e5..1109eef3 100644 --- a/worker-v4.dockerfile +++ b/worker-v4.dockerfile @@ -14,4 +14,4 @@ ENV SIIBRA_API_ROLE=worker HEALTHCHECK --interval=60s --timeout=10s --start-period=120s --retries=3 \ CMD [ "python", "worker_health_4.py" ] -ENTRYPOINT celery -A new_api.worker.app worker -l WARNING +ENTRYPOINT celery -A new_api.worker.app worker -l WARNING -O fair
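
Note on the cache layout used by the new handlers: cached artefacts are keyed by the new new_api.common.storage.get_filename helper, which hashes its positional string arguments into a single path under the shared volume. A minimal sketch of the scheme follows; the SIIBRA_API_SHARED_DIR value shown here is an example, not taken from the configuration.

    import hashlib
    import os

    SIIBRA_API_SHARED_DIR = "/siibra-api-volume"  # example value; configured elsewhere

    def cache_path(*args: str, ext: str = None) -> str:
        # md5 over the concatenated positional arguments, plus an optional extension
        digest = hashlib.md5("".join(args).encode("utf-8")).hexdigest()
        suffix = f".{ext.lstrip('.')}" if ext else ""
        return os.path.join(SIIBRA_API_SHARED_DIR, digest + suffix)

    # Statistical maps, for example, are keyed on prefix + parcellation + region + space:
    # cache_path("statistical_map", parcellation_id, region_id, space_id, ext=".nii.gz")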
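
The worker-side flow for statistical maps is: resolve the map with siibra-python v2, save the NIfTI to the shared volume, write a sidecar .json (plus a .warning.txt when more than one map matches the spec), and return the cached file on subsequent calls. The sketch below shows how a server-side caller could serve the result; the function name and return values match new_api/data_handlers/maps.py, while the response headers, filename, and warning header are illustrative only.

    from fastapi.responses import FileResponse

    from new_api.data_handlers.maps import cache_region_statistic_map

    def serve_statistical_map(parcellation_id: str, region_id: str, space_id: str, extra_spec: str = ""):
        path, from_cache, warning = cache_region_statistic_map(
            parcellation_id, region_id, space_id, extra_spec
        )
        headers = {
            "content-type": "application/octet-stream",
            "content-disposition": 'attachment; filename="statistical_map.nii.gz"',  # filename illustrative
        }
        if warning:
            # hypothetical header; the patch only persists the warning next to the cached file
            headers["x-siibra-api-warning"] = warning
        return FileResponse(path, headers=headers)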
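
statistical_map.info.json is computed directly from the cached NIfTI. A short sketch mirroring statistical_map_info_json in new_api/v3/data_handlers/map/__init__.py:

    import nibabel as nib
    import numpy as np

    from new_api.data_handlers.maps import cache_region_statistic_map

    def statistical_map_info(parcellation_id: str, region_id: str, space_id: str) -> dict:
        # first call populates the cache, later calls reuse the saved file
        path, _cached, _warning = cache_region_statistic_map(parcellation_id, region_id, space_id)
        data = nib.load(path).get_fdata()
        return {"min": np.min(data), "max": np.max(data)}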
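
GET /map now resolves maps through siibra-python v2's find_maps, with the new extra_spec query parameter narrowing the match; the first hit is serialized, and multiple hits are allowed but flagged with a warning. A sketch of the lookup as used in new_api/v3/data_handlers/map/__init__.py; the parcellation and space specs below are placeholders, not values from the patch.

    import siibra

    parcellation_id = "julich brain 3.1"   # placeholder spec
    space_id = "mni152"                    # placeholder spec

    maps = siibra.find_maps(parcellation_id, space_id, maptype="statistical", extra_spec="")
    if len(maps) == 0:
        # the data handler raises NotFound in this case
        raise RuntimeError("no map matched the specification")
    selected = maps[0]   # additional matches only produce a warning, not an error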
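
The new serializer is registered with @serialize(Map) and picked up by instance_to_model once new_api.v3.serialization imports the map module. The registry internals are not part of this patch; the following is only a plausible shape of such a type-dispatch registry, shown to clarify how map_to_model gets invoked.

    from typing import Any, Callable, Dict, Type

    _REGISTRY: Dict[Type, Callable] = {}

    def serialize(cls: Type):
        # decorator registering a handler for instances of cls (assumed behavior)
        def register(fn: Callable):
            _REGISTRY[cls] = fn
            return fn
        return register

    def instance_to_model(instance: Any, **kwargs):
        for cls, handler in _REGISTRY.items():
            if isinstance(instance, cls):
                return handler(instance, **kwargs)
        raise TypeError(f"No serializer registered for {type(instance)}")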