fix: revert to siibra-python private hash
xgui3783 committed Oct 4, 2024
1 parent a771984 commit 25bc919
Showing 3 changed files with 43 additions and 130 deletions.
20 changes: 10 additions & 10 deletions new_api/v3/data_handlers/map/__init__.py
@@ -11,20 +11,20 @@
 from new_api.warmup import register_warmup_fn
 
 @data_decorator(ROLE)
-def assign(parcellation_id: str, space_id: str, point: str, assignment_type: str=Literal["statistical", "labelled"], sigma_mm: float=0., name: str=""):
+def assign(parcellation_id: str, space_id: str, point: str, assignment_type: str=Literal["statistical", "labelled"], sigma_mm: float=0., extra_specs: str=""):
     import siibra
     from siibra.attributes.locations.point import parse_coordinate, Point
     coordinate = parse_coordinate(point)
     point = Point(coordinate=coordinate, space_id=space_id, sigma=sigma_mm)
-    maps = siibra.find_maps(parcellation_id, space_id, assignment_type, name)
+    maps = siibra.find_maps(parcellation_id, space_id, assignment_type, extra_specs)
     if len(maps) == 0:
-        raise NotFound(f"map with {parcellation_id=!r}, {space_id=!r}, {assignment_type=!r}, {name=!r} not found")
+        raise NotFound(f"map with {parcellation_id=!r}, {space_id=!r}, {assignment_type=!r}, {extra_specs=!r} not found")
     mp = maps[0]
     result = mp.lookup_points(point)
     return instance_to_model(result, detail=True).dict()
 
 @data_decorator(ROLE)
-def get_map(parcellation_id: str, space_id: str, maptype: Union[MapType, str], name: str=""):
+def get_map(parcellation_id: str, space_id: str, maptype: Union[MapType, str], extra_spec: str=""):
     """Get a map instance, based on specification
     Args:
@@ -49,20 +49,20 @@ def get_map(parcellation_id: str, space_id: str, maptype: Union[MapType, str], name: str=""):
     assert maptype is not None, f"maptype is neither MapType nor str"
     maptype = maptype.lower()
 
-    returned_maps = siibra.find_maps(parcellation_id, space_id, maptype=maptype, name=name)
+    returned_maps = siibra.find_maps(parcellation_id, space_id, maptype=maptype, extra_spec=extra_spec)
 
     if len(returned_maps) == 0:
-        raise NotFound(f"get_map with spec {parcellation_id=!r}, {space_id=!r}, {maptype=!r}, {name=!r} found no map.")
+        raise NotFound(f"get_map with spec {parcellation_id=!r}, {space_id=!r}, {maptype=!r}, {extra_spec=!r} found no map.")
     return instance_to_model(returned_maps[0], detail=True).dict()
 
 @data_decorator(ROLE)
-def statistical_map_nii_gz(parcellation_id: str, region_id: str, space_id: str, name: str="", *, no_cache: bool=False):
-    filename, return_cached, warningtext = cache_region_statistic_map(parcellation_id, region_id, space_id, name, no_cache=no_cache)
+def statistical_map_nii_gz(parcellation_id: str, region_id: str, space_id: str, extra_spec: str="", *, no_cache: bool=False):
+    filename, return_cached, warningtext = cache_region_statistic_map(parcellation_id, region_id, space_id, extra_spec, no_cache=no_cache)
     return filename, return_cached
 
 @data_decorator(ROLE)
-def statistical_map_info_json(parcellation_id: str, region_id: str, space_id: str, name: str="", *, no_cache: bool=False):
-    filename, return_cached, warningtext = cache_region_statistic_map(parcellation_id, region_id, space_id, name, no_cache=no_cache)
+def statistical_map_info_json(parcellation_id: str, region_id: str, space_id: str, extra_spec: str="", *, no_cache: bool=False):
+    filename, return_cached, warningtext = cache_region_statistic_map(parcellation_id, region_id, space_id, extra_spec, no_cache=no_cache)
 
     import nibabel as nib
     import numpy as np
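
Note on the file above: all four handlers forward the renamed filter verbatim to siibra, so the only behavioural change is the keyword the pinned siibra-python hash expects (extra_spec/extra_specs instead of name). A minimal sketch of exercising the reverted get_map, assuming the @data_decorator(ROLE)-wrapped function runs in-process and simply returns; the parcellation and space specs below are illustrative placeholders:

    # Hypothetical direct call; the spec strings are placeholders.
    from new_api.v3.data_handlers.map import get_map

    map_model = get_map(
        parcellation_id="julich brain 3.1",  # any spec siibra.find_maps accepts
        space_id="mni152",
        maptype="statistical",
        extra_spec="",  # default; an empty string applies no extra filtering
    )
    print(sorted(map_model))  # a plain dict built by instance_to_model(...).dict()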
151 changes: 32 additions & 119 deletions new_api/v3/serialization/map.py
@@ -5,7 +5,6 @@

 from . import serialize
 
-from new_api.siibra_api_config import SIIBRA_API_REMAP_PROVIDERS
 from new_api.v3.models.volumes.volume import VolumeModel
 from new_api.v3.models.volumes.parcellationmap import MapModel
 from new_api.v3.models.core._concept import SiibraPublication
@@ -14,10 +13,9 @@
 from siibra.cache import fn_call_cache
 from siibra.atlases.parcellationmap import Map
 from siibra.atlases.sparsemap import SparseMap
-from siibra.attributes.descriptions import Name, EbrainsRef, AttributeMapping
-from siibra.attributes.dataproviders.base import Archive
-from siibra.attributes.dataproviders.volume.base import VolumeProvider
-from siibra.operations.volume_fetcher.base import VolumeFormats
+from siibra.attributes.descriptions import Name, EbrainsRef
+from siibra.attributes.dataitems.base import Archive
+from siibra.attributes.dataitems.volume.base import Volume, MESH_FORMATS, IMAGE_FORMATS
 from siibra.factory.livequery.ebrains import EbrainsQuery, DatasetVersion
 
 def parse_archive_options(archive: Union[Archive, None]):
@@ -39,8 +37,6 @@ def parse_archive_options(archive: Union[Archive, None]):
"ctx-rh-": "right ",
}

FSA_ID = "minds/core/referencespace/v1.0.0/tmp-fsaverage"


def clear_name(name: str):
"""clean up a region name to the for matching"""
@@ -51,24 +47,9 @@
         result = result.replace(search, repl)
     return " ".join(w for w in result.split(" ") if len(w))
 
-def remap_url(url: str):
-    for from_host, to_host in SIIBRA_API_REMAP_PROVIDERS.items():
-        url = url.replace(from_host, to_host)
-    return url
-
 @fn_call_cache
 def retrieve_dsv_ds(mp: Map):
-    list_dsvs = list({dsv for ref in mp._find(EbrainsRef) for dsv in ref._dataset_verion_ids})
-    ds_ids = [ref
-              for attr_mapping in mp._find(AttributeMapping)
-              if attr_mapping.ref_type == "openminds/Dataset"
-              for ref in attr_mapping.refs]
-    dsv_ids = [ref
-               for attr_mapping in mp._find(AttributeMapping)
-               if attr_mapping.ref_type == "openminds/DatasetVersion"
-               for ref in attr_mapping.refs]
-
-    unique_dsvs = set([*list_dsvs, *dsv_ids])
+    unique_dsvs = list({dsv for ref in mp._find(EbrainsRef) for dsv in ref._dataset_verion_ids})
     with ThreadPoolExecutor() as ex:
         got_dsv = list(
             tqdm(
@@ -81,13 +62,13 @@ def retrieve_dsv_ds(mp: Map):
                 leave=True
             )
         )
-        unique_ds = set([
-            is_version_of["id"].split("/")[-1]
-            for dsv in got_dsv
-            for is_version_of in dsv["isVersionOf"]
-        ])
-
-        unique_ds = unique_ds | set(ds_ids)
+        unique_ds = list(
+            {
+                is_version_of["id"].split("/")[-1]
+                for dsv in got_dsv
+                for is_version_of in dsv["isVersionOf"]
+            }
+        )
         got_ds = list(
             tqdm(
                 ex.map(
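
The two hunks above leave the bulk-fetch machinery untouched: unique dataset-version ids are resolved concurrently through a ThreadPoolExecutor, with tqdm wrapping the lazy ex.map iterator for progress reporting. A self-contained sketch of that pattern, with a stand-in fetch_one in place of the EbrainsQuery helper this diff does not show:

    from concurrent.futures import ThreadPoolExecutor
    from tqdm import tqdm

    def fetch_one(dsv_id: str) -> dict:
        # Stand-in for the real KG lookup; returns a minimal payload.
        return {"id": dsv_id, "isVersionOf": [{"id": f"kg/ds/{dsv_id}"}]}

    unique_dsvs = ["dsv-1", "dsv-2", "dsv-3"]  # illustrative ids
    with ThreadPoolExecutor() as ex:
        got_dsv = list(
            tqdm(
                ex.map(fetch_one, unique_dsvs),
                total=len(unique_dsvs),  # total= lets tqdm show progress for a generator
                leave=True,
            )
        )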
@@ -130,10 +111,9 @@ def dsv_id_to_model(id: str):
id = "https://kg.ebrains.eu/api/instances/" + id.replace("https://kg.ebrains.eu/api/instances/", "")
assert id in dsv_dict, f"{id} not found in dsv_dict"
dsv = dsv_dict[id]
urls = [{"url": doi["identifier"]} for doi in dsv.get("doi", [])]
return EbrainsDatasetModel(id=id,
name=dsv["fullName"] or "",
urls=urls,
urls=[{"url": dsv["homepage"]}] if dsv["homepage"] else [],
description=get_description(dsv),
contributors=[EbrainsDsPerson(id=author["id"],
identifier=author["id"],
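
The change above swaps the source of urls on the serialized dataset model: the removed line derived them from the DSV's DOI identifiers, while the restored expression uses the homepage field (an empty list when the homepage is missing). Illustrated with a toy payload, with field shapes inferred from this diff rather than from the KG schema:

    dsv = {
        "homepage": "https://search.kg.ebrains.eu/instances/xyz",  # placeholder
        "doi": [{"identifier": "https://doi.org/10.xxxx/abcd"}],   # placeholder
    }
    urls_removed = [{"url": doi["identifier"]} for doi in dsv.get("doi", [])]
    urls_restored = [{"url": dsv["homepage"]}] if dsv["homepage"] else []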
@@ -151,109 +131,42 @@
     species = mp.species
 
     # TODO fix datasets
-    all_volumes = mp._find(VolumeProvider)
+    all_volumes = mp._find(Volume)
     volumes: List[VolumeModel] = []
 
-    indices = defaultdict(list)
-    volume_name_to_idx = defaultdict(list)
-
-    for idx, vol in enumerate(all_volumes):
-        volume_name_to_idx[vol.name].append(idx)
+    for vol in all_volumes:
         vol_ds: List[EbrainsDatasetModel] = []
 
-        for attr_mapping in mp._find(AttributeMapping):
-
-            ids = [uuid
-                   for uuid, mappings in attr_mapping.refs.items()
-                   # if mapping has "target": None or "target": vol.name
-                   if len({mapping.get("target") for mapping in mappings} & {vol.name, None}) > 0 ]
-            ebrains_ref = EbrainsRef(ids={ attr_mapping.ref_type: ids })
-
-            vol_ds.extend([dsv_id_to_model(dsv) for dsv in ebrains_ref._dataset_verion_ids])
-
+        if vol.id:
+            vol_ds = [dsv_id_to_model(dsv)
+                      for ref in mp._find(EbrainsRef)
+                      for dsv in ref._dataset_verion_ids
+                      if ref.annotates == vol.id]
 
         volumes.append(
             VolumeModel(name="",
                         formats=[vol.format],
-                        provides_mesh=vol.format in VolumeFormats.MESH_FORMATS,
-                        provides_image=vol.format in VolumeFormats.IMAGE_FORMATS,
+                        provides_mesh=vol.format in MESH_FORMATS,
+                        provides_image=vol.format in IMAGE_FORMATS,
                         fragments={},
                         variant=None,
                         provided_volumes={
-                            f"{parse_archive_options(vol.archive_options)[0]}{vol.format}": f"{remap_url(vol.url)}{parse_archive_options(vol.archive_options)[0]}"
+                            f"{parse_archive_options(vol.archive_options)[0]}{vol.format}": f"{vol.url}{parse_archive_options(vol.archive_options)[0]}"
                        },
                         space={
                             "@id": vol.space_id
                         },
                         datasets=vol_ds))
 
-
-    for regionname, mappings in mp.region_mapping.items():
-        for mapping in mappings:
-            target = mapping["target"]
-            assert target in volume_name_to_idx, f"target {target} not found in volume name {volume_name_to_idx}"
-            for idx in volume_name_to_idx[target]:
-                new_index = {
-                    "volume": idx
-                }
-                if mapping.get("label"):
-                    new_index["label"] = mapping.get("label")
-                indices[regionname].append(new_index)
-                indices[clear_name(regionname)].append(new_index)
-
-    if mp.space_id == FSA_ID:
-        assert len(all_volumes) == 2, f"Expected fsaverage to have 2 volumes, but got {len(all_volumes)}"
-
-        lh_vols = [v for v in all_volumes if "lh" in v.url]
-        rh_vols = [v for v in all_volumes if "rh" in v.url]
-        assert len(lh_vols) == 1, f"Expected to be one and only one lh volume, but got {len(lh_vols)}"
-        assert len(rh_vols) == 1, f"Expected to be one and only one rh volume, but got {len(rh_vols)}"
-
-        lh_vol = lh_vols[0]
-        rh_vol = rh_vols[0]
-
-        formats = list({lh_vol.format, rh_vol.format})
-        assert len(formats) == 1, f"Expected only one type of format, but got {formats}"
-        format = formats[0]
-        # assert lh_vol.archive_options is None and rh_vol.archive_options is None, f"Expected neither volume has archive options"
-
-        all_vol_ids = [vol.id for vol in all_volumes if vol.id]
-        all_vol_ds = [dsv_id_to_model(dsv)
-                      for ref in mp._find(EbrainsRef)
-                      for dsv in ref._dataset_verion_ids
-                      if ref.annotates in all_vol_ids]
-        volumes = [
-            VolumeModel(name="",
-                        formats=[format],
-                        provides_mesh=vol.format in VolumeFormats.MESH_FORMATS,
-                        provides_image=vol.format in VolumeFormats.IMAGE_FORMATS,
-                        fragments={},
-                        variant=None,
-                        provided_volumes={
-                            format: {
-                                "left hemisphere": remap_url(lh_vol.url),
-                                "right hemisphere": remap_url(rh_vol.url),
-                            }
-                        },
-                        space={
-                            "@id": mp.space_id
-                        },
-                        datasets=all_vol_ds
-                        )
-        ]
-        for regionname, mappings in indices.items():
-            assert len(mappings) == 1, f"Expected only one mapping, but got {len(mappings)}"
-            mapping = mappings[0]
-            if "left" in regionname:
-                mapping["volume"] = 0
-                mapping["fragment"] = "left hemisphere"
-                continue
-            if "right" in regionname:
-                mapping["volume"] = 0
-                mapping["fragment"] = "right hemisphere"
-                continue
-            raise RuntimeError(f"{regionname=!r} is neither lh or rh")
-
+    indices = defaultdict(list)
+    for idx, vol in enumerate(all_volumes):
+        for regionname, value in vol.mapping.items():
+            new_index = {
+                "volume": idx
+            }
+            if value.get("label"):
+                new_index["label"] = value.get("label")
+            indices[regionname].append(new_index)
+            indices[clear_name(regionname)].append(new_index)
     return MapModel(
         id=id,
         name=name,
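
The tail of this hunk restores per-volume region indexing: each volume carries its own mapping of region name to label metadata, and the serializer aggregates these into indices keyed by both the raw and the cleaned-up region name. A stand-alone sketch of that aggregation, using plain dicts in place of siibra volume objects and a simplified clear_name:

    from collections import defaultdict

    def clear_name(name: str) -> str:
        # Simplified stand-in for the clear_name helper earlier in this file.
        return " ".join(w for w in name.split(" ") if len(w))

    # Illustrative stand-ins for the attributes the loop reads.
    all_volumes = [
        {"mapping": {"Area hOc1  left": {"label": 1}}},
        {"mapping": {"Area hOc1  right": {"label": 1}}},
    ]

    indices = defaultdict(list)
    for idx, vol in enumerate(all_volumes):
        for regionname, value in vol["mapping"].items():
            new_index = {"volume": idx}
            if value.get("label"):
                new_index["label"] = value.get("label")
            indices[regionname].append(new_index)              # raw name
            indices[clear_name(regionname)].append(new_index)  # normalized name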
2 changes: 1 addition & 1 deletion requirements/v4-siibra.txt
@@ -1,2 +1,2 @@
 # refactor_attr is... borked since dataops are being introduced
-git+https://github.com/xgui3783/siibra-python.git@aeffb478a1c98f3bec276ab820f5e50355c598a4
+git+https://github.com/xgui3783/siibra-python.git@ebc1b0d3fc9e85d863057e7147fe27a40e0e5158
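
For reference, pinning a git requirement to a full commit hash (rather than a branch) keeps builds reproducible while the upstream refactor is in flux; pip treats the fragment after @ in a git+https URL as a commit, branch, or tag. An illustrative install of the pinned revision:

    pip install "git+https://github.com/xgui3783/siibra-python.git@ebc1b0d3fc9e85d863057e7147fe27a40e0e5158"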
