Skip to content

Commit

Permalink
fix: logger -> general_logger
Browse files Browse the repository at this point in the history
fix: move origin header middleware
fix: ebrains ds person default None value
feat: cache retrieved dsv
  • Loading branch information
xgui3783 committed Aug 26, 2024
1 parent e90fe97 commit 5a76335
Show file tree
Hide file tree
Showing 3 changed files with 36 additions and 30 deletions.
35 changes: 18 additions & 17 deletions api/server/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from .metrics import prom_metrics_resp, on_startup as metrics_on_startup, on_terminate as metrics_on_terminate
from .code_snippet import get_sourcecode

from ..common import logger, access_logger, NotFound, SapiBaseException, name_to_fns_map
from ..common import logger, general_logger, access_logger, NotFound, SapiBaseException, name_to_fns_map
from ..siibra_api_config import GIT_HASH

siibra_version_header = "x-siibra-api-version"
Expand Down Expand Up @@ -55,18 +55,6 @@
expose_headers=[siibra_version_header]
)

# some plugins may strip origin header for privacy reasons
# so if origin is unavailable, append it to trick corsmiddleware to activate
@siibra_api.middleware("http")
async def append_origin_header(request: Request, call_next):
headers = dict(request.scope["headers"])
origin = request.headers.get("origin")
new_headers = [(k, v) for k, v in headers.items()]
if not origin:
new_headers.append((b"origin", b"unknownorigin.dev"))
request.scope["headers"] = new_headers
return await call_next(request)

@siibra_api.get("/metrics", include_in_schema=False)
def get_metrics():
"""Get prometheus metrics"""
Expand Down Expand Up @@ -295,15 +283,27 @@ async def middleware_access_log(request: Request, call_next):
"hit_cache": "cache_miss"
})
except Exception as e:
logger.critical(e)
general_logger.critical(e)

# some plugins may strip origin header for privacy reasons
# so if origin is unavailable, append it to trick corsmiddleware to activate
@siibra_api.middleware("http")
async def append_origin_header(request: Request, call_next):
    """Ensure every incoming request carries an ``origin`` header.

    Some browser privacy plugins strip the origin header; without it,
    CORSMiddleware never activates and no CORS headers are emitted.
    When origin is missing, append a placeholder so CORS processing
    always runs. The request is then forwarded unchanged otherwise.
    """
    if not request.headers.get("origin"):
        # scope["headers"] is a list of (bytes, bytes) pairs. Copy it as a
        # list (NOT via dict(), which would silently collapse repeated
        # header names) before mutating, then install the copy.
        new_headers = list(request.scope["headers"])
        new_headers.append((b"origin", b"unknownorigin.dev"))
        request.scope["headers"] = new_headers
    return await call_next(request)

@siibra_api.exception_handler(RuntimeError)
async def exception_runtime(request: Request, exc: RuntimeError) -> JSONResponse:
"""Handling RuntimeErrors.
Most of the RuntimeErrors are thrown by the siibra-python library when other Services are not responding.
To be more resilient and not throw a simple and unplanned HTTP 500 response, this handler will return an HTTP 503
status."""
logger.warning(f"Error handler: exception_runtime: {str(exc)}")
general_logger.warning(f"Error handler: exception_runtime: {str(exc)}")
return JSONResponse(
status_code=503,
content={
Expand All @@ -312,16 +312,17 @@ async def exception_runtime(request: Request, exc: RuntimeError) -> JSONResponse
},
)


@siibra_api.exception_handler(SapiBaseException)
def exception_sapi(request: Request, exc: SapiBaseException):
    """Translate known siibra-api errors into an HTTP 400 response."""
    message = str(exc)
    general_logger.warning(f"Error handler: exception_sapi: {message}")
    raise HTTPException(400, message)

@siibra_api.exception_handler(Exception)
async def exception_other(request: Request, exc: Exception):
"""Catch all exception handler"""
logger.warning(f"Error handler: exception_other: {str(exc)}")
general_logger.warning(f"Error handler: exception_other: {str(exc)}")
return JSONResponse(
status_code=500,
content={
Expand Down
2 changes: 1 addition & 1 deletion new_api/v3/models/_retrieval/datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ class EbrainsDsUrl(ConfigBaseModel):
class EbrainsDsPerson(ConfigBaseModel):
"""EbrainsDsPerson"""
id: str = Field(..., alias="@id")
schema_shortname: Optional[str] = Field(..., alias="schema.org/shortName")
schema_shortname: Optional[str] = Field(None, alias="schema.org/shortName")
identifier: str
shortName: str
name: str
Expand Down
29 changes: 17 additions & 12 deletions new_api/v3/serialization/map.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from new_api.v3.models.core._concept import SiibraPublication
from new_api.v3.models._retrieval.datasets import EbrainsDatasetModel, EbrainsDsPerson

from siibra.cache import fn_call_cache
from siibra.atlases.parcellationmap import Map
from siibra.atlases.sparsemap import SparseMap
from siibra.attributes.descriptions import Name, EbrainsRef
Expand Down Expand Up @@ -44,6 +45,13 @@ def clear_name(name: str):
result = result.replace(search, repl)
return " ".join(w for w in result.split(" ") if len(w))

@fn_call_cache
def retrieve_dsv(mp: Map):
    """Fetch every EBRAINS dataset version referenced by *mp*.

    Results are cached by ``fn_call_cache``, so repeated serialization of
    the same map does not re-query EBRAINS.
    """
    dataset_versions = []
    for ebrains_ref in mp._find(EbrainsRef):
        # NOTE(review): '_dataset_verion_ids' (sic) is the attribute name
        # as declared upstream in siibra — do not "fix" the spelling here.
        for dsv_id in ebrains_ref._dataset_verion_ids:
            dataset_versions.append(EbrainsQuery.get_dsv(dsv_id))
    return dataset_versions

@serialize(SparseMap)
@serialize(Map)
def map_to_model(mp: Map, **kwargs):
Expand All @@ -57,20 +65,17 @@ def map_to_model(mp: Map, **kwargs):
publications = [SiibraPublication(citation=pub.text, url=pub.value)
for pub in mp.publications]

got_dsv = [ EbrainsQuery.get_dsv(dsv)
for ref in mp._find(EbrainsRef)
for dsv in ref._dataset_verion_ids]

got_dsv = retrieve_dsv(mp)
# TODO check for any non empty entry of custodian and transform properly
datasets = [EbrainsDatasetModel(id=dsv["id"],
name=dsv["fullName"] or "",
urls=[{"url": dsv["homepage"]}] if dsv["homepage"] else [],
description=dsv["description"],
contributors=[EbrainsDsPerson(id=author["id"],
identifier=author["id"],
shortName=author["shortName"],
name=author["fullName"]) for author in dsv["author"]],
custodians=[]) for dsv in got_dsv]
name=dsv["fullName"] or "",
urls=[{"url": dsv["homepage"]}] if dsv["homepage"] else [],
description=dsv["description"],
contributors=[EbrainsDsPerson(id=author["id"],
identifier=author["id"],
shortName=author["shortName"] or f"{author['givenName']} {author['familyName']}",
name=author["fullName"] or f"{author['givenName']} {author['familyName']}") for author in dsv["author"]],
custodians=[]) for dsv in got_dsv]

# specific to map model
species = mp.species
Expand Down

0 comments on commit 5a76335

Please sign in to comment.