diff --git a/bento_beacon/app.py b/bento_beacon/app.py
index 7766e163..fcc941f7 100644
--- a/bento_beacon/app.py
+++ b/bento_beacon/app.py
@@ -1,3 +1,4 @@
+import asyncio
 import logging
 import os
 from flask import Flask, current_app, request
@@ -10,7 +11,6 @@
 from .endpoints.cohorts import cohorts
 from .endpoints.datasets import datasets
 from .network.network import network
-from .network.utils import init_network_service_registry
 from .utils.exceptions import APIException
 from werkzeug.exceptions import HTTPException
 from .authz.middleware import authz_middleware
@@ -65,11 +65,6 @@
     # load blueprint for network
     if current_app.config["USE_BEACON_NETWORK"]:
         app.register_blueprint(network)
-        try:
-            init_network_service_registry()
-        except APIException:
-            # trouble setting up network, swallow for now
-            current_app.logger.error("API Error when initializing beacon network")
 
     # get censorship settings from katsu
     max_filters = None
@@ -79,7 +74,7 @@
     for tries in range(max_retries + 1):
         current_app.logger.info(f"calling katsu for censorship parameters (try={tries})")
         try:
-            max_filters, count_threshold = katsu_censorship_settings()
+            max_filters, count_threshold = asyncio.run(katsu_censorship_settings())
             # If we got values successfully, without an API exception being raised, exit early - even if they're None
             break
         except APIException as e:
@@ -102,11 +97,11 @@
 
 @app.before_request
-def before_request():
+async def before_request():
     if request.blueprint != "info":
         validate_request()
-    verify_permissions()
-    save_request_data()
+    await verify_permissions()
+    await save_request_data()
     reject_query_if_not_permitted()
     init_response_data()
diff --git a/bento_beacon/authz/access.py b/bento_beacon/authz/access.py
index 7e080972..3d2b2299 100644
--- a/bento_beacon/authz/access.py
+++ b/bento_beacon/authz/access.py
@@ -1,8 +1,8 @@
-import functools
-import requests
+import aiocache
+import aiohttp
 from flask import current_app
-
 from .headers import auth_header_from_request
+from ..utils.http import tcp_connector
 
 __all__ = [
     "get_access_token",
@@ -10,57 +10,63 @@
 ]
 
 
-@functools.cache
-def get_token_endpoint_from_openid_config_url(url: str, validate_ssl: bool = True):
-    r = requests.get(url, verify=validate_ssl)
-    if not r.ok:
-        raise Exception(f"Received not-OK response from OIDC config URL: {r.status_code}")
-    return r.json()["token_endpoint"]
+@aiocache.cached()
+async def get_token_endpoint_from_openid_config_url(url: str):
+    async with aiohttp.ClientSession(connector=tcp_connector(current_app.config)) as s:
+        r = await s.get(url)
+        if not r.ok:
+            raise Exception(f"Received not-OK response from OIDC config URL: {r.status}")
+        response = await r.json()
+    return response["token_endpoint"]
 
 
-def get_access_token() -> str | None:
+async def get_access_token() -> str | None:
     logger = current_app.logger
 
     oidc_config_url = current_app.config["OPENID_CONFIG_URL"]
     client_id = current_app.config["CLIENT_ID"]
     client_secret = current_app.config["CLIENT_SECRET"]
-    validate_ssl = current_app.config["BENTO_VALIDATE_SSL"]
 
     if not all((oidc_config_url, client_id, client_secret)):
         logger.error("Could not retrieve access token; one of OPENID_CONFIG_URL | CLIENT_ID | CLIENT_SECRET is not set")
         return None
 
     try:
-        token_endpoint = get_token_endpoint_from_openid_config_url(oidc_config_url, validate_ssl=validate_ssl)
+        token_endpoint = await get_token_endpoint_from_openid_config_url(oidc_config_url)
+        logger.debug(f"token_endpoint: {token_endpoint}")
    except Exception as e:
        logger.error(f"Could not retrieve access token; got exception from OpenID config URL: {e}")
        return None

-    token_res = requests.post(
-        
token_endpoint, - verify=validate_ssl, - data={ - "grant_type": "client_credentials", - "client_id": client_id, - "client_secret": client_secret, - }, - ) + async with aiohttp.ClientSession(connector=tcp_connector(current_app.config)) as s: + token_res = await s.post( + token_endpoint, + data={ + "grant_type": "client_credentials", + "client_id": client_id, + "client_secret": client_secret, + }, + ) + + res = await token_res.json() if not token_res.ok: - logger.error(f"Could not retrieve access token; got error response: {token_res.json()}") + logger.error(f"Could not retrieve access token; got error response: {res}") return None - return token_res.json()["access_token"] + return res["access_token"] -def create_access_header_or_fall_back(): +async def create_access_header_or_fall_back(): logger = current_app.logger if not current_app.config["AUTHZ_BENTO_REQUESTS_ENABLED"]: logger.warning("AUTHZ_BENTO_REQUESTS_ENABLED is false; falling back to request headers") return auth_header_from_request() - access_token = get_access_token() + access_token = await get_access_token() if access_token is None: logger.error("create_access_header_or_fall_back: falling back to request headers") return auth_header_from_request() diff --git a/bento_beacon/authz/middleware.py b/bento_beacon/authz/middleware.py index 13dfc29f..1d4d3db1 100644 --- a/bento_beacon/authz/middleware.py +++ b/bento_beacon/authz/middleware.py @@ -19,5 +19,5 @@ ) -def check_permission(permission: Permission) -> bool: - return authz_middleware.evaluate_one(request, RESOURCE_EVERYTHING, permission, mark_authz_done=True) +async def check_permission(permission: Permission) -> bool: + return await authz_middleware.async_evaluate_one(request, RESOURCE_EVERYTHING, permission, mark_authz_done=True) diff --git a/bento_beacon/config_files/config.py b/bento_beacon/config_files/config.py index ef998ab2..be725204 100644 --- a/bento_beacon/config_files/config.py +++ b/bento_beacon/config_files/config.py @@ -1,6 +1,6 @@ import json +import logging import os -import urllib3 from ..constants import GRANULARITY_COUNT, GRANULARITY_RECORD @@ -11,13 +11,15 @@ def str_to_bool(value: str) -> bool: return value.strip().lower() in ("true", "1", "t", "yes") +def reverse_domain_id(domain): + return ".".join(reversed(domain.split("."))) + ".beacon" + + BENTO_DEBUG = str_to_bool(os.environ.get("BENTO_DEBUG", os.environ.get("FLASK_DEBUG", "false"))) -BENTO_VALIDATE_SSL = str_to_bool(os.environ.get("BENTO_VALIDATE_SSL", str(not BENTO_DEBUG))) -if not BENTO_VALIDATE_SSL: - # Don't let urllib3 spam us with SSL validation warnings if we're operating with SSL validation off, most likely in - # a development/test context where we're using self-signed certificates. 
-    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+# silence logspam
+logging.getLogger("asyncio").setLevel(logging.WARNING)
+logging.getLogger("aiocache").setLevel(logging.WARNING)
 
 
 class Config:
@@ -40,15 +42,10 @@ class Config:
     DEFAULT_PAGINATION_PAGE_SIZE = 10
 
     BENTO_DEBUG = BENTO_DEBUG
-    BENTO_VALIDATE_SSL = BENTO_VALIDATE_SSL
-
     BENTO_DOMAIN = os.environ.get("BENTOV2_DOMAIN")
     BEACON_BASE_URL = os.environ.get("BEACON_BASE_URL")
     BENTO_PUBLIC_URL = os.environ.get("BENTOV2_PUBLIC_URL")
-
-    # reverse domain id
-    BEACON_ID = ".".join(reversed(BENTO_DOMAIN.split("."))) + ".beacon"
-
+    BEACON_ID = reverse_domain_id(BENTO_DOMAIN)
     BEACON_NAME = os.environ.get("BENTO_PUBLIC_CLIENT_NAME", "Bento") + " Beacon"
     BEACON_UI_ENABLED = str_to_bool(os.environ.get("BENTO_BEACON_UI_ENABLED", ""))
     BEACON_UI_URL = BENTO_PUBLIC_URL + "/#/en/beacon"
@@ -153,9 +150,6 @@ class Config:
     KATSU_DATASETS_ENDPOINT = "/api/datasets"
     KATSU_SEARCH_ENDPOINT = "/private/search"
     KATSU_RESOURCES_ENDPOINT = "/api/resources"
-    KATSU_PHENOTYPIC_FEATURE_TERMS_ENDPOINT = "/api/phenotypic_feature_type_autocomplete"
-    KATSU_DISEASES_TERMS_ENDPOINT = "/api/disease_term_autocomplete"
-    KATSU_SAMPLED_TISSUES_TERMS_ENDPOINT = "/api/biosample_sampled_tissue_autocomplete"
     KATSU_PUBLIC_CONFIG_ENDPOINT = "/api/public_search_fields"
     KATSU_INDIVIDUAL_SCHEMA_ENDPOINT = "/api/schemas/phenopacket"
     KATSU_EXPERIMENT_SCHEMA_ENDPOINT = "/api/schemas/experiment"
diff --git a/bento_beacon/endpoints/datasets.py b/bento_beacon/endpoints/datasets.py
index f113cf30..93a2bbd5 100644
--- a/bento_beacon/endpoints/datasets.py
+++ b/bento_beacon/endpoints/datasets.py
@@ -9,16 +9,16 @@
 
 @datasets.route("/datasets", methods=["GET", "POST"])
 @authz_middleware.deco_public_endpoint  # TODO: authz - more flexibility in what is visible (?)
-def get_datasets():
-    k_datasets = katsu_datasets()
+async def get_datasets():
+    k_datasets = await katsu_datasets()
     datasets_beacon_format = list(map(katsu_to_beacon_dataset_mapping, k_datasets))
     return beacon_collections_response({"collections": datasets_beacon_format})
 
 
 @datasets.route("/datasets/<id>", methods=["GET", "POST"])
 @authz_middleware.deco_public_endpoint  # TODO: authz - more flexibility in what is visible (?)
-def get_datasets_by_id(id): - k_dataset = katsu_datasets(id) +async def get_datasets_by_id(id): + k_dataset = await katsu_datasets(id) dataset_beacon_format = katsu_to_beacon_dataset_mapping(k_dataset) if k_dataset else [] return beacon_collections_response({"collections": dataset_beacon_format}) diff --git a/bento_beacon/endpoints/individuals.py b/bento_beacon/endpoints/individuals.py index 98295a09..a306855b 100644 --- a/bento_beacon/endpoints/individuals.py +++ b/bento_beacon/endpoints/individuals.py @@ -28,7 +28,7 @@ @individuals.route("/individuals", methods=["GET", "POST"]) -def get_individuals(): +async def get_individuals(): variants_query = g.beacon_query_parameters["variants_query"] phenopacket_filters = g.beacon_query_parameters["phenopacket_filters"] experiment_filters = g.beacon_query_parameters["experiment_filters"] @@ -42,10 +42,10 @@ def get_individuals(): # TODO: return default granularity rather than count (default could be bool rather than count) if no_query: add_info_to_response("no query found, returning total count") - total_count = katsu_total_individuals_count() + total_count = await katsu_total_individuals_count() if summary_stats_requested(): - add_overview_stats_to_response() - return build_query_response(numTotalResults=total_count) + await add_overview_stats_to_response() + return await build_query_response(numTotalResults=total_count) # ---------------------------------------------------------- # collect biosample ids from variant and experiment search @@ -53,9 +53,9 @@ def get_individuals(): sample_ids = [] if search_sample_ids: - sample_ids = biosample_id_search(variants_query=variants_query, experiment_filters=experiment_filters) + sample_ids = await biosample_id_search(variants_query=variants_query, experiment_filters=experiment_filters) if not sample_ids: - return zero_count_response() + return await zero_count_response() # ------------------------------- # get individuals @@ -65,38 +65,38 @@ def get_individuals(): # get individuals from katsu config search if config_filters: - config_ids = search_from_config(config_filters) + config_ids = await search_from_config(config_filters) if not config_ids: - return zero_count_response() + return await zero_count_response() individual_results["config_ids"] = config_ids if not config_search_only: # retrieve all matching individuals from sample id search, filtered by any phenopacket filters # either of phenopacket_filters or sample_ids can be empty - phenopacket_ids = katsu_filters_and_sample_ids_query(phenopacket_filters, "phenopacket", sample_ids) + phenopacket_ids = await katsu_filters_and_sample_ids_query(phenopacket_filters, "phenopacket", sample_ids) if not phenopacket_ids: - return zero_count_response() + return await zero_count_response() individual_results["phenopacket_ids"] = phenopacket_ids # baroque syntax but covers all cases individual_ids = list(reduce(set.intersection, (set(ids) for ids in individual_results.values()))) if summary_stats_requested(): - add_stats_to_response(individual_ids) + await add_stats_to_response(individual_ids) - return build_query_response(ids=individual_ids, full_record_handler=individuals_full_results) + return await build_query_response(ids=individual_ids, full_record_handler=individuals_full_results) # TODO: pagination (ideally after katsu search gets paginated) -def individuals_full_results(ids): +async def individuals_full_results(ids): # temp # if len(ids) > 100: # return {"message": "too many ids for full response"} - handover_permission = 
check_permission(P_DOWNLOAD_DATA)
-    handover = handover_for_ids(ids) if handover_permission else {}
-    phenopackets_by_result_set = phenopackets_for_ids(ids).get("results", {})
+    handover_permission = await check_permission(P_DOWNLOAD_DATA)
+    handover = (await handover_for_ids(ids)) if handover_permission else {}
+    phenopackets_by_result_set = (await phenopackets_for_ids(ids)).get("results", {})
     result_ids = list(phenopackets_by_result_set.keys())
     result_sets = {}
     numTotalResults = 0
@@ -123,8 +123,8 @@
 # forbidden / unauthorized if no permissions
 @individuals.route("/individuals/<id>", methods=["GET", "POST"])
 @authz_middleware.deco_require_permissions_on_resource({P_QUERY_DATA})
-def individual_by_id(id):
-    result_sets, numTotalResults = individuals_full_results([id])
+async def individual_by_id(id):
+    result_sets, numTotalResults = await individuals_full_results([id])
 
     # return 404 if not found
     # only authorized users will get 404 here, so this can't be used to probe ids
diff --git a/bento_beacon/endpoints/info.py b/bento_beacon/endpoints/info.py
index 7df171a9..6f253264 100644
--- a/bento_beacon/endpoints/info.py
+++ b/bento_beacon/endpoints/info.py
@@ -13,51 +13,51 @@
 info = Blueprint("info", __name__)
 
 
-def overview():
+async def overview():
     if current_app.config["BEACON_CONFIG"].get("useGohan"):
-        variants_count = gohan_counts_for_overview()
+        variants_count = await gohan_counts_for_overview()
     else:
         variants_count = {}
-    return {"counts": {"individuals": katsu_total_individuals_count(), "variants": variants_count}}
+    return {"counts": {"individuals": await katsu_total_individuals_count(), "variants": variants_count}}
 
 
 # service-info in ga4gh format
 @info.route("/service-info")
 @authz_middleware.deco_public_endpoint
-def service_info():
+async def service_info():
     # plain response without beacon wrappers
-    return current_app.config.get("BEACON_GA4GH_SERVICE_INFO", build_ga4gh_service_info())
+    return current_app.config.get("BEACON_GA4GH_SERVICE_INFO", await build_ga4gh_service_info())
 
 
 # service info in beacon format
 @info.route("/")
 @authz_middleware.deco_public_endpoint
-def beacon_info():
-    return beacon_info_response(current_app.config.get("SERVICE_DETAILS", build_service_details()))
+async def beacon_info():
+    return beacon_info_response(current_app.config.get("SERVICE_DETAILS", await build_service_details()))
 
 
 # as above but with beacon overview details
 @info.route("/info")
 @authz_middleware.deco_public_endpoint
-def beacon_info_with_overview():
-    service_info = current_app.config.get("SERVICE_DETAILS", build_service_details())
-    return beacon_info_response({**service_info, "overview": overview()})
+async def beacon_info_with_overview():
+    service_info = current_app.config.get("SERVICE_DETAILS", await build_service_details())
+    return beacon_info_response({**service_info, "overview": await overview()})
 
 
 @info.route("/filtering_terms")
 @authz_middleware.deco_public_endpoint
-def filtering_terms():
-    filtering_terms = get_filtering_terms()
+async def filtering_terms():
+    filtering_terms = await get_filtering_terms()
     return beacon_info_response({"resources": [], "filteringTerms": filtering_terms})
 
 
 # distinct from "BEACON_CONFIG"
 @info.route("/configuration")
 @authz_middleware.deco_public_endpoint
-def beacon_configuration():
+async def beacon_configuration():
     return beacon_info_response(
-        current_app.config.get("CONFIGURATION_ENDPOINT_RESPONSE", build_configuration_endpoint_response())
+        current_app.config.get("CONFIGURATION_ENDPOINT_RESPONSE", await 
build_configuration_endpoint_response()) ) @@ -77,9 +77,9 @@ def beacon_map(): # custom endpoint not in beacon spec @info.route("/overview") @authz_middleware.deco_public_endpoint -def beacon_overview(): - service_info = current_app.config.get("SERVICE_DETAILS", build_service_details()) - return beacon_info_response({**service_info, "overview": overview()}) +async def beacon_overview(): + service_info = current_app.config.get("SERVICE_DETAILS", await build_service_details()) + return beacon_info_response({**service_info, "overview": await overview()}) # ------------------------------------------------------- @@ -89,14 +89,14 @@ def beacon_overview(): @info.route("/individual_schema", methods=["GET", "POST"]) @authz_middleware.deco_public_endpoint -def get_individual_schema(): - return katsu_get(current_app.config["KATSU_INDIVIDUAL_SCHEMA_ENDPOINT"], requires_auth="none") +async def get_individual_schema(): + return await katsu_get(current_app.config["KATSU_INDIVIDUAL_SCHEMA_ENDPOINT"], requires_auth="none") @info.route("/experiment_schema", methods=["GET", "POST"]) @authz_middleware.deco_public_endpoint -def get_experiment_schema(): - return katsu_get(current_app.config["KATSU_EXPERIMENT_SCHEMA_ENDPOINT"], requires_auth="none") +async def get_experiment_schema(): + return await katsu_get(current_app.config["KATSU_EXPERIMENT_SCHEMA_ENDPOINT"], requires_auth="none") # ------------------------------------------------------- @@ -105,7 +105,7 @@ def get_experiment_schema(): # these return the appropriate response but also save as a side effect -def build_service_details(): +async def build_service_details(): # build info response in beacon format info = current_app.config["BEACON_CONFIG"].get("serviceInfo") s = { @@ -124,7 +124,7 @@ def build_service_details(): # retrieve dataset description from DATS # may be multiple datasets, so collect all descriptions into one string # for custom description, add a "description" field to service info in beacon_config.json - k_datasets = katsu_datasets() + k_datasets = await katsu_datasets() dats_array = list(map(lambda d: d.get("datsFile", {}), k_datasets)) description = " ".join([d.get("description") for d in dats_array if "description" in d]) custom_description = info.get("description") @@ -137,9 +137,9 @@ def build_service_details(): return s -def build_ga4gh_service_info(): +async def build_ga4gh_service_info(): # construct from beacon-format info - info = current_app.config.get("SERVICE_DETAILS", build_service_details()) + info = current_app.config.get("SERVICE_DETAILS", await build_service_details()) s = { "id": info["id"], @@ -157,16 +157,17 @@ def build_ga4gh_service_info(): s["description"] = description current_app.config["BEACON_GA4GH_SERVICE_INFO"] = s + return s -def build_configuration_endpoint_response(): +async def build_configuration_endpoint_response(): entry_types_details = current_app.config.get("ENTRY_TYPES", build_entry_types()) # production status is one of "DEV", "PROD", "TEST" # while environment is one of "dev", "prod", "test", "staging".. 
generally only use "dev" or "prod" production_status = ( - current_app.config.get("SERVICE_DETAILS", build_service_details()).get("environment", "error").upper() + current_app.config.get("SERVICE_DETAILS", await build_service_details()).get("environment", "error").upper() ) response = { diff --git a/bento_beacon/endpoints/variants.py b/bento_beacon/endpoints/variants.py index 23bbbdae..05994bd7 100644 --- a/bento_beacon/endpoints/variants.py +++ b/bento_beacon/endpoints/variants.py @@ -10,7 +10,7 @@ # returns count or boolean only @variants.route("/g_variants", methods=["GET", "POST"]) @authz_middleware.deco_public_endpoint # TODO: for now. eventually, return more depending on permissions -def get_variants(): +async def get_variants(): variants_query = g.beacon_query_parameters["variants_query"] phenopacket_filters = g.beacon_query_parameters["phenopacket_filters"] experiment_filters = g.beacon_query_parameters["experiment_filters"] @@ -20,41 +20,41 @@ def get_variants(): # if no query, return total count of variants if not (variants_query or has_filters): add_info_to_response("no query found, returning total count") - total_count = gohan_total_variants_count() - return build_query_response(numTotalResults=total_count) + total_count = await gohan_total_variants_count() + return await build_query_response(numTotalResults=total_count) # collect biosample ids from all filters sample_ids = [] if has_filters: - sample_ids = biosample_id_search( + sample_ids = await biosample_id_search( phenopacket_filters=phenopacket_filters, experiment_filters=experiment_filters, config_filters=config_filters, ) if not sample_ids: - return zero_count_response() + return await zero_count_response() # finally, find relevant variants, depending on whether a variants query was made if variants_query: # gohan search returns uppercase only sample_ids = [id.upper() for id in sample_ids] - variant_results = query_gohan(variants_query, "record", ids_only=False) + variant_results = await query_gohan(variants_query, "record", ids_only=False) if has_filters: - variant_results = list(filter(lambda v: v.get("sample_id") in sample_ids, variant_results)) - gohan_count = len(variant_results) + variant_results_list = list(filter(lambda v: v.get("sample_id") in sample_ids, variant_results)) + gohan_count = len(variant_results_list) else: # gohan overview returns lowercase only sample_ids = [id.lower() for id in sample_ids] - variant_totals = gohan_totals_by_sample_id() + variant_totals = await gohan_totals_by_sample_id() if has_filters: gohan_count = sum(variant_totals.get(id) for id in sample_ids if id in variant_totals) else: gohan_count = sum(variant_totals.values()) - return build_query_response(numTotalResults=gohan_count) + return await build_query_response(numTotalResults=gohan_count) # ------------------------------------------------------- diff --git a/bento_beacon/network/network.py b/bento_beacon/network/network.py index 5a3e0c3a..e8789c22 100644 --- a/bento_beacon/network/network.py +++ b/bento_beacon/network/network.py @@ -1,25 +1,26 @@ from flask import current_app, request, Blueprint -from ..utils.exceptions import APIException, NotFoundException -from .utils import network_beacon_get, network_beacon_post, host_beacon_response, filters_intersection, filters_union +from ..utils.exceptions import NotFoundException +from .utils import network_beacon_get, network_beacon_post, host_beacon_response, init_network_service_registry + network = Blueprint("network", __name__, url_prefix="/network") # TODOs: # filtering terms 
diff --git a/bento_beacon/network/network.py b/bento_beacon/network/network.py
index 5a3e0c3a..e8789c22 100644
--- a/bento_beacon/network/network.py
+++ b/bento_beacon/network/network.py
@@ -1,25 +1,26 @@
 from flask import current_app, request, Blueprint
-from ..utils.exceptions import APIException, NotFoundException
-from .utils import network_beacon_get, network_beacon_post, host_beacon_response, filters_intersection, filters_union
+from ..utils.exceptions import NotFoundException
+from .utils import network_beacon_get, network_beacon_post, host_beacon_response, init_network_service_registry
+
 
 network = Blueprint("network", __name__, url_prefix="/network")
 
 # TODOs:
 # filtering terms
-# /service-info? there's already one at beacon root
-# async calls
-
-# standard beacon info endpoints at the network level: /map, /configuration, etc
 # handle GET args
+# and perhaps standard beacon info endpoints at the network level: /map, /configuration, /service-info etc
+# these are only useful if we plan to organize hierarchical networks
+# (by e.g. adding our network as a single beacon to another network)
 
 
 @network.route("")
 @network.route("/beacons")
-def network_beacons():
-    beacons_dict = current_app.config.get("NETWORK_BEACONS")
-    if not beacons_dict:
-        raise APIException("no beacons found in network config")
+async def network_beacons():
+    beacons_dict = await init_network_service_registry()
+    current_app.config["NETWORK_BEACONS"] = beacons_dict
 
     # filters handling still experimental
     return {
@@ -31,7 +32,7 @@
 
 # returns 404 if endpoint missing
 @network.route("/beacons/<beacon_id>/<endpoint>", methods=["GET", "POST"])
-def query(beacon_id, endpoint):
+async def query(beacon_id, endpoint):
     beacon = current_app.config["NETWORK_BEACONS"].get(beacon_id)
 
     if not beacon:
@@ -43,16 +44,16 @@
     # special handling for host beacon, avoid circular http calls
     host_id = current_app.config["BEACON_ID"]
     if beacon_id == host_id:
-        return host_beacon_response(endpoint)
+        return await host_beacon_response(endpoint)
 
     # all other beacons
     api_url = beacon.get("apiUrl")
     if request.method == "POST":
         payload = request.get_json()
-        r = network_beacon_post(api_url, payload, endpoint)
+        r = await network_beacon_post(api_url, payload, endpoint)
     else:
         # TODO: pass get args
-        r = network_beacon_get(api_url, endpoint)
+        r = await network_beacon_get(api_url, endpoint)
 
     return r
diff --git a/bento_beacon/network/utils.py b/bento_beacon/network/utils.py
index 63f93e4a..dadb7aad 100644
--- a/bento_beacon/network/utils.py
+++ b/bento_beacon/network/utils.py
@@ -1,7 +1,9 @@
-import requests
+import aiohttp
+import asyncio
 from flask import current_app
 from urllib.parse import urlsplit, urlunsplit
 from json import JSONDecodeError
+from ..utils.http import tcp_connector
 from ..utils.exceptions import APIException
 from ..utils.katsu_utils import overview_statistics, get_katsu_config_search_fields
 from ..endpoints.info import build_service_details, overview
@@ -16,7 +18,6 @@
 # to deprecate in Bento 18
 PUBLIC_SEARCH_FIELDS_PATH = "/api/metadata/api/public_search_fields"
 
-
 DEFAULT_ENDPOINT = "individuals"
 OVERVIEW_STATS_QUERY = {
     "meta": {"apiVersion": "2.0.0"},
@@ -34,13 +35,13 @@
 
 # get network node info for this beacon, which is also hosting the network
 # call methods directly instead of circular http calls
-def info_for_host_beacon():
-    service_details = build_service_details()
+async def info_for_host_beacon():
+    service_details = await build_service_details()
 
     # TODO: fix ugly overlapping overview functions
     # requires rolling out changes to all beacons first
-    bento_overview = overview()
-    bento_private_overview = overview_statistics()
+    bento_overview = await overview()
+    bento_private_overview = await overview_statistics()
     experiment_stats = {"count": bento_private_overview.get("count", 0)}
     biosample_stats = {
         "count": bento_private_overview.get("phenopacket", {})
@@ -54,20 +55,19 @@
     return {
         **service_details,
         "apiUrl": api_url,
-        "b_id": current_app.config["BEACON_ID"],
         "overview": {
             "individuals": {"count": bento_overview.get("counts", {}).get("individuals")},
             "variants": bento_overview.get("counts", {}).get("variants", {}),
"biosamples": biosample_stats, "experiments": experiment_stats, }, - "querySections": get_katsu_config_search_fields(requires_auth="none").get("sections", []), + "querySections": (await get_katsu_config_search_fields(requires_auth="none")).get("sections", []), } -def host_beacon_response(endpoint): +async def host_beacon_response(endpoint): # endpoint already known to be valid - return HOST_VIEWS_BY_ENDPOINT[endpoint]() + return await HOST_VIEWS_BY_ENDPOINT[endpoint]() def has_variants_query(payload): @@ -77,117 +77,122 @@ def has_variants_query(payload): return bool(query) -def network_beacon_call(method, url, payload=None): +async def network_beacon_call(method, url, payload=None): + c = current_app.config current_app.logger.info(f"Calling network url: {url}") timeout = ( - current_app.config["NETWORK_VARIANTS_QUERY_TIMEOUT_SECONDS"] + c["NETWORK_VARIANTS_QUERY_TIMEOUT_SECONDS"] if has_variants_query(payload) - else current_app.config["NETWORK_DEFAULT_TIMEOUT_SECONDS"] + else c["NETWORK_DEFAULT_TIMEOUT_SECONDS"] ) try: - if method == "GET": - r = requests.get(url, timeout=timeout) - else: - r = requests.post(url, json=payload, timeout=timeout) - beacon_response = r.json() - - except (requests.exceptions.RequestException, JSONDecodeError) as e: - current_app.logger.error(e) + async with aiohttp.ClientSession(connector=tcp_connector(c)) as s: + if method == "GET": + r = await s.get(url, timeout=timeout) + else: + r = await s.post(url, timeout=timeout, json=payload) + + if not r.ok: + raise APIException() + + beacon_response = await r.json() + + except (APIException, aiohttp.ClientError, JSONDecodeError) as e: msg = f"beacon network error calling url {url}: {e}" raise APIException(message=msg) return beacon_response -def network_beacon_get(root_url, endpoint=None): +async def network_beacon_get(root_url, endpoint=None): url = root_url if endpoint is None else root_url + "/" + endpoint - return network_beacon_call("GET", url) + return await network_beacon_call("GET", url) -def network_beacon_post(root_url, payload={}, endpoint=None): +async def network_beacon_post(root_url, payload={}, endpoint=None): url = root_url if endpoint is None else root_url + "/" + endpoint - return network_beacon_call("POST", url, payload) + return await network_beacon_call("POST", url, payload) def make_network_filtering_terms(beacons): - all_query_sections = [b["querySections"] for b in beacons.values()] + all_query_sections = [b.get("querySections", {}) for b in beacons.values()] current_app.config["ALL_NETWORK_FILTERS"] = filters_union(all_query_sections) current_app.config["COMMON_NETWORK_FILTERS"] = filters_intersection(all_query_sections) pass -def init_network_service_registry(): - current_app.logger.info("registering beacons") +async def call_network_beacon_for_init(url): + beacon_info = {"apiUrl": url} + + try: + + b = (await network_beacon_get(url, endpoint="overview")).get("response") + beacon_info.update(b) + + # organize overview stats + # TODO (Redmine #2170) modify beacon /overview response + # .... 
so we don't have to make two calls here, with different response formats + individual_and_variant_stats = b.get("overview", {}).get("counts") + biosample_and_experiment_stats = ( + (await network_beacon_post(url, OVERVIEW_STATS_QUERY, DEFAULT_ENDPOINT)).get("info", {}).get("bento") + ) + + beacon_info["overview"] = { + "individuals": {"count": individual_and_variant_stats.get("individuals")}, + "variants": individual_and_variant_stats.get("variants"), + **biosample_and_experiment_stats, + } + + # temp, call katsu for bento public "query_sections" + # TODO change to beacon spec filters, don't call katsu + beacon_info["querySections"] = (await get_public_search_fields(url)).get("sections", []) + + except APIException as e: + current_app.logger.error(f"failed trying to initialize network beacon {url}") + raise e + + return beacon_info + + +async def init_network_service_registry(): urls = current_app.config["NETWORK_URLS"] if not urls: current_app.logger.error("can't find urls for beacon network, did you forget a config file?") - # this isn't driven by a request, so no point serving API error response here - return - network_beacons = {} - failed_beacons = [] + raise APIException("can't find urls for beacon network") + + current_app.logger.info(f"registering {len(urls)} beacons") + host_beacon_url = current_app.config["BEACON_BASE_URL"] - current_app.logger.debug(f"host url: {host_beacon_url}") + + calls = [] for url in urls: # special handling for calling the beacon this network is hosted on if url == host_beacon_url: - host_id = current_app.config["BEACON_ID"] - network_beacons[host_id] = info_for_host_beacon() + calls.append(info_for_host_beacon()) continue # all other beacons - try: - b = network_beacon_get(url, endpoint="overview") - beacon_info = b.get("response") - - except APIException: - failed_beacons.append(url) - current_app.logger.error(f"error contacting network beacon {url}") - continue + calls.append(call_network_beacon_for_init(url)) - if not beacon_info: - failed_beacons.append(url) - current_app.logger.error(f"bad response from network beacon {url}") - continue + results = await asyncio.gather(*calls, return_exceptions=True) - beacon_info["apiUrl"] = url + # filter out any failed calls + registered_beacons = [b for b in results if not isinstance(b, Exception)] - # organize overview stats - # TODO (Redmine #2170) modify beacon /overview so we don't have to make two calls here, with different response formats + current_app.logger.info(f"registered {len(registered_beacons)} beacon(s) in network") + num_failed = len(results) - len(registered_beacons) + if num_failed: + current_app.logger.info(f"{num_failed} beacon(s) failed to register") - # TODO: filters here?? 
- biosample_and_experiment_stats = ( - network_beacon_post(url, OVERVIEW_STATS_QUERY, DEFAULT_ENDPOINT).get("info", {}).get("bento") - ) - individual_and_variant_stats = beacon_info.get("overview", {}).get("counts") + # dict by beacon id easier to work with elsewhere + beacon_dict = {b["id"]: b for b in registered_beacons} - overview = { - "individuals": {"count": individual_and_variant_stats.get("individuals")}, - "variants": individual_and_variant_stats.get("variants"), - **biosample_and_experiment_stats, - } - - b_id = beacon_info.get("id") - network_beacons[b_id] = beacon_info - network_beacons[b_id]["overview"] = overview - - # Note: v15 katsu does not respond here - # TODO (longer): serve beacon spec filtering terms instead of bento public querySections - network_beacons[b_id]["querySections"] = get_public_search_fields(url).get("sections", []) # temp - - # make a merged overview? - # what about merged filtering_terms? - current_app.logger.info( - f"registered {len(network_beacons)} beacon{'' if len(network_beacons) == 1 else 's'} in network: {', '.join(network_beacons)}" - ) - if failed_beacons: - current_app.logger.error( - f"{len(failed_beacons)} network beacon{'' if len(failed_beacons) == 1 else 's'} failed to respond: {', '.join(failed_beacons)}" - ) + make_network_filtering_terms(beacon_dict) + current_app.config["NETWORK_BEACONS"] = beacon_dict - make_network_filtering_terms(network_beacons) - current_app.config["NETWORK_BEACONS"] = network_beacons + return beacon_dict ########################################## @@ -195,10 +200,10 @@ def init_network_service_registry(): # deprecate in Bento 18 -def get_public_search_fields(beacon_url): +async def get_public_search_fields(beacon_url): fields_url = public_search_fields_url(beacon_url) current_app.logger.info(f"trying public fields url {fields_url}") - fields = network_beacon_get(fields_url) + fields = await network_beacon_get(fields_url) return fields diff --git a/bento_beacon/utils/beacon_request.py b/bento_beacon/utils/beacon_request.py index 0a44588c..721d9538 100644 --- a/bento_beacon/utils/beacon_request.py +++ b/bento_beacon/utils/beacon_request.py @@ -105,7 +105,7 @@ def package_get_params(params): return {"meta": meta, "query": query} -def save_request_data(): +async def save_request_data(): defaults = request_defaults() if request.method == "POST": @@ -129,7 +129,7 @@ def save_request_data(): request_data["requestParameters"] = query_request_parameters if query_filters: - reject_if_too_many_filters(query_filters) + await reject_if_too_many_filters(query_filters) request_data["filters"] = query_filters if request_bento: @@ -172,6 +172,6 @@ def summary_stats_requested(): return g.request_data.get("bento", {}).get("showSummaryStatistics") -def verify_permissions(): +async def verify_permissions(): # can do much more here in the future - g.permission_query_data = check_permission(P_QUERY_DATA) + g.permission_query_data = await check_permission(P_QUERY_DATA) diff --git a/bento_beacon/utils/beacon_response.py b/bento_beacon/utils/beacon_response.py index d995dfda..5f2db04e 100644 --- a/bento_beacon/utils/beacon_response.py +++ b/bento_beacon/utils/beacon_response.py @@ -31,23 +31,23 @@ def add_no_results_censorship_message_to_response(): add_info_to_response(f"censorship threshold: {current_app.config['COUNT_THRESHOLD']}") -def add_stats_to_response(ids): - if ids is not None and len(ids) <= get_censorship_threshold(): +async def add_stats_to_response(ids): + if ids is not None and len(ids) <= (await 
get_censorship_threshold()): return if ids is None: - stats = overview_statistics() + stats = await overview_statistics() else: - stats = search_summary_statistics(ids) - packaged_stats = package_biosample_and_experiment_stats(stats) + stats = await search_summary_statistics(ids) + packaged_stats = await package_biosample_and_experiment_stats(stats) g.response_info["bento"] = packaged_stats -def add_overview_stats_to_response(): - add_stats_to_response(None) +async def add_overview_stats_to_response(): + await add_stats_to_response(None) -def package_biosample_and_experiment_stats(stats): +async def package_biosample_and_experiment_stats(stats): phenopacket_dts_stats = stats.get("phenopacket", {}).get("data_type_specific", {}) experiment_stats = stats.get("experiment", {}).get("data_type_specific", {}).get("experiments", {}) @@ -64,12 +64,12 @@ def package_biosample_and_experiment_stats(stats): return { "biosamples": { - "count": censored_count(biosamples_count), - "sampled_tissue": censored_chart_data(sampled_tissue_data), + "count": await censored_count(biosamples_count), + "sampled_tissue": await censored_chart_data(sampled_tissue_data), }, "experiments": { - "count": censored_count(experiments_count), - "experiment_type": censored_chart_data(experiment_type_data), + "count": await censored_count(experiments_count), + "experiment_type": await censored_chart_data(experiment_type_data), }, } @@ -123,11 +123,11 @@ def response_granularity(): raise APIException() -def build_query_response(ids=None, numTotalResults=None, full_record_handler=None): +async def build_query_response(ids=None, numTotalResults=None, full_record_handler=None): granularity = response_granularity() count = len(ids) if numTotalResults is None else numTotalResults - returned_count = censored_count(count) - if returned_count == 0 and get_censorship_threshold() > 0: + returned_count = await censored_count(count) + if returned_count == 0 and (await get_censorship_threshold()) > 0: add_no_results_censorship_message_to_response() if granularity == GRANULARITY_BOOLEAN: return beacon_boolean_response(returned_count) @@ -246,8 +246,8 @@ def beacon_error_response(message, status_code): return {"meta": response_meta([], None), "error": {"errorCode": status_code, "errorMessage": message}} -def zero_count_response(): - return build_query_response(ids=[]) +async def zero_count_response(): + return await build_query_response(ids=[]) # -------------------------------- diff --git a/bento_beacon/utils/censorship.py b/bento_beacon/utils/censorship.py index 5fc564de..1d81b5ae 100644 --- a/bento_beacon/utils/censorship.py +++ b/bento_beacon/utils/censorship.py @@ -12,8 +12,8 @@ def set_censorship_settings(max_filters, count_threshold): # saves settings to config as a side effect -def censorship_retry() -> tuple[int | None, int | None]: - max_filters, count_threshold = katsu_censorship_settings() +async def censorship_retry() -> tuple[int | None, int | None]: + max_filters, count_threshold = await katsu_censorship_settings() if max_filters is None or count_threshold is None: raise APIException( message="error reading censorship settings from katsu: " @@ -27,48 +27,48 @@ def censorship_retry() -> tuple[int | None, int | None]: return max_filters, count_threshold -def threshold_retry() -> int | None: - _, count_threshold = censorship_retry() +async def threshold_retry() -> int | None: + _, count_threshold = await censorship_retry() return count_threshold -def max_filters_retry() -> int | None: - max_filters, _ = censorship_retry() +async def 
max_filters_retry() -> int | None: + max_filters, _ = await censorship_retry() return max_filters -def get_censorship_threshold(): +async def get_censorship_threshold(): if g.permission_query_data: return 0 threshold = current_app.config["COUNT_THRESHOLD"] - return threshold if threshold is not None else threshold_retry() + return threshold if threshold is not None else (await threshold_retry()) -def censored_count(count): - t = get_censorship_threshold() +async def censored_count(count): + t = await get_censorship_threshold() if count <= t: return 0 return count # we don't have the same option of returning zero here -def get_max_filters(): +async def get_max_filters(): max_filters = current_app.config["MAX_FILTERS"] - return max_filters if max_filters is not None else max_filters_retry() + return max_filters if max_filters is not None else await max_filters_retry() # ugly side-effect code, but keeps censorship code together -def reject_if_too_many_filters(filters): +async def reject_if_too_many_filters(filters): if g.permission_query_data: return - max_filters = get_max_filters() + max_filters = await get_max_filters() if len(filters) > max_filters: raise InvalidQuery(f"too many filters in request, maximum of {max_filters} permitted") # at some point may want to show censored fields as zero rather than removing entirely -def censored_chart_data(data): - t = get_censorship_threshold() # zero with correct permissions +async def censored_chart_data(data): + t = await get_censorship_threshold() # zero with correct permissions return [{"label": d["label"], "value": d["value"]} for d in data if d["value"] > t] diff --git a/bento_beacon/utils/gohan_utils.py b/bento_beacon/utils/gohan_utils.py index a2b5c34b..3c0dcf64 100644 --- a/bento_beacon/utils/gohan_utils.py +++ b/bento_beacon/utils/gohan_utils.py @@ -1,7 +1,8 @@ -import requests +import aiohttp from flask import current_app -from .exceptions import APIException, InvalidQuery, NotImplemented +from .http import tcp_connector, aiohttp_params from ..authz.access import create_access_header_or_fall_back +from .exceptions import APIException, InvalidQuery, NotImplemented from .reference import gene_position_lookup # ------------------------------------------------------- @@ -57,7 +58,7 @@ def zero_to_one(start, end=None): # ------------------------------------------------------- -def query_gohan(beacon_args, granularity, ids_only=False): +async def query_gohan(beacon_args, granularity, ids_only=False): # control flow for beacon variant query types # http://docs.genomebeacons.org/variant-queries/ start = beacon_args.get("start") @@ -75,26 +76,26 @@ def query_gohan(beacon_args, granularity, ids_only=False): if geneId_query: if start is not None or end is not None: raise InvalidQuery("invalid mix of geneId and start/end parameters") - return geneId_query_to_gohan(beacon_args, granularity, ids_only) + return await geneId_query_to_gohan(beacon_args, granularity, ids_only) # required everywhere except geneId query if beacon_args.get("referenceName") is None: raise InvalidQuery(message="referenceName parameter required") if sequence_query: - return sequence_query_to_gohan(beacon_args, granularity, ids_only) + return await sequence_query_to_gohan(beacon_args, granularity, ids_only) if range_query: - return range_query_to_gohan(beacon_args, granularity, ids_only) + return await range_query_to_gohan(beacon_args, granularity, ids_only) if bracket_query: - return bracket_query_to_gohan(beacon_args, granularity, ids_only) + return await 
bracket_query_to_gohan(beacon_args, granularity, ids_only) # no other cases raise InvalidQuery() -def sequence_query_to_gohan(beacon_args, granularity, ids_only): +async def sequence_query_to_gohan(beacon_args, granularity, ids_only): current_app.logger.debug("SEQUENCE QUERY") gohan_args = beacon_to_gohan_generic_mapping(beacon_args) @@ -110,41 +111,41 @@ def sequence_query_to_gohan(beacon_args, granularity, ids_only): gohan_args["upperBound"] = gohan_args["lowerBound"] gohan_args["getSampleIdsOnly"] = ids_only - return generic_gohan_query(gohan_args, granularity, ids_only) + return await generic_gohan_query(gohan_args, granularity, ids_only) # optional params # variantType OR alternateBases OR aminoacidChange # variantMinLength # variantMaxLength -def range_query_to_gohan(beacon_args, granularity, ids_only): +async def range_query_to_gohan(beacon_args, granularity, ids_only): current_app.logger.debug("RANGE QUERY") gohan_args = beacon_to_gohan_generic_mapping(beacon_args) gohan_args["lowerBound"], gohan_args["upperBound"] = zero_to_one(beacon_args["start"][0], beacon_args["end"][0]) gohan_args["getSampleIdsOnly"] = ids_only - return generic_gohan_query(gohan_args, granularity, ids_only) + return await generic_gohan_query(gohan_args, granularity, ids_only) -def bracket_query_to_gohan(beacon_args, granularity, ids_only): +async def bracket_query_to_gohan(beacon_args, granularity, ids_only): current_app.logger.debug("BRACKET QUERY") # TODO # either implement here by filtering full results, or implement directly in gohan raise NotImplemented(message="variant bracket query not implemented") -def geneId_query_to_gohan(beacon_args, granularity, ids_only): +async def geneId_query_to_gohan(beacon_args, granularity, ids_only): current_app.logger.debug("GENE ID QUERY") gene_id = beacon_args.get("geneId") assembly_from_query = beacon_args.get("assemblyId") # query all assemblies present in gohan if not specified - assemblies = [assembly_from_query] if assembly_from_query is not None else gohan_assemblies() + assemblies = [assembly_from_query] if assembly_from_query is not None else await gohan_assemblies() gohan_args = beacon_to_gohan_generic_mapping(beacon_args) gohan_results = [] # TODO: async for assembly in assemblies: - gene_info = gene_position_lookup(gene_id, assembly) + gene_info = await gene_position_lookup(gene_id, assembly) if not gene_info: continue @@ -157,32 +158,32 @@ def geneId_query_to_gohan(beacon_args, granularity, ids_only): "getSampleIdsOnly": ids_only, } - gohan_results.extend(generic_gohan_query(gohan_args_this_query, granularity, ids_only)) + gohan_results.extend(await generic_gohan_query(gohan_args_this_query, granularity, ids_only)) return gohan_results -def generic_gohan_query(gohan_args, granularity, ids_only): +async def generic_gohan_query(gohan_args, granularity, ids_only): if ids_only: - return gohan_ids_only_query(gohan_args, granularity) + return await gohan_ids_only_query(gohan_args, granularity) if granularity == "record": - return gohan_full_record_query(gohan_args) + return await gohan_full_record_query(gohan_args) # count or boolean query follows config = current_app.config query_url = config["GOHAN_BASE_URL"] + config["GOHAN_COUNT_ENDPOINT"] current_app.logger.debug(f"launching gohan query: {gohan_args}") - results = gohan_results(query_url, gohan_args) + results = await gohan_results(query_url, gohan_args) count = results.get("count") if results else None return {"count": count} -def gohan_ids_only_query(gohan_args, granularity): +async def 
gohan_ids_only_query(gohan_args, granularity):
     config = current_app.config
     query_url = config["GOHAN_BASE_URL"] + config["GOHAN_SEARCH_ENDPOINT"]
     current_app.logger.debug(f"launching gohan query: {gohan_args}")
-    results = gohan_results(query_url, gohan_args)
+    results = await gohan_results(query_url, gohan_args)
     return unpackage_sample_ids(results)
 
 
@@ -191,33 +192,32 @@
     return list(map(lambda r: r.get("sample_id"), calls))
 
 
-def gohan_results(url, gohan_args):
-    response = gohan_network_call(url, gohan_args)
+async def gohan_results(url, gohan_args):
+    response = await gohan_network_call(url, gohan_args)
     results_array = response.get("results")
     results = results_array[0] if results_array else None
     return results
 
 
-def gohan_network_call(url, gohan_args):
+async def gohan_network_call(url, gohan_args):
     c = current_app.config
-
     try:
-        r = requests.get(
-            url,
-            headers=create_access_header_or_fall_back(),
-            params=gohan_args,
-            timeout=c["GOHAN_TIMEOUT"],
-            verify=c["BENTO_VALIDATE_SSL"],
-        )
+        async with aiohttp.ClientSession(connector=tcp_connector(c)) as s:
+            r = await s.get(
+                url,
+                headers=await create_access_header_or_fall_back(),
+                timeout=c["GOHAN_TIMEOUT"],
+                params=aiohttp_params(gohan_args),
+            )
 
-        # handle gohan errors or any bad responses
-        if not r.ok:
-            current_app.logger.warning(f"gohan error, status: {r.status_code}, message: {r.text}")
-            raise APIException(message="error searching gohan variants service")
+            # handle gohan errors or any bad responses
+            if not r.ok:
+                current_app.logger.warning(f"gohan error, status: {r.status}, message: {await r.text()}")
+                raise APIException(message="error searching gohan variants service")
 
-        gohan_response = r.json()
+            gohan_response = await r.json()
 
-    except requests.exceptions.RequestException as e:
+    except aiohttp.ClientError as e:
         current_app.logger.error(f"gohan error: {e}")
         raise APIException(message="error calling gohan variants service")
@@ -225,39 +225,39 @@
 
 # currently used internally only
-def gohan_full_record_query(gohan_args):
+async def gohan_full_record_query(gohan_args):
     config = current_app.config
     query_url = config["GOHAN_BASE_URL"] + config["GOHAN_SEARCH_ENDPOINT"]
-    response = gohan_results(query_url, gohan_args)
+    response = await gohan_results(query_url, gohan_args)
     return response.get("calls")
 
 
-def gohan_overview():
+async def gohan_overview():
     config = current_app.config
     url = config["GOHAN_BASE_URL"] + config["GOHAN_OVERVIEW_ENDPOINT"]
-    return gohan_network_call(url, {})
+    return await gohan_network_call(url, {})
 
 
-def gohan_totals_by_sample_id():
-    return gohan_overview().get("sampleIDs", {})
+async def gohan_totals_by_sample_id():
+    return (await gohan_overview()).get("sampleIDs", {})
 
 
-def gohan_total_variants_count():
-    totals_by_id = gohan_totals_by_sample_id()
+async def gohan_total_variants_count():
+    totals_by_id = await gohan_totals_by_sample_id()
     return sum(totals_by_id.values())
 
 
-def gohan_counts_by_assembly_id():
-    return gohan_overview().get("assemblyIDs", {})
+async def gohan_counts_by_assembly_id():
+    return (await gohan_overview()).get("assemblyIDs", {})
 
 
-def gohan_assemblies():
-    return list(gohan_overview().get("assemblyIDs", {}).keys())
+async def gohan_assemblies():
+    return list((await gohan_overview()).get("assemblyIDs", {}).keys())
 
 
 # only runs if "useGohan" true
-def gohan_counts_for_overview():
-    return gohan_counts_by_assembly_id()
+async def gohan_counts_for_overview():
+    return await gohan_counts_by_assembly_id()
 
 
 # --------------------------------------------
diff --git a/bento_beacon/utils/handover_utils.py b/bento_beacon/utils/handover_utils.py
index 
f60a3293..0eeb310d 100644 --- a/bento_beacon/utils/handover_utils.py +++ b/bento_beacon/utils/handover_utils.py @@ -1,20 +1,23 @@ +import aiohttp from flask import current_app -import requests from urllib.parse import urlsplit, urlunsplit from .katsu_utils import katsu_network_call from .exceptions import APIException +from .http import tcp_connector from ..authz.headers import auth_header_from_request DRS_TIMEOUT_SECONDS = 10 -def drs_url_components(): - return urlsplit(current_app.config["DRS_URL"]) +def drs_url_components(c): + return urlsplit(c["DRS_URL"]) -def drs_network_call(path, query): - base_url_components = drs_url_components() +async def drs_network_call(path, query): + c = current_app.config + + base_url_components = drs_url_components(c) url = urlunsplit( ( base_url_components.scheme, @@ -26,30 +29,26 @@ def drs_network_call(path, query): ) try: - r = requests.get( - url, - headers=auth_header_from_request(), - timeout=DRS_TIMEOUT_SECONDS, - verify=not current_app.config.get("BENTO_DEBUG"), - ) - drs_response = r.json() + async with aiohttp.ClientSession(connector=tcp_connector(c)) as s: + r = await s.get(url, headers=auth_header_from_request(), timeout=DRS_TIMEOUT_SECONDS) + drs_response = await r.json() # TODO # on handover errors, keep returning rest of results instead of throwing api exception # add optional note in g and add to beacon response # return {} - except requests.exceptions.RequestException as e: + except aiohttp.ClientError as e: current_app.logger.error(f"drs error: {e}") raise APIException(message="error generating handover links") return drs_response -def drs_object_from_filename(filename): - return drs_network_call("/search", f"name={filename}") +async def drs_object_from_filename(filename): + return await drs_network_call("/search", f"name={filename}") -def filenames_by_results_set(ids): +async def filenames_by_results_set(ids): if not ids: return {} @@ -61,7 +60,7 @@ def filenames_by_results_set(ids): "field": ["biosamples", "[item]", "experiment", "[item]", "experiment_results", "[item]", "filename"], } - response = katsu_network_call(payload) + response = await katsu_network_call(payload) results = response.get("results") files_by_results_set = {} @@ -76,8 +75,8 @@ def filenames_by_results_set(ids): return files_by_results_set -def drs_link_from_vcf_filename(filename): - obj = drs_object_from_filename(filename) +async def drs_link_from_vcf_filename(filename): + obj = await drs_object_from_filename(filename) if not obj: return None @@ -99,18 +98,18 @@ def vcf_handover_entry(url, note=None): return entry -def handover_for_ids(ids): +async def handover_for_ids(ids): # ideally we would preserve the mapping between ids and links, # but this requires changes in katsu to do well handovers = {} - files_for_results = filenames_by_results_set(ids) + files_for_results = await filenames_by_results_set(ids) for results_set, files in files_for_results.items(): handovers_this_set = [] for f in files: - link = drs_link_from_vcf_filename(f) + link = await drs_link_from_vcf_filename(f) if link: handovers_this_set.append(vcf_handover_entry(link)) handovers[results_set] = handovers_this_set diff --git a/bento_beacon/utils/http.py b/bento_beacon/utils/http.py new file mode 100644 index 00000000..96d519f9 --- /dev/null +++ b/bento_beacon/utils/http.py @@ -0,0 +1,10 @@ +import aiohttp + + +def tcp_connector(c): + return aiohttp.TCPConnector(verify_ssl=not c["BENTO_DEBUG"]) + + +# aiohttp refuses to encode bools +def aiohttp_params(p): + return {k: (str(v) if isinstance(v, 
bool) else v) for k, v in p.items()}
diff --git a/bento_beacon/utils/katsu_utils.py b/bento_beacon/utils/katsu_utils.py
index 2934aedd..eeddaa61 100644
--- a/bento_beacon/utils/katsu_utils.py
+++ b/bento_beacon/utils/katsu_utils.py
@@ -1,8 +1,11 @@
-import requests
+import aiohttp
 from flask import current_app
 from functools import reduce
 from json import JSONDecodeError
 from urllib.parse import urlsplit, urlunsplit
+
+from .http import tcp_connector
 from typing import Literal
 from .exceptions import APIException, InvalidQuery
 from ..authz.access import create_access_header_or_fall_back
@@ -11,9 +14,9 @@
 RequiresAuthOptions = Literal["none", "forwarded", "full"]
 
 
-def katsu_filters_query(beacon_filters, datatype, get_biosample_ids=False):
+async def katsu_filters_query(beacon_filters, datatype, get_biosample_ids=False):
     payload = katsu_json_payload(beacon_filters, datatype, get_biosample_ids)
-    response = katsu_network_call(payload)
+    response = await katsu_network_call(payload)
     results = response.get("results")
     match_list = []
 
@@ -34,7 +37,7 @@
     return list(set(match_list))
 
 
-def katsu_filters_and_sample_ids_query(beacon_filters, datatype, sample_ids):
+async def katsu_filters_and_sample_ids_query(beacon_filters, datatype, sample_ids):
 
     # empty query
     if not beacon_filters and not sample_ids:
@@ -44,10 +47,10 @@
     filters_copy = beacon_filters[:]
     if sample_ids:
         filters_copy.append({"id": "biosamples.[item].id", "operator": "#in", "value": sample_ids})
-    return katsu_filters_query(filters_copy, datatype)
+    return await katsu_filters_query(filters_copy, datatype)
 
 
-def katsu_network_call(payload, endpoint=None):
+async def katsu_network_call(payload, endpoint=None):
     c = current_app.config
 
     # awkward default since current_app not available in function params
@@ -56,26 +59,22 @@
     current_app.logger.debug(f"calling katsu url {url}")
 
     try:
-        r = requests.post(
-            url,
-            headers=create_access_header_or_fall_back(),
-            json=payload,
-            timeout=c["KATSU_TIMEOUT"],
-            verify=c["BENTO_VALIDATE_SSL"],
-        )
+        async with aiohttp.ClientSession(connector=tcp_connector(c)) as s:
+            r = await s.post(
+                url, headers=await create_access_header_or_fall_back(), timeout=c["KATSU_TIMEOUT"], json=payload
+            )
+            katsu_response = await r.json()
 
-        katsu_response = r.json()
-        if not r.ok:
-            current_app.logger.warning(
-                f"katsu error, status: {r.status_code}, message: {katsu_response.get('message')}"
-            )
-            raise APIException(message=f"error searching katsu metadata service: {katsu_response.get('message')}")
+            if not r.ok:
+                current_app.logger.warning(f"katsu error, status: {r.status}, message: {katsu_response.get('message')}")
+                raise APIException(message=f"error searching katsu metadata service: {katsu_response.get('message')}")
 
     except JSONDecodeError:
         # katsu returns html for unhandled exceptions, not json
         current_app.logger.error(f"katsu error: JSON decode error with POST {url}")
         raise APIException(message="invalid non-JSON response from katsu")
-    except requests.exceptions.RequestException as e:
+    except aiohttp.ClientError as e:
         current_app.logger.error(f"katsu error: {e}")
         raise APIException(message="error calling katsu metadata service")
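# ---------------------------------------------------------------------------
# Editor's aside (not part of the patch): each helper in this file opens a
# fresh aiohttp.ClientSession (and TCPConnector) per call, which gives up
# connection pooling; aiohttp's docs recommend reusing one session where
# practical. A minimal sketch of a shared-session variant, with hypothetical
# names (an assumption, not what this patch does):
#
#     import aiohttp
#
#     _session: aiohttp.ClientSession | None = None
#
#     async def shared_session() -> aiohttp.ClientSession:
#         # lazily create one session; reuse its connection pool across calls
#         global _session
#         if _session is None or _session.closed:
#             _session = aiohttp.ClientSession()
#         return _session
#
#     async def close_shared_session() -> None:
#         # call once at application shutdown
#         global _session
#         if _session is not None and not _session.closed:
#             await _session.close()
# ---------------------------------------------------------------------------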
query="", requires_auth: RequiresAuthOptions = "none"): c = current_app.config katsu_base_url = c["KATSU_BASE_URL"] timeout = c["KATSU_TIMEOUT"] @@ -101,20 +100,21 @@ def katsu_get(endpoint, id=None, query="", requires_auth: RequiresAuthOptions = ) ) + headers = {} + if requires_auth == "forwarded": + headers = auth_header_from_request() + elif requires_auth == "full": + headers = await create_access_header_or_fall_back() try: - headers = {} - if requires_auth == "forwarded": - headers = auth_header_from_request() - elif requires_auth == "full": - headers = create_access_header_or_fall_back() - r = requests.get(query_url, headers=headers, timeout=timeout, verify=c["BENTO_VALIDATE_SSL"]) - katsu_response = r.json() + async with aiohttp.ClientSession(connector=tcp_connector(c)) as s: + r = await s.get(query_url, headers=headers, timeout=timeout) + katsu_response = await r.json() except JSONDecodeError: # katsu returns html for unhandled exceptions, not json current_app.logger.error(f"katsu error: JSON decode error with GET {query_url}") raise APIException(message="invalid non-JSON response from katsu") - except requests.exceptions.RequestException as e: + except aiohttp.ClientError as e: current_app.logger.error(f"katsu error: {e}") raise APIException(message="error calling katsu metadata service") @@ -126,17 +126,15 @@ def katsu_get(endpoint, id=None, query="", requires_auth: RequiresAuthOptions = # ------------------------------------------------------- -def search_from_config(config_filters): +async def search_from_config(config_filters): # query error checking handled in katsu query_string = "&".join(f'{cf["id"]}{cf["operator"]}{cf["value"]}' for cf in config_filters) - response = katsu_get(current_app.config["KATSU_BEACON_SEARCH"], query=query_string, requires_auth="full") + response = await katsu_get(current_app.config["KATSU_BEACON_SEARCH"], query=query_string, requires_auth="full") return response.get("matches", []) -def get_katsu_config_search_fields(requires_auth: RequiresAuthOptions): - # standard forwarded auth for normal beacon requests - # "none" auth for beacon network init, which does not have a request context - fields = katsu_get(current_app.config["KATSU_PUBLIC_CONFIG_ENDPOINT"], requires_auth=requires_auth) +async def get_katsu_config_search_fields(requires_auth: RequiresAuthOptions): + fields = await katsu_get(current_app.config["KATSU_PUBLIC_CONFIG_ENDPOINT"], requires_auth="forwarded") current_app.config["KATSU_CONFIG_SEARCH_FIELDS"] = fields return fields @@ -208,8 +206,8 @@ def katsu_json_payload(filters, datatype, get_biosample_ids): # ------------------------------------------------------- -def katsu_autocomplete_terms(endpoint): - return katsu_get(endpoint).get("results", []) +async def katsu_autocomplete_terms(endpoint): + return await katsu_get(endpoint).get("results", []) def katsu_autocomplete_to_beacon_filter(a): @@ -221,9 +219,9 @@ def katsu_resources_to_beacon_resource(r): return {key: value for (key, value) in r.items() if key != "created" and key != "updated"} -def katsu_config_filtering_terms(): +async def katsu_config_filtering_terms(): filtering_terms = [] - sections = get_katsu_config_search_fields(requires_auth="forwarded").get("sections", []) + sections = (await get_katsu_config_search_fields(requires_auth="forwarded")).get("sections", []) for section in sections: for field in section["fields"]: filtering_term = { @@ -255,10 +253,10 @@ def katsu_config_filtering_terms(): # memoize? 
@@ -255,10 +253,10 @@ def katsu_config_filtering_terms():
 
 
 # memoize?
-def get_filtering_terms():
+async def get_filtering_terms():
     # add ontology filters here when we start supporting ontologies
     # could also add filters for phenopacket and experiment queries
-    return katsu_config_filtering_terms()
+    return await katsu_config_filtering_terms()
 
 
 # -------------------------------------------------------
@@ -266,20 +264,20 @@ def get_filtering_terms():
 # -------------------------------------------------------
 
 
-def katsu_total_individuals_count():
+async def katsu_total_individuals_count():
     c = current_app.config
     endpoint = c["KATSU_INDIVIDUALS_ENDPOINT"]
-    count_response = katsu_get(endpoint, query="page_size=1", requires_auth="full")
+    count_response = await katsu_get(endpoint, query="page_size=1", requires_auth="full")
     count = count_response.get("count")
     return count
 
 
-def katsu_datasets(id=None):
+async def katsu_datasets(id=None):
     c = current_app.config
     endpoint = c["KATSU_DATASETS_ENDPOINT"]
     try:
         # right now, the datasets endpoint doesn't need any authorization for listing
-        response = katsu_get(endpoint, id, query="format=phenopackets", requires_auth="none")
+        response = await katsu_get(endpoint, id, query="format=phenopackets", requires_auth="none")
     except APIException:
         return {}
 
@@ -292,35 +290,35 @@ def katsu_datasets(id=None):
     return response  # single dataset
 
 
-def phenopackets_for_ids(ids):
+async def phenopackets_for_ids(ids):
     # retrieve from katsu search
     payload = {"data_type": "phenopacket", "query": ["#in", ["#resolve", "subject", "id"], ["#list", *ids]]}
     endpoint = current_app.config["KATSU_SEARCH_ENDPOINT"]
-    return katsu_network_call(payload, endpoint)
+    return await katsu_network_call(payload, endpoint)
 
 
-def biosample_ids_for_individuals(individual_ids):
+async def biosample_ids_for_individuals(individual_ids):
     if not individual_ids:
         return []
     filters = [{"id": "subject.id", "operator": "#in", "value": individual_ids}]
-    return katsu_filters_query(filters, "phenopacket", get_biosample_ids=True)
+    return await katsu_filters_query(filters, "phenopacket", get_biosample_ids=True)
 
 
-def search_summary_statistics(ids):
+async def search_summary_statistics(ids):
     endpoint = current_app.config["KATSU_SEARCH_OVERVIEW"]
     payload = {"id": ids}
-    return katsu_network_call(payload, endpoint)
+    return await katsu_network_call(payload, endpoint)
 
 
-def overview_statistics():
-    return katsu_get(current_app.config["KATSU_PRIVATE_OVERVIEW"], requires_auth="full")
+async def overview_statistics():
+    return await katsu_get(current_app.config["KATSU_PRIVATE_OVERVIEW"], requires_auth="full")
 
 
-def katsu_censorship_settings() -> tuple[int | None, int | None]:
+async def katsu_censorship_settings() -> tuple[int | None, int | None]:
     # TODO: should be project-dataset scoped
     # TODO: should be called on-the-fly and pass request authorization headers onward, since this can change based on
     #  scoping and the token's particular permissions
-    rules = katsu_get(current_app.config["KATSU_PUBLIC_RULES"], requires_auth="none")
+    rules = await katsu_get(current_app.config["KATSU_PUBLIC_RULES"], requires_auth="none")
     max_filters = rules.get("max_query_parameters")
     count_threshold = rules.get("count_threshold")
     # return even if None
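With every katsu helper now returning a coroutine, independent round-trips can be overlapped rather than run back-to-back, which is the main payoff of this migration. A hypothetical composition (the function name is made up, and it assumes an active Flask app context, which Flask's contextvars-based machinery propagates into tasks started by `gather`):

    import asyncio

    async def info_numbers():
        # run two independent katsu calls concurrently instead of sequentially
        datasets, individuals_count = await asyncio.gather(
            katsu_datasets(),
            katsu_total_individuals_count(),
        )
        return {"datasets": datasets, "individualsCount": individuals_count}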
diff --git a/bento_beacon/utils/reference.py b/bento_beacon/utils/reference.py
index 21244bbc..170153df 100644
--- a/bento_beacon/utils/reference.py
+++ b/bento_beacon/utils/reference.py
@@ -1,37 +1,35 @@
-import requests
+import aiohttp
 from json import JSONDecodeError
 from flask import current_app
 from .exceptions import APIException
+from .http import tcp_connector
 from ..authz.access import create_access_header_or_fall_back
 
 
-def gene_position_lookup(gene_id: str, assembly_id: str) -> dict[str, str | int | None]:
+async def gene_position_lookup(gene_id: str, assembly_id: str) -> dict[str, str | int | None]:
     reference_url = current_app.config["REFERENCE_URL"] + f"/genomes/{assembly_id}/features?name={gene_id}"
 
     try:
-        r = requests.get(
-            reference_url,
-            headers=create_access_header_or_fall_back(),
-            verify=current_app.config["BENTO_VALIDATE_SSL"],
-        )
+        async with aiohttp.ClientSession(connector=tcp_connector(current_app.config)) as s:
+            r = await s.get(reference_url, headers=await create_access_header_or_fall_back())
 
-        if not r.ok:
-            current_app.logger.warning(f"reference service error, status: {r.status_code}, message: {r.text}")
-            raise APIException(message="error searching reference service")
-
-        results = r.json().get("results")
-        if not results:
-            return {}
-
-        chromosome = results[0].get("contig_name", "").removeprefix("chr")
-        entries = results[0].get("entries")
-        start = entries[0].get("start_pos") if entries else None
-        end = entries[0].get("end_pos") if entries else None
+            if not r.ok:
+                current_app.logger.warning(f"reference service error, status: {r.status}, message: {await r.text()}")
+                raise APIException(message="error searching reference service")
+
+            # read the body before the session context closes the connection
+            results = (await r.json()).get("results")
 
     except JSONDecodeError:
         current_app.logger.error(f"error reading response from reference service")
         raise APIException(message="invalid non-JSON response from reference service")
-    except requests.exceptions.RequestException as e:
+    except aiohttp.ClientError as e:
         current_app.logger.error(f"reference service error: {e}")
         raise APIException(message="error calling reference service")
 
+    if not results:
+        return {}
+
+    chromosome = results[0].get("contig_name", "").removeprefix("chr")
+    entries = results[0].get("entries")
+    start = entries[0].get("start_pos") if entries else None
+    end = entries[0].get("end_pos") if entries else None
+
     return {"chromosome": chromosome, "start": start, "end": end}
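reference.py follows the same shape as the katsu calls: open a short-lived session, check `r.ok`, and read the body before the session context exits. The ordering matters, since reading a response after its `ClientSession` has closed raises a connection error, and aiohttp exposes `status` and coroutine body accessors rather than requests' `status_code` and `.text` attributes. The pattern reduced to its essentials, with placeholder names:

    import aiohttp

    async def get_json(url: str) -> dict:
        async with aiohttp.ClientSession() as s:
            r = await s.get(url)
            if not r.ok:
                # aiohttp: r.status (not r.status_code), await r.text()
                raise RuntimeError(f"HTTP {r.status}: {await r.text()}")
            return await r.json()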
diff --git a/bento_beacon/utils/search.py b/bento_beacon/utils/search.py
index c27d91da..ca343dfb 100644
--- a/bento_beacon/utils/search.py
+++ b/bento_beacon/utils/search.py
@@ -1,12 +1,21 @@
 from flask import current_app
 from functools import reduce
 from .gohan_utils import query_gohan
-from .katsu_utils import katsu_filters_query, search_from_config, biosample_ids_for_individuals
+from .katsu_utils import (
+    katsu_filters_query,
+    search_from_config,
+    biosample_ids_for_individuals,
+)
 from .beacon_response import add_info_to_response
 
 
 # TODO: search by linked field set elements instead of hardcoding
-def biosample_id_search(variants_query=None, phenopacket_filters=None, experiment_filters=None, config_filters=None):
+async def biosample_id_search(
+    variants_query=None,
+    phenopacket_filters=None,
+    experiment_filters=None,
+    config_filters=None,
+):
     results_biosample_ids = {}
 
     if not (variants_query or phenopacket_filters or experiment_filters or config_filters):
@@ -17,13 +26,13 @@ def biosample_id_search(variants_query=None, phenopacket_filters=None, experimen
         # variants query even though there are no variants in this beacon, this can happen in a network context
         add_info_to_response("No variants available at this beacon, query by metadata values only")
         return []
-    variant_sample_ids = query_gohan(variants_query, "count", ids_only=True)
+    variant_sample_ids = await query_gohan(variants_query, "count", ids_only=True)
     if not variant_sample_ids:
         return []
     results_biosample_ids["variant_sample_ids"] = variant_sample_ids
 
     if experiment_filters:
-        experiment_sample_ids = katsu_filters_query(experiment_filters, "experiment", get_biosample_ids=True)
+        experiment_sample_ids = await katsu_filters_query(experiment_filters, "experiment", get_biosample_ids=True)
         if not experiment_sample_ids:
             return []
         results_biosample_ids["experiment_sample_ids"] = experiment_sample_ids
@@ -31,15 +40,15 @@ def biosample_id_search(variants_query=None, phenopacket_filters=None, experimen
     # next two return *all* biosample ids for matching individuals
     if phenopacket_filters:
-        phenopacket_sample_ids = katsu_filters_query(phenopacket_filters, "phenopacket", get_biosample_ids=True)
+        phenopacket_sample_ids = await katsu_filters_query(phenopacket_filters, "phenopacket", get_biosample_ids=True)
         if not phenopacket_sample_ids:
             return []
         results_biosample_ids["phenopacket_sample_ids"] = phenopacket_sample_ids
 
     if config_filters:
-        config_individuals = search_from_config(config_filters)
+        config_individuals = await search_from_config(config_filters)
         if not config_individuals:
             return []
-        results_biosample_ids["config_sample_ids"] = biosample_ids_for_individuals(config_individuals)
+        results_biosample_ids["config_sample_ids"] = await biosample_ids_for_individuals(config_individuals)
 
     return list(reduce(set.intersection, (set(ids) for ids in results_biosample_ids.values())))
diff --git a/poetry.lock b/poetry.lock
index 8551a095..d54037d4 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,32 @@
 # This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
 
+[[package]]
+name = "aiocache"
+version = "0.12.3"
+description = "multi backend asyncio cache"
+optional = false
+python-versions = "*"
+files = [
+    {file = "aiocache-0.12.3-py2.py3-none-any.whl", hash = "sha256:889086fc24710f431937b87ad3720a289f7fc31c4fd8b68e9f918b9bacd8270d"},
+    {file = "aiocache-0.12.3.tar.gz", hash = "sha256:f528b27bf4d436b497a1d0d1a8f59a542c153ab1e37c3621713cb376d44c4713"},
+]
+
+[package.extras]
+memcached = ["aiomcache (>=0.5.2)"]
+msgpack = ["msgpack (>=0.5.5)"]
+redis = ["redis (>=4.2.0)"]
+
+[[package]]
+name = "aiofiles"
+version = "24.1.0"
+description = "File support for asyncio."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + [[package]] name = "aiohappyeyeballs" version = "2.4.3" @@ -123,6 +150,21 @@ yarl = ">=1.12.0,<2.0" [package.extras] speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +[[package]] +name = "aioresponses" +version = "0.7.7" +description = "Mock out requests made by ClientSession from aiohttp package" +optional = false +python-versions = "*" +files = [ + {file = "aioresponses-0.7.7-py2.py3-none-any.whl", hash = "sha256:6975f31fe5e7f2113a41bd387221f31854f285ecbc05527272cd8ba4c50764a3"}, + {file = "aioresponses-0.7.7.tar.gz", hash = "sha256:66292f1d5c94a3cb984f3336d806446042adb17347d3089f2d3962dd6e5ba55a"}, +] + +[package.dependencies] +aiohttp = ">=3.3.0,<4.0.0" +packaging = ">=22.0" + [[package]] name = "aiosignal" version = "1.3.1" @@ -148,6 +190,23 @@ files = [ {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, ] +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.8" +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + [[package]] name = "async-timeout" version = "4.0.3" @@ -159,6 +218,19 @@ files = [ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] +[[package]] +name = "asyncio" +version = "3.4.3" +description = "reference implementation of PEP 3156" +optional = false +python-versions = "*" +files = [ + {file = "asyncio-3.4.3-cp33-none-win32.whl", hash = "sha256:b62c9157d36187eca799c378e572c969f0da87cd5fc42ca372d92cdb06e7e1de"}, + {file = "asyncio-3.4.3-cp33-none-win_amd64.whl", hash = "sha256:c46a87b48213d7464f22d9a497b9eef8c1928b68320a2fa94240f969f6fec08c"}, + {file = "asyncio-3.4.3-py3-none-any.whl", hash = "sha256:c4d18b22701821de07bd6aea8b53d21449ec0ec5680645e5317062ea21817d2d"}, + {file = "asyncio-3.4.3.tar.gz", hash = "sha256:83360ff8bc97980e4ff25c964c7bd3923d333d177aa4f7fb736b019f26c7cb41"}, +] + [[package]] name = "attrs" version = "24.2.0" @@ -180,30 +252,31 @@ tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "bento-lib" -version = "10.1.1" +version = "12.2.3" description = "A set of common utilities and helpers for Bento platform services." 
optional = false -python-versions = ">=3.10.0" +python-versions = "<4.0,>=3.10" files = [ - {file = "bento_lib-10.1.1-py3-none-any.whl", hash = "sha256:6fef7bd444aea9550c9f2882f220e2c5f1a15014fcbd244b928e74cfd50b98ee"}, - {file = "bento_lib-10.1.1.tar.gz", hash = "sha256:de0b721d81cc871f6123360885b24de871ba96a88a2a891a506cae6e39e9151a"}, + {file = "bento_lib-12.2.3-py3-none-any.whl", hash = "sha256:5f9cc206f471a87230f08fe20861c43071e8a68707a4cbfd3eb7674e0403e291"}, + {file = "bento_lib-12.2.3.tar.gz", hash = "sha256:bc99d42adc2f1e3a8121169987c8471e5b2ffe2011c4c11c345c6a0f0fa32f46"}, ] [package.dependencies] -aiohttp = ">=3.8.4,<4" -Flask = {version = ">=2.2.5,<4", optional = true, markers = "extra == \"flask\""} -jsonschema = ">=4.17.3,<5" -psycopg2-binary = ">=2.9.5,<3.0" -pydantic = ">=2.3.0,<3" -redis = ">=4.5.4,<5.0" -requests = ">=2.28.1,<3" -Werkzeug = ">=2.2.3,<4" +aiofiles = ">=24.1.0,<25" +aiohttp = ">=3.10.10,<4" +jsonschema = ">=4.23.0,<5" +psycopg2-binary = ">=2.9.9,<3.0" +pydantic = ">=2.8.2,<3" +pydantic-settings = ">=2.4.0,<2.6" +redis = ">=5.0.8,<6" +requests = ">=2.32.3,<3" +werkzeug = ">=2.2.3,<4" [package.extras] -asyncpg = ["asyncpg (>=0.29.0,<0.30.0)"] -django = ["Django (>=4.2.1,<5)", "djangorestframework (>=3.14.0,<3.15)"] -fastapi = ["fastapi (>=0.100,<0.105)"] -flask = ["Flask (>=2.2.5,<4)"] +asyncpg = ["asyncpg (>=0.29.0,<0.31.0)"] +django = ["django (>=5.0.8,<5.2)", "djangorestframework (>=3.14.0,<3.16)"] +fastapi = ["fastapi (>=0.112.1,<0.116)"] +flask = ["flask (>=2.2.5,<4)"] [[package]] name = "black" @@ -546,6 +619,7 @@ files = [ ] [package.dependencies] +asgiref = {version = ">=3.2", optional = true, markers = "extra == \"async\""} blinker = ">=1.6.2" click = ">=8.1.3" itsdangerous = ">=2.1.2" @@ -697,22 +771,38 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "jsonschema" -version = "4.17.3" +version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, + {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, + {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, ] [package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] + +[[package]] +name = "jsonschema-specifications" +version = "2024.10.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.9" +files = [ + {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = 
"sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, + {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, +] + +[package.dependencies] +referencing = ">=0.31.0" [[package]] name = "markupsafe" @@ -1265,46 +1355,25 @@ files = [ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] -name = "pyrsistent" -version = "0.20.0" -description = "Persistent/Functional/Immutable data structures" +name = "pydantic-settings" +version = "2.5.2" +description = "Settings management using Pydantic" optional = false python-versions = ">=3.8" files = [ - {file = "pyrsistent-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c3aba3e01235221e5b229a6c05f585f344734bd1ad42a8ac51493d74722bbce"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1beb78af5423b879edaf23c5591ff292cf7c33979734c99aa66d5914ead880f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21cc459636983764e692b9eba7144cdd54fdec23ccdb1e8ba392a63666c60c34"}, - {file = "pyrsistent-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5ac696f02b3fc01a710427585c855f65cd9c640e14f52abe52020722bb4906b"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win32.whl", hash = "sha256:0724c506cd8b63c69c7f883cc233aac948c1ea946ea95996ad8b1380c25e1d3f"}, - {file = "pyrsistent-0.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:8441cf9616d642c475684d6cf2520dd24812e996ba9af15e606df5f6fd9d04a7"}, - {file = "pyrsistent-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f3b1bcaa1f0629c978b355a7c37acd58907390149b7311b5db1b37648eb6958"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cdd7ef1ea7a491ae70d826b6cc64868de09a1d5ff9ef8d574250d0940e275b8"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cae40a9e3ce178415040a0383f00e8d68b569e97f31928a3a8ad37e3fde6df6a"}, - {file = "pyrsistent-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6288b3fa6622ad8a91e6eb759cfc48ff3089e7c17fb1d4c59a919769314af224"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win32.whl", hash = "sha256:7d29c23bdf6e5438c755b941cef867ec2a4a172ceb9f50553b6ed70d50dfd656"}, - {file = "pyrsistent-0.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:59a89bccd615551391f3237e00006a26bcf98a4d18623a19909a2c48b8e986ee"}, - {file = "pyrsistent-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:09848306523a3aba463c4b49493a760e7a6ca52e4826aa100ee99d8d39b7ad1e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a14798c3005ec892bbada26485c2eea3b54109cb2533713e355c806891f63c5e"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b14decb628fac50db5e02ee5a35a9c0772d20277824cfe845c8a8b717c15daa3"}, - {file = "pyrsistent-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e2c116cc804d9b09ce9814d17df5edf1df0c624aba3b43bc1ad90411487036d"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win32.whl", hash = "sha256:e78d0c7c1e99a4a45c99143900ea0546025e41bb59ebc10182e947cf1ece9174"}, - {file = "pyrsistent-0.20.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:4021a7f963d88ccd15b523787d18ed5e5269ce57aa4037146a2377ff607ae87d"}, - {file = "pyrsistent-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:79ed12ba79935adaac1664fd7e0e585a22caa539dfc9b7c7c6d5ebf91fb89054"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f920385a11207dc372a028b3f1e1038bb244b3ec38d448e6d8e43c6b3ba20e98"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5c2d012671b7391803263419e31b5c7c21e7c95c8760d7fc35602353dee714"}, - {file = "pyrsistent-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3992833fbd686ee783590639f4b8343a57f1f75de8633749d984dc0eb16c86"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win32.whl", hash = "sha256:881bbea27bbd32d37eb24dd320a5e745a2a5b092a17f6debc1349252fac85423"}, - {file = "pyrsistent-0.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:6d270ec9dd33cdb13f4d62c95c1a5a50e6b7cdd86302b494217137f760495b9d"}, - {file = "pyrsistent-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ca52d1ceae015859d16aded12584c59eb3825f7b50c6cfd621d4231a6cc624ce"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b318ca24db0f0518630e8b6f3831e9cba78f099ed5c1d65ffe3e023003043ba0"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fed2c3216a605dc9a6ea50c7e84c82906e3684c4e80d2908208f662a6cbf9022"}, - {file = "pyrsistent-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e14c95c16211d166f59c6611533d0dacce2e25de0f76e4c140fde250997b3ca"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win32.whl", hash = "sha256:f058a615031eea4ef94ead6456f5ec2026c19fb5bd6bfe86e9665c4158cf802f"}, - {file = "pyrsistent-0.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:58b8f6366e152092194ae68fefe18b9f0b4f89227dfd86a07770c3d86097aebf"}, - {file = "pyrsistent-0.20.0-py3-none-any.whl", hash = "sha256:c55acc4733aad6560a7f5f818466631f07efc001fd023f34a6c203f8b6df0f0b"}, - {file = "pyrsistent-0.20.0.tar.gz", hash = "sha256:4c48f78f62ab596c679086084d0dd13254ae4f3d6c72a83ffdf5ebdef8f265a4"}, + {file = "pydantic_settings-2.5.2-py3-none-any.whl", hash = "sha256:2c912e55fd5794a59bf8c832b9de832dcfdf4778d79ff79b708744eed499a907"}, + {file = "pydantic_settings-2.5.2.tar.gz", hash = "sha256:f90b139682bee4d2065273d5185d71d37ea46cfe57e1b5ae184fc6a0b2484ca0"}, ] +[package.dependencies] +pydantic = ">=2.7.0" +python-dotenv = ">=0.21.0" + +[package.extras] +azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"] +toml = ["tomli (>=2.0.1)"] +yaml = ["pyyaml (>=6.0.1)"] + [[package]] name = "pytest" version = "8.3.3" @@ -1345,6 +1414,20 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "pyyaml" version = "6.0.2" @@ -1409,21 +1492,36 @@ files = [ [[package]] 
name = "redis" -version = "4.6.0" +version = "5.2.0" description = "Python client for Redis database and key-value store" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, - {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, + {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"}, + {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"}, ] [package.dependencies] -async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} +async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} [package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] +hiredis = ["hiredis (>=3.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] + +[[package]] +name = "referencing" +version = "0.35.1" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" [[package]] name = "requests" @@ -1465,6 +1563,105 @@ urllib3 = ">=1.25.10,<3.0" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "tomli", "tomli-w", "types-PyYAML", "types-requests"] +[[package]] +name = "rpds-py" +version = "0.21.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, + {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, + {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, + {file = 
"rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, + {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, + {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, + {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, + {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, + {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, + {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, + {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, + {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, + {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, + {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, + {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, + {file = 
"rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, + {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, + {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, + {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, + {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, + {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, + {file = "rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, + {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, + {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, + {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, + {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, + {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, + {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, + {file = 
"rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, + {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, + {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, + {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, + {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, + {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, + {file = 
"rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, + {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, + {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, +] + [[package]] name = "tomli" version = "2.0.2" @@ -1630,4 +1827,4 @@ propcache = ">=0.2.0" [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "ed70b0da2b5c8c7fcd24dbaa3a5c20863c44ff36d3bdd53b52e209bd3d30614f" +content-hash = "a5569704c4a8165a5f018a2810415c9f1867c8ff6b7ebf0e234d9c50e582d5fe" diff --git a/pyproject.toml b/pyproject.toml index 14ffb365..19f09370 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,10 +9,13 @@ packages = [{include = "bento_beacon"}] [tool.poetry.dependencies] python = "^3.10" -bento-lib = {extras = ["flask"], version = "^10.1.1"} -flask = "^3.0.3" +bento-lib = "^12.2.3" +flask = {extras = ["async"], version = "^3.0.3"} requests = "^2.32.3" -jsonschema = ">=4.17.3,<4.18.0" +asyncio = "^3.4.3" +aiohttp = "^3.10.10" +aiocache = "^0.12.3" +aioresponses = "^0.7.7" [tool.poetry.group.dev.dependencies] debugpy = "^1.8.6" diff --git a/tests/conftest.py b/tests/conftest.py index 2e4e1861..ab881693 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,21 +1,11 @@ +import jsonschema import os import pathlib from urllib.parse import urlunsplit +from aioresponses import aioresponses from flask import current_app -import jsonschema import pytest -import responses -from .data.service_responses import ( - katsu_datasets_response, - katsu_config_search_fields_response, - katsu_private_search_response, - katsu_private_search_overview_response, - katsu_public_search_response, - katsu_public_rules_response, - 
katsu_individuals_response,
-    gohan_search_response,
-    token_response,
-)
+
 
 TESTS_DIR = pathlib.Path(__file__).parent.absolute()
 BEACON_RESPONSE_SPEC_RELATIVE_PATH = "beacon-v2/framework/json/responses/"
@@ -25,8 +15,7 @@
 OPENID_CONFIG_URL = AUTHZ_URL + "/fake/openid-configuration"
 TOKEN_URL = AUTHZ_URL + "/fake/token"
 
-MOCK_ACCESS_TOKEN = "fakeToken"
-token_endpoint_config_response = {
+TOKEN_ENDPOINT_CONFIG_RESPONSE = {
     "token_endpoint": TOKEN_URL,
 }
 
@@ -52,7 +41,7 @@ def beacon_test_app():
             "BENTO_OPENID_CONFIG_URL": OPENID_CONFIG_URL,
             "BEACON_CLIENT_ID": "aggregation",
             "BEACON_CLIENT_SECRET": "FAKE123",
-            "BENTO_VALIDATE_SSL": "false",
+            "BENTO_BEACON_NETWORK_ENABLED": "true",
         }
     )
 
@@ -72,6 +61,12 @@ def app_config(beacon_test_app):
         yield current_app.config
 
 
+@pytest.fixture
+def aioresponse():
+    with aioresponses() as m:
+        yield m
+
+
 def validate_response(response, schema_filename):
     beacon_response_spec_dir = urlunsplit(("file", str(TESTS_DIR.parent), BEACON_RESPONSE_SPEC_RELATIVE_PATH, "", ""))
     jsonschema.validate(
@@ -82,78 +77,3 @@ def validate_response(response, schema_filename):
             referrer=True,
         ),
     )
-
-
-# ------------------------------
-# mock external api calls
-# could parameterize the different endpoint urls,
-# but main danger is the endpoints changing in the services themselves, but that's not tested here
-
-
-def authz_everything_true(count=1):
-    mock_post(f"{AUTHZ_URL}/policy/evaluate", {"result": [[True] for _ in range(count)]})
-
-
-def authz_everything_false(count=1):
-    mock_post(f"{AUTHZ_URL}/policy/evaluate", {"result": [[False] for _ in range(count)]})
-
-
-def auth_get_oidc_token():
-    auth_oidc_token_config()
-    auth_oidc_token_response()
-
-
-def auth_oidc_token_config():
-    mock_get(OPENID_CONFIG_URL, token_endpoint_config_response)
-
-
-def auth_oidc_token_response():
-    mock_post(TOKEN_URL, token_response)
-
-
-def katsu_config_search_fields():
-    mock_get(f"{KATSU_URL}/api/public_search_fields", {"result": katsu_config_search_fields_response})
-
-
-def katsu_private_search():
-    mock_post(f"{KATSU_URL}/private/search", katsu_private_search_response)
-
-
-def katsu_private_search_overview():
-    mock_post(f"{KATSU_URL}/api/search_overview", katsu_private_search_overview_response)
-
-
-def katsu_datasets():
-    mock_get(f"{KATSU_URL}/api/datasets", katsu_datasets_response)
-
-
-def katsu_individuals():
-    mock_get(f"{KATSU_URL}/api/individuals", katsu_individuals_response)
-
-
-def katsu_public_search():
-    mock_get(f"{KATSU_URL}/api/public", katsu_public_search_response)
-
-
-def katsu_public_rules():
-    mock_get(f"{KATSU_URL}/api/public_rules", katsu_public_rules_response)
-
-
-def gohan_search():
-    mock_get(f"{GOHAN_URL}/variants/get/by/variantId", gohan_search_response)
-
-
-def gohan_overview():
-    mock_get(f"{GOHAN_URL}/variants/overview", gohan_search_response)
-
-
-def mock_get(url, response):
-    responses.get(url, json=response)
-
-
-def mock_post(url, response):
-    # ignore request payload
-    responses.post(url, json=response)
-
-
-# add github test workflow
diff --git a/tests/data/beacon_network_config.json b/tests/data/beacon_network_config.json
index 9e26dfee..943ed642 100644
--- a/tests/data/beacon_network_config.json
+++ b/tests/data/beacon_network_config.json
@@ -1 +1,8 @@
-{}
\ No newline at end of file
+{
+  "beacons": [
+    "https://test.local/api/beacon",
+    "https://fake2.bento.local/api/beacon"
+  ],
+  "network_default_timeout_seconds": 30,
+  "network_variants_query_timeout_seconds": 120
+}
\ No newline at end of file
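The `aioresponse` fixture above is the replacement for the old `@responses.activate` decorator: `aioresponses()` patches aiohttp's `ClientSession` so that every request made inside the context is served from registered mocks and never touches the network. A standalone example of the mechanism (URLs are made up):

    import asyncio
    import aiohttp
    from aioresponses import aioresponses

    async def fetch(url: str) -> dict:
        async with aiohttp.ClientSession() as s:
            r = await s.get(url)
            return await r.json()

    with aioresponses() as m:
        # payload= is serialized to a JSON response body
        m.get("https://katsu.local/api/datasets", payload={"results": []})
        assert asyncio.run(fetch("https://katsu.local/api/datasets")) == {"results": []}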
diff --git a/tests/test_routes.py b/tests/test_routes.py
index 7e118d25..cd9908cc 100644
--- a/tests/test_routes.py
+++ b/tests/test_routes.py
@@ -1,19 +1,18 @@
-import responses
+from .data.service_responses import (
+    katsu_datasets_response,
+    katsu_public_rules_response,
+    katsu_config_search_fields_response,
+    gohan_search_response,
+    katsu_private_search_response,
+    katsu_public_search_response,
+    katsu_private_search_overview_response,
+    katsu_individuals_response,
+    token_response,
+)
 from .conftest import (
     validate_response,
-    authz_everything_true,
-    authz_everything_false,
-    auth_get_oidc_token,
-    katsu_config_search_fields,
-    katsu_datasets,
-    katsu_private_search,
-    katsu_private_search_overview,
-    katsu_individuals,
-    katsu_public_search,
-    katsu_public_rules,
-    gohan_search,
-    gohan_overview,
+    TOKEN_ENDPOINT_CONFIG_RESPONSE,
 )
 
@@ -31,29 +30,32 @@
 }
 
 
+# --------------------------------------------------------
 # info endpoints
 # --------------------------------------------------------
 
 
-@responses.activate
-def test_service_info(client):
-    katsu_datasets()
+def test_service_info(app_config, client, aioresponse):
+    datasets_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_DATASETS_ENDPOINT"]
+    aioresponse.get(datasets_url, payload=katsu_datasets_response)
     response = client.get("/service-info")
     validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["service-info"])
 
 
-@responses.activate
-def test_root(client):
-    katsu_datasets()
+def test_root(app_config, client, aioresponse):
+    datasets_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_DATASETS_ENDPOINT"]
+    aioresponse.get(datasets_url, payload=katsu_datasets_response)
     response = client.get("/")
     validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["info"])
 
 
-@responses.activate
-def test_info(client):
-    katsu_datasets()
-    gohan_overview()
-    katsu_individuals()
+def test_info(app_config, client, aioresponse):
+    datasets_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_DATASETS_ENDPOINT"] + "?format=phenopackets"
+    individuals_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_INDIVIDUALS_ENDPOINT"] + "?page_size=1"
+    gohan_overview_url = app_config["GOHAN_BASE_URL"] + app_config["GOHAN_OVERVIEW_ENDPOINT"]
+    aioresponse.get(datasets_url, payload=katsu_datasets_response)
+    aioresponse.get(gohan_overview_url, payload=gohan_search_response)
+    aioresponse.get(individuals_url, payload=katsu_individuals_response)
     response = client.get("/info")
     validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["info"])
 
@@ -68,25 +70,29 @@ def test_entry_types(client):
     validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["entry_types"])
 
 
-@responses.activate
-def test_configuration_endpoint(client):
-    katsu_datasets()
+def test_configuration_endpoint(app_config, client, aioresponse):
+    datasets_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_DATASETS_ENDPOINT"]
+    aioresponse.get(datasets_url, payload=katsu_datasets_response)
     response = client.get("/configuration")
     validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["configuration"])
 
 
-@responses.activate
-def test_filtering_terms(client):
-    katsu_config_search_fields()
+def test_filtering_terms(app_config, client, aioresponse):
+    datasets_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_DATASETS_ENDPOINT"]
+    search_fields_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_PUBLIC_CONFIG_ENDPOINT"]
+    aioresponse.get(search_fields_url, payload=katsu_config_search_fields_response)
+    aioresponse.get(datasets_url, payload=katsu_datasets_response)
     response = client.get("/filtering_terms")
     validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["filtering_terms"])
client.get("/filtering_terms") validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["filtering_terms"]) -@responses.activate -def test_overview(client): - katsu_datasets() - gohan_overview() - katsu_individuals() +def test_overview(app_config, client, aioresponse): + datasets_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_DATASETS_ENDPOINT"] + "?format=phenopackets" + individuals_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_INDIVIDUALS_ENDPOINT"] + "?page_size=1" + gohan_overview_url = app_config["GOHAN_BASE_URL"] + app_config["GOHAN_OVERVIEW_ENDPOINT"] + aioresponse.get(datasets_url, payload=katsu_datasets_response) + aioresponse.get(gohan_overview_url, payload=gohan_search_response) + aioresponse.get(individuals_url, payload=katsu_individuals_response) response = client.get("/overview") # /overview is bento-only, does not exist in beacon spec @@ -95,48 +101,73 @@ def test_overview(client): assert "overview" in response.get_json().get("response") -# -------------------------------- - -request_body = { - "meta": {"apiVersion": "2.0.0"}, - "query": { - "requestParameters": { - "g_variant": {"referenceName": "3", "start": [189631388], "assemblyId": "GRCh38", "end": [189897276]} - }, - "filters": [{"id": "sex", "operator": "=", "value": "FEMALE"}], - }, - "bento": {"showSummaryStatistics": True}, -} +# -------------------------------------------------------- +# entities +# -------------------------------------------------------- -@responses.activate -def test_datasets(client): - authz_everything_true() - katsu_config_search_fields() - katsu_datasets() +def test_datasets(app_config, client, aioresponse): + authz_evaluate_url = app_config["AUTHZ_URL"] + "/policy/evaluate" + search_fields_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_PUBLIC_CONFIG_ENDPOINT"] + datasets_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_DATASETS_ENDPOINT"] + aioresponse.post(authz_evaluate_url, payload={"result": [[True]]}) + aioresponse.get(search_fields_url, payload=katsu_config_search_fields_response) + aioresponse.get(datasets_url, payload=katsu_datasets_response) response = client.get("/datasets") assert response.status_code == 200 validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["collections_response"]) -@responses.activate -def test_individuals_no_query(client): - authz_everything_true() - katsu_individuals() +def test_individuals_no_query(app_config, client, aioresponse): + authz_evaluate_url = app_config["AUTHZ_URL"] + "/policy/evaluate" + individuals_url = app_config["KATSU_BASE_URL"] + app_config["KATSU_INDIVIDUALS_ENDPOINT"] + "?page_size=1" + aioresponse.post(authz_evaluate_url, payload={"result": [[True]]}) + aioresponse.get(individuals_url, payload=katsu_individuals_response) response = client.get("/individuals") assert response.status_code == 200 validate_response(response.get_json(), RESPONSE_SPEC_FILENAMES["count_response"]) -@responses.activate -def test_individuals_query_all_permissions(client): - authz_everything_true() - auth_get_oidc_token() - katsu_public_search() - katsu_private_search() - katsu_private_search_overview() - gohan_search() - response = client.post("/individuals", json=request_body) +# -------------------------------------------------------- +# queries +# -------------------------------------------------------- + + +BEACON_REQUEST_BODY = { + "meta": {"apiVersion": "2.0.0"}, + "query": { + "requestParameters": { + "g_variant": {"referenceName": "3", "start": [189631388], "assemblyId": "GRCh38", "end": [189897276]} + }, + 
"filters": [{"id": "sex", "operator": "=", "value": "FEMALE"}], + }, + "bento": {"showSummaryStatistics": True}, +} + +# aioresponses includes query params when matching urls +KATSU_QUERY_PARAMS = "sex=FEMALE" +GOHAN_QUERY_PARAMS = "assemblyId=GRCh38&chromosome=3&getSampleIdsOnly=True&lowerBound=189631389&upperBound=189897276" + + +def test_individuals_query_all_permissions(app_config, client, aioresponse): + authz_evaluate_url = app_config["AUTHZ_URL"] + "/policy/evaluate" + openid_config_url = app_config["OPENID_CONFIG_URL"] + token_url = app_config["AUTHZ_URL"] + "/fake/token" + katsu_url = app_config["KATSU_BASE_URL"] + katsu_private_search_url = katsu_url + app_config["KATSU_SEARCH_ENDPOINT"] + katsu_search_overview_url = katsu_url + app_config["KATSU_SEARCH_OVERVIEW"] + katsu_public_search_url = katsu_url + app_config["KATSU_BEACON_SEARCH"] + "?" + KATSU_QUERY_PARAMS + gohan_search_url = app_config["GOHAN_BASE_URL"] + app_config["GOHAN_SEARCH_ENDPOINT"] + "?" + GOHAN_QUERY_PARAMS + + aioresponse.get(openid_config_url, payload=TOKEN_ENDPOINT_CONFIG_RESPONSE) + aioresponse.post(authz_evaluate_url, payload={"result": [[True]]}) + aioresponse.post(token_url, payload=token_response, repeat=True) + aioresponse.post(katsu_private_search_url, payload=katsu_private_search_response) + aioresponse.post(katsu_search_overview_url, payload=katsu_private_search_overview_response) + aioresponse.get(katsu_public_search_url, payload=katsu_public_search_response) + aioresponse.get(gohan_search_url, payload=gohan_search_response) + + response = client.post("/individuals", json=BEACON_REQUEST_BODY) data = response.get_json() # waiting for fixes to beacon spec before we do any json verification here @@ -149,19 +180,38 @@ def test_individuals_query_all_permissions(client): assert data["responseSummary"]["numTotalResults"] == 4 -@responses.activate -def test_individuals_query_no_permissions(client): - authz_everything_false() - auth_get_oidc_token() - katsu_public_search() - katsu_private_search() - katsu_public_rules() - katsu_private_search_overview() - gohan_search() - response = client.post("/individuals", json=request_body) +def test_individuals_query_no_permissions(app_config, client, aioresponse): + authz_evaluate_url = app_config["AUTHZ_URL"] + "/policy/evaluate" + openid_config_url = app_config["OPENID_CONFIG_URL"] + token_url = app_config["AUTHZ_URL"] + "/fake/token" + katsu_url = app_config["KATSU_BASE_URL"] + katsu_private_search_url = katsu_url + app_config["KATSU_SEARCH_ENDPOINT"] + katsu_search_overview_url = katsu_url + app_config["KATSU_SEARCH_OVERVIEW"] + katsu_public_rules_url = katsu_url + app_config["KATSU_PUBLIC_RULES"] + katsu_public_search_url = katsu_url + app_config["KATSU_BEACON_SEARCH"] + "?" + KATSU_QUERY_PARAMS + gohan_search_url = app_config["GOHAN_BASE_URL"] + app_config["GOHAN_SEARCH_ENDPOINT"] + "?" 
@@ -149,19 +180,38 @@
     assert data["responseSummary"]["numTotalResults"] == 4
 
 
-@responses.activate
-def test_individuals_query_no_permissions(client):
-    authz_everything_false()
-    auth_get_oidc_token()
-    katsu_public_search()
-    katsu_private_search()
-    katsu_public_rules()
-    katsu_private_search_overview()
-    gohan_search()
-    response = client.post("/individuals", json=request_body)
+def test_individuals_query_no_permissions(app_config, client, aioresponse):
+    authz_evaluate_url = app_config["AUTHZ_URL"] + "/policy/evaluate"
+    openid_config_url = app_config["OPENID_CONFIG_URL"]
+    token_url = app_config["AUTHZ_URL"] + "/fake/token"
+    katsu_url = app_config["KATSU_BASE_URL"]
+    katsu_private_search_url = katsu_url + app_config["KATSU_SEARCH_ENDPOINT"]
+    katsu_search_overview_url = katsu_url + app_config["KATSU_SEARCH_OVERVIEW"]
+    katsu_public_rules_url = katsu_url + app_config["KATSU_PUBLIC_RULES"]
+    katsu_public_search_url = katsu_url + app_config["KATSU_BEACON_SEARCH"] + "?" + KATSU_QUERY_PARAMS
+    gohan_search_url = app_config["GOHAN_BASE_URL"] + app_config["GOHAN_SEARCH_ENDPOINT"] + "?" + GOHAN_QUERY_PARAMS
+
+    aioresponse.post(authz_evaluate_url, payload={"result": [[False]]})
+    aioresponse.get(openid_config_url, payload=TOKEN_ENDPOINT_CONFIG_RESPONSE)
+    aioresponse.post(token_url, payload=token_response, repeat=True)
+    aioresponse.post(katsu_private_search_url, payload=katsu_private_search_response)
+    aioresponse.post(katsu_search_overview_url, payload=katsu_private_search_overview_response)
+    aioresponse.get(katsu_public_rules_url, payload=katsu_public_rules_response)
+    aioresponse.get(katsu_public_search_url, payload=katsu_public_search_response)
+    aioresponse.get(gohan_search_url, payload=gohan_search_response)
+
+    response = client.post("/individuals", json=BEACON_REQUEST_BODY)
     data = response.get_json()
 
     # expect normal response with zero results
     assert response.status_code == 200
     assert "responseSummary" in data
     assert data["responseSummary"]["numTotalResults"] == 0
+
+
+def test_network_endpoint(app_config, client, aioresponse):
+    authz_evaluate_url = app_config["AUTHZ_URL"] + "/policy/evaluate"
+    aioresponse.post(authz_evaluate_url, payload={"result": [[False]]})
+    response = client.get("/network")
+    assert response.status_code == 200
+    assert "beacons" in response.get_json()
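Note that these tests drive async views through Flask's ordinary synchronous test client. That works because `flask[async]` (hence the new `asgiref` entry in the lockfile) wraps coroutine views with asgiref's `async_to_sync` before calling them. Roughly, and simplified (this is not the actual Flask source):

    from asgiref.sync import async_to_sync

    async def view():
        return {"status": "ok"}

    # approximately what Flask's ensure_sync() does when handed a coroutine view
    assert async_to_sync(view)() == {"status": "ok"}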