Skip to content

Commit

Permalink
Merge pull request #249 from bento-platform/develop
Browse files Browse the repository at this point in the history
Version 2.2.2
  • Loading branch information
zxenia authored Jul 29, 2021
2 parents 58aac95 + 55e7cab commit 65bfc82
Show file tree
Hide file tree
Showing 9 changed files with 78 additions and 22 deletions.
2 changes: 2 additions & 0 deletions chord_metadata_service/chord/tests/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
"TEST_SEARCH_QUERY_6",
"TEST_SEARCH_QUERY_7",
"TEST_SEARCH_QUERY_8",
"TEST_SEARCH_QUERY_9",
"TEST_FHIR_SEARCH_QUERY",
]

Expand Down Expand Up @@ -247,4 +248,5 @@ def valid_phenotypic_feature(biosample=None, phenopacket=None):
"URINARY BLADDER"]
TEST_SEARCH_QUERY_7 = ["#eq", ["#resolve", "experiment_results", "[item]", "file_format"], "VCF"]
TEST_SEARCH_QUERY_8 = ["#ico", ["#resolve", "experiment_type"], "chromatin"]
TEST_SEARCH_QUERY_9 = ["#eq", ["#resolve", "subject", "id"], "patient:1"]
TEST_FHIR_SEARCH_QUERY = {"query": {"match": {"gender": "FEMALE"}}}
12 changes: 12 additions & 0 deletions chord_metadata_service/chord/tests/test_api_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
TEST_SEARCH_QUERY_6,
TEST_SEARCH_QUERY_7,
TEST_SEARCH_QUERY_8,
TEST_SEARCH_QUERY_9,
TEST_FHIR_SEARCH_QUERY,
)
from ..models import Project, Dataset, TableOwnership, Table
Expand Down Expand Up @@ -392,6 +393,17 @@ def test_private_search_11_experiment(self):

# TODO table search for experiments

def test_private_table_search_12(self):
# Valid query to search for subject id

r = self.client.post(reverse("private-table-search", args=[str(self.table.identifier)]), data=json.dumps({
"query": TEST_SEARCH_QUERY_9
}), content_type="application/json")
self.assertEqual(r.status_code, status.HTTP_200_OK)
c = r.json()
self.assertEqual(len(c["results"]), 1)
self.assertIn("patient:1", [phenopacket["subject"]["id"] for phenopacket in c["results"]])

@patch('chord_metadata_service.chord.views_search.es')
def test_fhir_search(self, mocked_es):
mocked_es.search.return_value = SEARCH_SUCCESS
Expand Down
11 changes: 11 additions & 0 deletions chord_metadata_service/chord/views_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from datetime import datetime
from django.db import connection
from django.conf import settings
from django.views.decorators.cache import cache_page
from psycopg2 import sql
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import AllowAny
Expand Down Expand Up @@ -235,6 +236,8 @@ def count_individual(ind):
}


# Cache page for the requested url
@cache_page(60 * 60 * 2)
@api_view(["GET"])
@permission_classes([OverrideOrSuperUserOnly])
def chord_table_summary(_request, table_id):
Expand Down Expand Up @@ -328,6 +331,8 @@ def search(request, internal_data=False):
}, start))


# Cache page for the requested url
@cache_page(60 * 60 * 2)
@api_view(["POST"])
@permission_classes([AllowAny])
def chord_search(request):
Expand All @@ -336,6 +341,8 @@ def chord_search(request):

# Mounted on /private/, so will get protected anyway; this allows for access from federation service
# TODO: Ugly and misleading permissions
# Cache page for the requested url
@cache_page(60 * 60 * 2)
@api_view(["POST"])
@permission_classes([AllowAny])
def chord_private_search(request):
Expand Down Expand Up @@ -473,6 +480,8 @@ def chord_table_search(request, table_id, internal=False):
return Response(len(query_results) > 0)


# Cache page for the requested url
@cache_page(60 * 60 * 2)
@api_view(["POST"])
@permission_classes([AllowAny])
def chord_public_table_search(request, table_id):
Expand All @@ -482,6 +491,8 @@ def chord_public_table_search(request, table_id):

# Mounted on /private/, so will get protected anyway; this allows for access from federation service
# TODO: Ugly and misleading permissions
# Cache page for the requested url
@cache_page(60 * 60 * 2)
@api_view(["POST"])
@permission_classes([AllowAny])
def chord_private_table_search(request, table_id):
Expand Down
2 changes: 1 addition & 1 deletion chord_metadata_service/package.cfg
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
[package]
name = katsu
version = 2.2.1
version = 2.2.2
authors = Ksenia Zaytseva, David Lougheed, Simon Chénard, Romain Grégoire
19 changes: 12 additions & 7 deletions chord_metadata_service/phenopackets/search_schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,15 +49,15 @@ def _tag_with_database_attrs(schema: dict, db_attrs: dict):
"properties": {
"id": {
"search": {
**search_optional_eq(0, queryable="internal"),
**search_optional_eq(0),
"database": {
"field": models.Individual._meta.pk.column
}
}
},
"alternate_ids": {
"items": {
"search": search_optional_str(0, queryable="internal", multiple=True)
"search": search_optional_str(1, queryable="internal", multiple=True)
},
"search": {
"database": {
Expand All @@ -68,14 +68,14 @@ def _tag_with_database_attrs(schema: dict, db_attrs: dict):
"date_of_birth": {
# TODO: Internal?
# TODO: Allow lt / gt
"search": search_optional_eq(1, queryable="internal")
"search": search_optional_eq(2, queryable="internal")
},
# TODO: Age
"sex": {
"search": search_optional_eq(2)
"search": search_optional_eq(3)
},
"karyotypic_sex": {
"search": search_optional_eq(3)
"search": search_optional_eq(4)
},
"taxonomy": ONTOLOGY_SEARCH_SCHEMA,
},
Expand Down Expand Up @@ -230,7 +230,7 @@ def _tag_with_database_attrs(schema: dict, db_attrs: dict):
"properties": {
"id": {
"search": merge_schema_dictionaries(
search_optional_eq(0, queryable="internal"),
search_optional_eq(0),
{"database": {"field": models.Biosample._meta.pk.column}}
)
},
Expand Down Expand Up @@ -334,7 +334,12 @@ def _tag_with_database_attrs(schema: dict, db_attrs: dict):
PHENOPACKET_SEARCH_SCHEMA = tag_schema_with_search_properties(schemas.PHENOPACKET_SCHEMA, {
"properties": {
"id": {
"search": {"database": {"field": models.Phenopacket._meta.pk.column}}
"search": {
**search_optional_eq(0),
"database": {
"field": models.Phenopacket._meta.pk.column
}
}
},
"subject": merge_schema_dictionaries(
INDIVIDUAL_SEARCH_SCHEMA,
Expand Down
13 changes: 12 additions & 1 deletion chord_metadata_service/phenopackets/tests/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,9 @@
"id": "patient:1",
"date_of_birth": "1967-01-01",
"sex": "MALE",
"age": {
"age": "P45Y"
},
"extra_properties": {
"education": "Bachelor's Degree"
}
Expand All @@ -61,7 +64,15 @@
VALID_INDIVIDUAL_2 = {
"id": "patient:2",
"date_of_birth": "1978-01-01",
"sex": "FEMALE"
"sex": "FEMALE",
"age": {
"start": {
"age": "P40Y"
},
"end": {
"age": "P45Y"
}
}
}

VALID_HTS_FILE = {
Expand Down
23 changes: 12 additions & 11 deletions chord_metadata_service/restapi/api_views.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,8 +71,8 @@ def overview(_request):
individuals_taxonomy = Counter()
individuals_age = Counter()
individuals_ethnicity = Counter()
individuals_extra_prop = {}
extra_prop_counter_dict = {}
# individuals_extra_prop = {}
# extra_prop_counter_dict = {}

def count_individual(ind):

Expand All @@ -84,14 +84,15 @@ def count_individual(ind):
individuals_ethnicity.update((ind.ethnicity,))

# Generic Counter on all available extra properties
if ind.extra_properties:
for key in ind.extra_properties:
# Declare new Counter() if it's not declared
if key not in extra_prop_counter_dict:
extra_prop_counter_dict[key] = Counter()

extra_prop_counter_dict[key].update((ind.extra_properties[key],))
individuals_extra_prop[key] = dict(extra_prop_counter_dict[key])
# Comment out this count for now since it explodes the response
# if ind.extra_properties:
# for key in ind.extra_properties:
# # Declare new Counter() if it's not declared
# if key not in extra_prop_counter_dict:
# extra_prop_counter_dict[key] = Counter()
#
# extra_prop_counter_dict[key].update((ind.extra_properties[key],))
# individuals_extra_prop[key] = dict(extra_prop_counter_dict[key])

if ind.age is not None:
individuals_age.update((parse_individual_age(ind.age),))
Expand Down Expand Up @@ -164,7 +165,7 @@ def count_object_fields(obj, container: dict):
"taxonomy": dict(individuals_taxonomy),
"age": dict(individuals_age),
"ethnicity": dict(individuals_ethnicity),
"extra_properties": dict(individuals_extra_prop),
# "extra_properties": dict(individuals_extra_prop),
},
"phenotypic_features": {
# count is a number of unique phenotypic feature types (not all pfs in the database)
Expand Down
1 change: 1 addition & 0 deletions chord_metadata_service/restapi/tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ def test_overview(self):
# phenopackets
self.assertEqual(response_obj['phenopackets'], 2)
self.assertEqual(response_obj['data_type_specific']['individuals']['count'], 2)
self.assertIsInstance(response_obj['data_type_specific']['individuals']['age'], dict)
self.assertEqual(response_obj['data_type_specific']['biosamples']['count'], 2)
self.assertEqual(response_obj['data_type_specific']['phenotypic_features']['count'], 1)
self.assertEqual(response_obj['data_type_specific']['diseases']['count'], 1)
Expand Down
17 changes: 15 additions & 2 deletions chord_metadata_service/restapi/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,13 +50,26 @@ def parse_duration(string):

def parse_individual_age(age_obj):
""" Parses two possible age representations and returns average age or age as integer. """
# AGE OPTIONS
# "age": {
# "age": "P96Y"
# }
# AND
# "age": {
# "start": {
# "age": "P45Y"
# },
# "end": {
# "age": "P49Y"
# }
# }
if 'start' in age_obj:
start_age = parse_duration(age_obj['start']['age'])
end_age = parse_duration(age_obj['end']['age'])
# for the duration calculate the average age
age = (start_age + end_age) // 2
elif isinstance(age_obj, str):
age = parse_duration(age_obj)
elif 'age' in age_obj:
age = parse_duration(age_obj['age'])
else:
raise ValueError(f"Error: {age_obj} format not supported")
return age

0 comments on commit 65bfc82

Please sign in to comment.