Merge pull request #13 from TranslatorSRI/async
Update novelty code to be async
maximusunc authored Aug 11, 2023
2 parents cd588a4 + 57e9dc2 commit 4c2af4f
Showing 5 changed files with 31 additions and 25 deletions.
2 changes: 2 additions & 0 deletions app/logger.py
@@ -75,3 +75,5 @@ def setup_logger():
logging_config = json.load(f)

config.dictConfig(logging_config)
+logging.getLogger("httpcore").setLevel(logging.WARNING)
+logging.getLogger("httpx").setLevel(logging.WARNING)
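
Context for the two added lines: recent httpx versions emit one INFO-level log line per request, so switching the novelty helpers to httpx.AsyncClient would otherwise flood the application log. A minimal sketch of the effect, assuming that logging behavior (the URL is a stand-in):

```python
import asyncio
import logging

import httpx

logging.basicConfig(level=logging.INFO)

# Same two lines the diff adds to setup_logger(): raise the level of
# httpx's internal loggers so per-request INFO lines are suppressed.
logging.getLogger("httpcore").setLevel(logging.WARNING)
logging.getLogger("httpx").setLevel(logging.WARNING)


async def main():
    async with httpx.AsyncClient(timeout=30) as client:
        await client.get("https://example.org")  # no per-request INFO line now


asyncio.run(main())
```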
24 changes: 13 additions & 11 deletions app/novelty/compute_novelty.py
@@ -20,7 +20,7 @@
"""


-def molecular_sim(known, unknown, message):
+async def molecular_sim(known, unknown, message):
unknown_ids = []
known_ids = []
if len(unknown) > 0:
@@ -59,14 +59,14 @@ def molecular_sim(known, unknown, message):
message["results"][drug]["node_bindings"][s[1]][0]["id"]
)

-smile_unkown = mol_to_smile_molpro(unknown_ids)
-smile_known = mol_to_smile_molpro(known_ids)
+smile_unkown = await mol_to_smile_molpro(unknown_ids)
+smile_known = await mol_to_smile_molpro(known_ids)

similarity_map = find_nearest_neighbors(smile_unkown, smile_known, 0, 1)
return similarity_map


-def get_publication_info(pub_id):
+async def get_publication_info(pub_id):
"""
Args: PMID
Returns: The publication info
@@ -75,8 +75,8 @@ def get_publication_info(pub_id):
request_id = "1df88223-c0f8-47f5-a1f3-661b944c7849"
full_url = f"{base_url}{pub_id}&request_id={request_id}"
try:
-with httpx.Client(timeout=30) as client:
-    response = client.get(full_url)
+async with httpx.AsyncClient(timeout=30) as client:
+    response = await client.get(full_url)
response.raise_for_status()
response = response.json()
except Exception:
@@ -141,7 +141,7 @@ def recency_function_exp(number_of_publ, age_of_oldest_publ, max_number, max_age
return recency


-def extracting_drug_fda_publ_date(message, unknown):
+async def extracting_drug_fda_publ_date(message, unknown):
"""
Upon querying, the response is returned as a list containing 10 dictionaries,
with each dictionary representing the response from an ARA. The function 'extracting_drug_fda_publ_date'
@@ -246,7 +246,9 @@ def extracting_drug_fda_publ_date(message, unknown):
# print(publications)
publications_1 = ",".join(publications)
try:
-response_pub = get_publication_info(publications_1)
+response_pub = await get_publication_info(
+    publications_1
+)
if response_pub["_meta"]["n_results"] == 0:
age_oldest = np.nan
else:
@@ -392,7 +394,7 @@ def novelty_score(fda_status, recency, similarity):
return score


-def compute_novelty(message, logger):
+async def compute_novelty(message, logger):
"""INPUT: JSON Response with merged annotated results for a 1-H query
1. load the json file
@@ -409,7 +411,7 @@ def compute_novelty(message, logger):
# # Step 2

# start = time.time()
-df, query_chk = extracting_drug_fda_publ_date(message, unknown)
+df, query_chk = await extracting_drug_fda_publ_date(message, unknown)
# print(f"Time to extract fda status and Publication data:{time.time()-start}")
# # print(df.head())
# # print(query_chk)
@@ -432,7 +434,7 @@ def compute_novelty(message, logger):
if query_chk == 1:
# start = time.time()
try:
-similarity_map = molecular_sim(known, unknown, message)
+similarity_map = await molecular_sim(known, unknown, message)
df["similarity"] = df.apply(
lambda row: similarity_map[row["drug"]][0][1]
if row["drug"] in similarity_map.keys()
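The changes in compute_novelty.py follow one pattern: each helper becomes a coroutine (def becomes async def), blocking httpx.Client calls become awaited httpx.AsyncClient calls, and every caller gains an await. A minimal, self-contained sketch of that conversion (the fetch_* names and URL are illustrative, not part of the repository):

```python
import asyncio

import httpx


# Before: blocking I/O inside a plain function.
def fetch_sync(url: str) -> dict:
    with httpx.Client(timeout=30) as client:
        response = client.get(url)
        response.raise_for_status()
        return response.json()


# After: same logic, but the coroutine yields control while waiting on the
# network, so every caller up the stack must await it.
async def fetch_async(url: str) -> dict:
    async with httpx.AsyncClient(timeout=30) as client:
        response = await client.get(url)
        response.raise_for_status()
        return response.json()


async def caller(url: str) -> dict:
    # In this PR the same ripple runs compute_novelty -> extracting_drug_fda_publ_date
    # -> get_publication_info, and compute_novelty -> molecular_sim -> mol_to_smile_molpro.
    return await fetch_async(url)


if __name__ == "__main__":
    asyncio.run(caller("https://example.org"))  # illustrative URL
```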
8 changes: 4 additions & 4 deletions app/novelty/extr_smile_molpro_by_id.py
@@ -1,7 +1,7 @@
import httpx


-def mol_to_smile_molpro(molecules):
+async def mol_to_smile_molpro(molecules):
"""
Args:
List
@@ -18,16 +18,16 @@ def mol_to_smile_molpro(molecules):

data_mol = list(set(molecules))
# print(f'init data: {len(data_mol)}')
-with httpx.Client(timeout=30) as client:
+async with httpx.AsyncClient(timeout=30) as client:
while data_mol:
# print(f'before: {len(data_mol)}')
data_mol_before = len(data_mol)
-response = client.post(url, headers=headers, json=data_mol)
+response = await client.post(url, headers=headers, json=data_mol)

if response.status_code == 200:
json_response = response.json()
collec_url = json_response["url"]
-temp_collec_response = client.get(collec_url)
+temp_collec_response = await client.get(collec_url)
if temp_collec_response.status_code == 200:
collec_response = temp_collec_response.json()

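mol_to_smile_molpro keeps its POST-then-fetch shape; only the client type and the two request calls change. A reduced sketch of that shape, assuming the same response contract seen in the diff (a JSON body whose "url" field points at a collection to GET) and omitting the retry loop and result parsing:

```python
from typing import Optional

import httpx


async def post_then_fetch(url: str, ids: list) -> Optional[dict]:
    """Hypothetical reduction of mol_to_smile_molpro: POST the identifiers,
    then GET the collection URL returned in the JSON response."""
    headers = {"Content-Type": "application/json"}
    async with httpx.AsyncClient(timeout=30) as client:
        response = await client.post(url, headers=headers, json=ids)
        if response.status_code != 200:
            return None
        collec_url = response.json()["url"]
        collec_response = await client.get(collec_url)
        if collec_response.status_code != 200:
            return None
        return collec_response.json()
```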
16 changes: 9 additions & 7 deletions app/ordering_components.py
@@ -43,17 +43,19 @@ def get_clinical_evidence(result, message, logger, db_conn):
return compute_clinical_evidence(result, message, logger, db_conn)


-def get_novelty(message, logger):
-    return compute_novelty(message, logger)
+async def get_novelty(message, logger):
+    novelty_df = await compute_novelty(message, logger)
+    novelty_dict = novelty_df.to_dict(orient="index")
+    novelty_scores = {
+        node["drug"]: node["novelty_score"] for node in novelty_dict.values()
+    }
+    return novelty_scores


-def get_ordering_components(message, logger):
+async def get_ordering_components(message, logger):
logger.debug(f"Computing scores for {len(message['results'])} results")
db_conn = redis.Redis(connection_pool=redis_pool)
-novelty_scores_dict = get_novelty(message, logger).to_dict(orient="index")
-novelty_scores = {
-    node["drug"]: node["novelty_score"] for node in novelty_scores_dict.values()
-}
+novelty_scores = await get_novelty(message, logger)
for result in tqdm(message.get("results") or []):
clinical_evidence_score = get_clinical_evidence(
result,
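get_novelty now returns a plain {drug: novelty_score} mapping instead of a DataFrame, absorbing the flattening that get_ordering_components used to do inline. A small sketch of what that flattening produces, with the column names taken from the diff and invented values:

```python
import pandas as pd

# Assumed frame shape: one row per result with "drug" and "novelty_score"
# columns; the identifiers and scores here are made up for illustration.
novelty_df = pd.DataFrame(
    [
        {"drug": "CHEBI:0001", "novelty_score": 0.82},
        {"drug": "CHEBI:0002", "novelty_score": 0.35},
    ]
)

novelty_dict = novelty_df.to_dict(orient="index")
# {0: {'drug': 'CHEBI:0001', 'novelty_score': 0.82},
#  1: {'drug': 'CHEBI:0002', 'novelty_score': 0.35}}

novelty_scores = {
    node["drug"]: node["novelty_score"] for node in novelty_dict.values()
}
# {'CHEBI:0001': 0.82, 'CHEBI:0002': 0.35}
```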
6 changes: 3 additions & 3 deletions app/server.py
@@ -21,7 +21,7 @@

openapi_args = dict(
title="Answer Appraiser",
version="0.3.1",
version="0.3.2",
terms_of_service="",
translator_component="Utility",
translator_teams=["Standards Reference Implementation Team"],
@@ -117,7 +117,7 @@

async def async_appraise(message, callback, logger: logging.Logger):
try:
-get_ordering_components(message, logger)
+await get_ordering_components(message, logger)
except Exception:
logger.error(f"Something went wrong while appraising: {traceback.format_exc()}")
logger.info("Done appraising")
Expand Down Expand Up @@ -174,7 +174,7 @@ async def sync_get_appraisal(query: Query = Body(..., example=EXAMPLE)):
status_code=400,
)
try:
-get_ordering_components(message, logger)
+await get_ordering_components(message, logger)
except Exception:
logger.error(f"Something went wrong while appraising: {traceback.format_exc()}")
logger.info("Done appraising")
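Since async_appraise and sync_get_appraisal were already coroutines, the only change they need is the await on get_ordering_components (plus the version bump to 0.3.2). A reduced, runnable sketch of the handler shape, with a stub standing in for the real ordering pipeline and the callback plumbing omitted:

```python
import asyncio
import logging
import traceback

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


async def get_ordering_components(message: dict, logger: logging.Logger) -> None:
    """Stub standing in for app.ordering_components.get_ordering_components."""
    await asyncio.sleep(0)  # placeholder for the real async scoring work


async def async_appraise(message: dict, logger: logging.Logger) -> None:
    # Mirrors the handler change: the ordering pipeline is awaited instead of
    # called synchronously, so the server's event loop is never blocked.
    try:
        await get_ordering_components(message, logger)
    except Exception:
        logger.error(f"Something went wrong while appraising: {traceback.format_exc()}")
    logger.info("Done appraising")


if __name__ == "__main__":
    asyncio.run(async_appraise({"results": []}, logger))
```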
