Improve logging
BenjaminPelletier committed Oct 3, 2023
1 parent 7013755 commit db3d82d
Showing 4 changed files with 50 additions and 1 deletion.
4 changes: 4 additions & 0 deletions monitoring/mock_uss/database.py
@@ -54,3 +54,7 @@ class Database(ImplicitDict):
    Database(one_time_tasks=[], task_errors=[], periodic_tasks={}),
    decoder=lambda b: ImplicitDict.parse(json.loads(b.decode("utf-8")), Database),
)

# Request IDs that have already been fulfilled; the endpoints below check and update this shared list so that a
# duplicate (e.g. retried) request is rejected rather than processed twice.
fulfilled_request_ids = SynchronizedValue(
    [], decoder=lambda b: json.loads(b.decode("utf-8"))
)
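
The route changes below all consult this list in the same way. Distilled, the guard looks roughly like the following sketch (illustrative only; it assumes SynchronizedValue acts as a context manager that yields the stored list under a lock and persists mutations when the block exits, and handle_request stands in for the real handlers):

from monitoring.mock_uss.database import fulfilled_request_ids

def handle_request(json: dict):
    # Refuse a request whose request_id has already been seen; otherwise record the new ID.
    if "request_id" in json:
        with fulfilled_request_ids as tx:
            if json["request_id"] in tx:
                return f"Request ID {json['request_id']} has already been fulfilled", 400
            tx.append(json["request_id"])
    # ...handle a first-time request as usual...
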
13 changes: 13 additions & 0 deletions monitoring/mock_uss/ridsp/routes_injection.py
@@ -18,6 +18,7 @@
from . import database
from .database import db
from monitoring.monitorlib import geo
from ..database import fulfilled_request_ids

require_config_value(KEY_BASE_URL)
require_config_value(KEY_RID_VERSION)
@@ -50,6 +51,18 @@ def ridsp_create_test(test_id: str) -> Tuple[str, int]:
    except ValueError as e:
        msg = "Create test {} unable to parse JSON: {}".format(test_id, e)
        return msg, 400
    if "request_id" in json:
        logger.debug(f"[ridsp_create_test] Request ID {json['request_id']}")
        with fulfilled_request_ids as tx:
            if json["request_id"] in tx:
                logger.debug(
                    f"[ridsp_create_test] Already processed request ID {json['request_id']}"
                )
                return (
                    f"Request ID {json['request_id']} has already been fulfilled",
                    400,
                )
            tx.append(json["request_id"])

    # Create ISA in DSS
    (t0, t1) = req_body.get_span()
27 changes: 26 additions & 1 deletion monitoring/mock_uss/scdsc/routes_injection.py
@@ -9,6 +9,7 @@
from loguru import logger
import requests.exceptions

from monitoring.mock_uss.database import fulfilled_request_ids
from monitoring.mock_uss.scdsc.routes_scdsc import op_intent_from_flightrecord
from monitoring.monitorlib.geo import Polygon
from monitoring.monitorlib.geotemporal import Volume4D, Volume4DCollection
@@ -155,6 +156,20 @@ def scdsc_inject_flight(flight_id: str) -> Tuple[str, int]:
    except ValueError as e:
        msg = "Create flight {} unable to parse JSON: {}".format(flight_id, e)
        return msg, 400
    if "request_id" in json:
        logger.debug(
            f"[inject_flight/{os.getpid()}:{flight_id}] Request ID {json['request_id']}"
        )
        with fulfilled_request_ids as tx:
            if json["request_id"] in tx:
                logger.debug(
                    f"[inject_flight/{os.getpid()}:{flight_id}] Already processed request ID {json['request_id']}"
                )
                return (
                    f"Request ID {json['request_id']} has already been fulfilled",
                    400,
                )
            tx.append(json["request_id"])
    json, code = inject_flight(flight_id, req_body)
    return flask.jsonify(json), code

@@ -360,6 +375,7 @@ def inject_flight(flight_id: str, req_body: InjectFlightRequest) -> Tuple[dict,
        if existing_flight:
            id = existing_flight.op_intent_reference.id
            step_name = f"updating existing operational intent {id} in DSS"
            logger.debug(f"[inject_flight/{pid}:{flight_id}] {step_name}")
            result = scd_client.update_operational_intent_reference(
                utm_client,
                id,
@@ -369,6 +385,7 @@ def inject_flight(flight_id: str, req_body: InjectFlightRequest) -> Tuple[dict,
        else:
            id = str(uuid.uuid4())
            step_name = f"creating new operational intent {id} in DSS"
            logger.debug(f"[inject_flight/{pid}:{flight_id}] {step_name}")
            result = scd_client.create_operational_intent_reference(utm_client, id, req)

        # Notify subscribers
@@ -558,10 +575,18 @@ def scdsc_clear_area() -> Tuple[str, int]:
        json = flask.request.json
        if json is None:
            raise ValueError("Request did not contain a JSON payload")
        req = ImplicitDict.parse(json, ClearAreaRequest)
        req: ClearAreaRequest = ImplicitDict.parse(json, ClearAreaRequest)
    except ValueError as e:
        msg = "Unable to parse ClearAreaRequest JSON request: {}".format(e)
        return msg, 400
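    # Note: unlike the two handlers above there is no `"request_id" in json` guard here; presumably
    # ClearAreaRequest declares request_id as a field, so the parsed req.request_id is available directly.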
    with fulfilled_request_ids as tx:
        logger.debug(f"[scdsc_clear_area] Processing request ID {req.request_id}")
        if req.request_id in tx:
            logger.debug(
                f"[scdsc_clear_area] Already processed request ID {req.request_id}"
            )
            return f"Request ID {req.request_id} has already been fulfilled", 400
        tx.append(req.request_id)
    json, code = clear_area(req)
    return flask.jsonify(json), code

7 changes: 7 additions & 0 deletions monitoring/monitorlib/fetch/__init__.py
@@ -2,6 +2,7 @@
import json
import os
import traceback
import uuid
from typing import Dict, Optional, List

from enum import Enum
@@ -313,6 +314,12 @@ def query_and_describe(
    if "timeout" not in req_kwargs:
        req_kwargs["timeout"] = TIMEOUTS

    # Attach a request_id field to the JSON body of any outgoing request with a JSON body that doesn't already have one
    if "json" in req_kwargs and "request_id" not in req_kwargs["json"]:
        json_body = json.loads(json.dumps(req_kwargs["json"]))
        json_body["request_id"] = str(uuid.uuid4())
        req_kwargs["json"] = json_body

    failures = []
    # Note: retry logic could be attached to the `client` Session by `mount`ing an HTTPAdapter with custom
    # `max_retries`, however we do not want to mutate the provided Session. Instead, retry only on errors we explicitly