diff --git a/monitoring/monitorlib/fetch/__init__.py b/monitoring/monitorlib/fetch/__init__.py index d69a3eb321..061337a799 100644 --- a/monitoring/monitorlib/fetch/__init__.py +++ b/monitoring/monitorlib/fetch/__init__.py @@ -73,10 +73,14 @@ def describe_flask_request(request: flask.Request) -> RequestDescription: "received_at": StringBasedDateTime(datetime.datetime.utcnow()), "headers": headers, } - try: - kwargs["json"] = request.json - except ValueError: - kwargs["body"] = request.data.decode("utf-8") + data = request.data.decode("utf-8") + if request.is_json: + try: + kwargs["json"] = json.loads(data) + except ValueError: + kwargs["body"] = data + else: + kwargs["body"] = data return RequestDescription(**kwargs) @@ -263,6 +267,14 @@ def status_code(self) -> int: def json_result(self) -> Optional[Dict]: return self.response.json + @property + def error_message(self) -> Optional[str]: + return ( + self.json_result["message"] + if self.json_result is not None and "message" in self.json_result + else None + ) + class QueryError(RuntimeError): """Error encountered when interacting with a server in the UTM ecosystem.""" diff --git a/monitoring/monitorlib/geo.py b/monitoring/monitorlib/geo.py index f568e0d0d3..b47b483bb2 100644 --- a/monitoring/monitorlib/geo.py +++ b/monitoring/monitorlib/geo.py @@ -7,6 +7,7 @@ from implicitdict import ImplicitDict import numpy as np import s2sphere +from s2sphere import LatLng from scipy.interpolate import RectBivariateSpline as Spline import shapely.geometry from uas_standards.astm.f3548.v21 import api as f3548v21 @@ -501,3 +502,38 @@ def egm96_geoid_offset(p: s2sphere.LatLng) -> float: # listed -90 to 90. Since latitude data are symmetric, we can simply # convert "-90 to 90" to "90 to -90" by inverting the requested latitude. 
return _egm96.ev(-lat, lng) + + +def generate_slight_overlap_area(in_points: List[LatLng]) -> List[LatLng]: + """ + Takes a list of LatLng points and returns a list of LatLng points that represents + a polygon only slightly overlapping with the input, and that is roughly half the diameter of the input. + + The returned polygon is built from the first point of the input, from which a square + is drawn in the direction opposite of the center of the input polygon. + + """ + overlap_corner = in_points[0] # the spot that will have a tiny overlap + + # Compute the center of mass of the input polygon + center = LatLng.from_degrees( + sum([point.lat().degrees for point in in_points]) / len(in_points), + sum([point.lng().degrees for point in in_points]) / len(in_points), + ) + + delta_lat = center.lat().degrees - overlap_corner.lat().degrees + delta_lng = center.lng().degrees - overlap_corner.lng().degrees + + same_lat_point = LatLng.from_degrees( + overlap_corner.lat().degrees, overlap_corner.lng().degrees - delta_lng + ) + same_lng_point = LatLng.from_degrees( + overlap_corner.lat().degrees - delta_lat, overlap_corner.lng().degrees + ) + + opposite_corner = LatLng.from_degrees( + overlap_corner.lat().degrees - delta_lat, + overlap_corner.lng().degrees - delta_lng, + ) + + return [overlap_corner, same_lat_point, opposite_corner, same_lng_point] diff --git a/monitoring/monitorlib/geo_test.py b/monitoring/monitorlib/geo_test.py new file mode 100644 index 0000000000..ba2b52af17 --- /dev/null +++ b/monitoring/monitorlib/geo_test.py @@ -0,0 +1,36 @@ +from typing import List, Tuple + +from s2sphere import LatLng + +from monitoring.monitorlib.geo import generate_slight_overlap_area + + +def _points(in_points: List[Tuple[float, float]]) -> List[LatLng]: + return [LatLng.from_degrees(*p) for p in in_points] + + +def test_generate_slight_overlap_area(): + # Square around 0,0 of edge length 2 -> first corner at 1,1 -> expect a square with overlapping corner at 1,1 + assert 
generate_slight_overlap_area( + _points([(1, 1), (1, -1), (-1, -1), (-1, 1)]) + ) == _points([(1, 1), (1, 2), (2, 2), (2, 1)]) + + # Square with diagonal from 0,0 to 1,1 -> first corner at 1,1 -> expect a square with overlapping corner at 1,1 + assert generate_slight_overlap_area( + _points([(1, 1), (0, 1), (0, 0), (1, 0)]) + ) == _points([(1, 1), (1, 1.5), (1.5, 1.5), (1.5, 1)]) + + # Square with diagonal from 0,0 to -1,-1 -> first corner at -1,-1 -> expect a square with overlapping corner at -1,-1 + assert generate_slight_overlap_area( + _points([(-1, -1), (0, -1), (0, 0), (-1, 0)]) + ) == _points([(-1, -1), (-1, -1.5), (-1.5, -1.5), (-1.5, -1)]) + + # Square with diagonal from 0,0 to -1,1 -> first corner at -1,1 -> expect a square with overlapping corner at -1,1 + assert generate_slight_overlap_area( + _points([(-1, 1), (-1, 0), (0, 0), (0, 1)]) + ) == _points([(-1, 1), (-1, 1.5), (-1.5, 1.5), (-1.5, 1)]) + + # Square with diagonal from 0,0 to 1,-1 -> first corner at 1,-1 -> expect a square with overlapping corner at 1,-1 + assert generate_slight_overlap_area( + _points([(1, -1), (1, 0), (0, 0), (0, -1)]) + ) == _points([(1, -1), (1, -1.5), (1.5, -1.5), (1.5, -1)]) diff --git a/monitoring/monitorlib/infrastructure.py b/monitoring/monitorlib/infrastructure.py index 093ef73c28..6b88b9d515 100644 --- a/monitoring/monitorlib/infrastructure.py +++ b/monitoring/monitorlib/infrastructure.py @@ -3,7 +3,7 @@ import functools from typing import Dict, List, Optional import urllib.parse -from aiohttp import ClientSession +from aiohttp import ClientSession, ClientResponse import jwt import requests @@ -190,32 +190,52 @@ def adjust_request_kwargs(self, url, method, kwargs): return kwargs async def put(self, url, **kwargs): + """Returns (status, headers, json)""" url = self._prefix_url + url if "auth" not in kwargs: kwargs = self.adjust_request_kwargs(url, "PUT", kwargs) async with self._client.put(url, **kwargs) as response: - return response.status, await response.json() + 
return ( + response.status, + {k: v for k, v in response.headers.items()}, + await response.json(), + ) async def get(self, url, **kwargs): + """Returns (status, headers, json)""" url = self._prefix_url + url if "auth" not in kwargs: kwargs = self.adjust_request_kwargs(url, "GET", kwargs) async with self._client.get(url, **kwargs) as response: - return response.status, await response.json() + return ( + response.status, + {k: v for k, v in response.headers.items()}, + await response.json(), + ) async def post(self, url, **kwargs): + """Returns (status, headers, json)""" url = self._prefix_url + url if "auth" not in kwargs: kwargs = self.adjust_request_kwargs(url, "POST", kwargs) async with self._client.post(url, **kwargs) as response: - return response.status, await response.json() + return ( + response.status, + {k: v for k, v in response.headers.items()}, + await response.json(), + ) async def delete(self, url, **kwargs): + """Returns (status, headers, json)""" url = self._prefix_url + url if "auth" not in kwargs: kwargs = self.adjust_request_kwargs(url, "DELETE", kwargs) async with self._client.delete(url, **kwargs) as response: - return response.status, await response.json() + return ( + response.status, + {k: v for k, v in response.headers.items()}, + await response.json(), + ) def default_scopes(scopes: List[str]): diff --git a/monitoring/prober/infrastructure.py b/monitoring/prober/infrastructure.py index 5a5f145965..d0124b10c6 100644 --- a/monitoring/prober/infrastructure.py +++ b/monitoring/prober/infrastructure.py @@ -100,7 +100,7 @@ def wrapper_default_scope(*args, **kwargs): resource_type_code_descriptions: Dict[ResourceType, str] = {} -# Next code: 373 +# Next code: 374 def register_resource_type(code: int, description: str) -> ResourceType: """Register that the specified code refers to the described resource. 
diff --git a/monitoring/prober/rid/v1/test_isa_simple_heavy_traffic_concurrent.py b/monitoring/prober/rid/v1/test_isa_simple_heavy_traffic_concurrent.py index d22d26b07e..b85e59cae9 100644 --- a/monitoring/prober/rid/v1/test_isa_simple_heavy_traffic_concurrent.py +++ b/monitoring/prober/rid/v1/test_isa_simple_heavy_traffic_concurrent.py @@ -107,8 +107,8 @@ def test_create_isa_concurrent(ids, session_ridv1_async): ) ) for isa_id, resp in results: - assert resp[0] == 200, resp[1] - data = resp[1] + assert resp[0] == 200, resp[2] + data = resp[2] assert data["service_area"]["id"] == isa_id assert data["service_area"]["flights_url"] == "https://example.com/dss" assert_datetimes_are_equal( @@ -133,9 +133,9 @@ def test_get_isa_by_ids_concurrent(ids, session_ridv1_async): ) ) for isa_id, resp in results: - assert resp[0] == 200, resp[1] + assert resp[0] == 200, resp[2] - data = resp[1] + data = resp[2] assert data["service_area"]["id"] == isa_id assert data["service_area"]["flights_url"] == FLIGHTS_URL @@ -162,8 +162,8 @@ def test_delete_isa_concurrent(ids, session_ridv1_async): ) for isa_id, resp in results: - assert resp[0] == 200, resp[1] - version = resp[1]["service_area"]["version"] + assert resp[0] == 200, resp[2] + version = resp[2]["service_area"]["version"] version_map[isa_id] = version # Delete ISAs concurrently @@ -178,4 +178,4 @@ def test_delete_isa_concurrent(ids, session_ridv1_async): ) for isa_id, resp in results: - assert resp[0], resp[1] + assert resp[0], resp[2] diff --git a/monitoring/prober/scd/test_operation_simple_heavy_traffic_concurrent.py b/monitoring/prober/scd/test_operation_simple_heavy_traffic_concurrent.py index 8a9d8d21e0..a885e7469a 100644 --- a/monitoring/prober/scd/test_operation_simple_heavy_traffic_concurrent.py +++ b/monitoring/prober/scd/test_operation_simple_heavy_traffic_concurrent.py @@ -258,7 +258,7 @@ def test_create_ops_concurrent(ids, scd_api, scd_session_async): op_id = req_map[0] op_resp_map[op_id] = {} 
op_resp_map[op_id]["status_code"] = resp[0][0] - op_resp_map[op_id]["content"] = resp[0][1] + op_resp_map[op_id]["content"] = resp[0][2] for op_id, resp in op_resp_map.items(): if resp["status_code"] != 201: try: @@ -342,7 +342,7 @@ def test_get_ops_by_ids_concurrent(ids, scd_api, scd_session_async): for op_id, resp in zip(map(ids, OP_TYPES), results): op_resp_map[op_id] = {} op_resp_map[op_id]["status_code"] = resp[0] - op_resp_map[op_id]["content"] = resp[1] + op_resp_map[op_id]["content"] = resp[2] for op_id, resp in op_resp_map.items(): assert resp["status_code"] == 200, resp["content"] @@ -381,7 +381,7 @@ def test_get_ops_by_search_concurrent(ids, scd_api, scd_session_async): for idx, resp in zip(range(len(OP_TYPES)), results): op_resp_map[idx] = {} op_resp_map[idx]["status_code"] = resp[0] - op_resp_map[idx]["content"] = resp[1] + op_resp_map[idx]["content"] = resp[2] for idx, resp in op_resp_map.items(): assert resp["status_code"] == 200, resp["content"] @@ -431,7 +431,7 @@ def test_mutate_ops_concurrent(ids, scd_api, scd_session, scd_session_async): op_id = req_map[0] op_resp_map[op_id] = {} op_resp_map[op_id]["status_code"] = resp[0][0] - op_resp_map[op_id]["content"] = resp[0][1] + op_resp_map[op_id]["content"] = resp[0][2] ovn_map.clear() @@ -486,7 +486,7 @@ def test_delete_op_concurrent(ids, scd_api, scd_session_async): for op_id, resp in zip(map(ids, OP_TYPES), results): op_resp_map[op_id] = {} op_resp_map[op_id]["status_code"] = resp[0] - op_resp_map[op_id]["content"] = resp[1] + op_resp_map[op_id]["content"] = resp[2] assert len(op_resp_map) == len(OP_TYPES) diff --git a/monitoring/uss_qualifier/resources/astm/f3548/v21/dss.py b/monitoring/uss_qualifier/resources/astm/f3548/v21/dss.py index 2f9f3699c6..9c059217c2 100644 --- a/monitoring/uss_qualifier/resources/astm/f3548/v21/dss.py +++ b/monitoring/uss_qualifier/resources/astm/f3548/v21/dss.py @@ -1,12 +1,13 @@ from __future__ import annotations +import uuid from typing import Tuple, List, Dict, 
Optional + from urllib.parse import urlparse -from loguru import logger from implicitdict import ImplicitDict from monitoring.monitorlib import infrastructure, fetch from monitoring.monitorlib.fetch import QueryType -from monitoring.monitorlib.scd import SCOPE_SC +from monitoring.monitorlib.scd import SCOPE_SC, SCOPE_AA from monitoring.uss_qualifier.resources.resource import Resource from monitoring.uss_qualifier.resources.communications import AuthAdapterResource from uas_standards.astm.f3548.v21.api import ( @@ -16,6 +17,16 @@ QueryOperationalIntentReferenceResponse, OperationalIntent, GetOperationalIntentDetailsResponse, + PutOperationalIntentReferenceParameters, + EntityOVN, + OperationalIntentState, + ImplicitSubscriptionParameters, + UssBaseURL, + ChangeOperationalIntentReferenceResponse, + SubscriberToNotify, + SetUssAvailabilityStatusParameters, + UssAvailabilityState, + UssAvailabilityStatusResponse, ) @@ -123,6 +134,117 @@ def get_full_op_intent_without_validation( return result, query + def put_op_intent( + self, + extents: List[Volume4D], + key: List[EntityOVN], + state: OperationalIntentState, + base_url: UssBaseURL, + id: Optional[str] = None, + ovn: Optional[str] = None, + ) -> Tuple[ + Optional[OperationalIntentReference], + Optional[List[SubscriberToNotify]], + fetch.Query, + ]: + if id is None: + url = f"/dss/v1/operational_intent_references/{str(uuid.uuid4())}" + query_type = QueryType.F3548v21DSSCreateOperationalIntentReference + else: + url = f"/dss/v1/operational_intent_references/{id}/{ovn}" + query_type = QueryType.F3548v21DSSUpdateOperationalIntentReference + + req = PutOperationalIntentReferenceParameters( + extents=extents, + key=key, + state=state, + uss_base_url=base_url, + new_subscription=ImplicitSubscriptionParameters(uss_base_url=base_url), + ) + query = fetch.query_and_describe( + self.client, + "PUT", + url, + query_type, + self.participant_id, + scope=SCOPE_SC, + json=req, + ) + if query.status_code != 200 and query.status_code 
!= 201: + return None, None, query + else: + result = ChangeOperationalIntentReferenceResponse( + ImplicitDict.parse( + query.response.json, ChangeOperationalIntentReferenceResponse + ) + ) + return result.operational_intent_reference, result.subscribers, query + + def delete_op_intent( + self, + id: str, + ovn: str, + ) -> Tuple[ + Optional[OperationalIntentReference], + Optional[List[SubscriberToNotify]], + fetch.Query, + ]: + query = fetch.query_and_describe( + self.client, + "DELETE", + f"/dss/v1/operational_intent_references/{id}/{ovn}", + QueryType.F3548v21DSSDeleteOperationalIntentReference, + self.participant_id, + scope=SCOPE_SC, + ) + if query.status_code != 200: + return None, None, query + else: + result = ChangeOperationalIntentReferenceResponse( + ImplicitDict.parse( + query.response.json, ChangeOperationalIntentReferenceResponse + ) + ) + return result.operational_intent_reference, result.subscribers, query + + def set_uss_availability( + self, + uss_id: str, + available: bool, + version: str = "", + ) -> Tuple[Optional[str], fetch.Query]: + """ + Returns: + A tuple composed of + 1) the new version of the USS availability, or None if the query failed; + 2) the query. 
+ """ + if available: + availability = UssAvailabilityState.Normal + else: + availability = UssAvailabilityState.Down + + req = SetUssAvailabilityStatusParameters( + old_version=version, + availability=availability, + ) + query = fetch.query_and_describe( + self.client, + "PUT", + f"/dss/v1/uss_availability/{uss_id}", + QueryType.F3548v21DSSSetUssAvailability, + self.participant_id, + scope=SCOPE_AA, + json=req, + ) + if query.status_code != 200: + return None, query + else: + result = UssAvailabilityStatusResponse( + ImplicitDict.parse(query.response.json, UssAvailabilityStatusResponse) + ) + return result.version, query + def is_same_as(self, other: DSSInstance) -> bool: return ( self.participant_id == other.participant_id diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/isa_subscription_interactions.py b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/isa_subscription_interactions.py index 741d57982f..f93923ee1d 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/isa_subscription_interactions.py +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/isa_subscription_interactions.py @@ -2,6 +2,7 @@ import arrow +from monitoring.monitorlib import geo from monitoring.prober.infrastructure import register_resource_type from monitoring.uss_qualifier.common_data_definitions import Severity from monitoring.uss_qualifier.resources.astm.f3411.dss import DSSInstanceResource @@ -17,6 +18,7 @@ class ISASubscriptionInteractions(GenericTestScenario): """Based on the test_subscription_isa_interactions.py from the legacy prober tool.""" ISA_TYPE = register_resource_type(370, "ISA") + SUB_TYPE = register_resource_type(373, "Subscription") def __init__( self, @@ -32,9 +34,10 @@ def __init__( self._isa_id = id_generator.id_factory.make_id( ISASubscriptionInteractions.ISA_TYPE ) - # sub id is isa_id with last character replaced with '1' - # (the generated isa_id ends with a few '0's) - self._sub_id = self._isa_id[:-1] + 
"1" + self._sub_id = id_generator.id_factory.make_id( + ISASubscriptionInteractions.SUB_TYPE + ) + self._isa_version: Optional[str] = None self._isa = isa.specification @@ -42,6 +45,25 @@ def __init__( self._isa_start_time = self._isa.shifted_time_start(now) self._isa_end_time = self._isa.shifted_time_end(now) self._isa_area = [vertex.as_s2sphere() for vertex in self._isa.footprint] + self._slight_overlap_area = geo.generate_slight_overlap_area(self._isa_area) + + self._isa_params = dict( + area_vertices=self._isa_area, + alt_lo=self._isa.altitude_min, + alt_hi=self._isa.altitude_max, + start_time=self._isa_start_time, + end_time=self._isa_end_time, + uss_base_url=self._isa.base_url, + isa_id=self._isa_id, + ) + self._sub_params = dict( + alt_lo=self._isa.altitude_min, + alt_hi=self._isa.altitude_max, + start_time=self._isa_start_time, + end_time=self._isa_end_time, + uss_base_url=self._isa.base_url, + sub_id=self._sub_id, + ) def run(self, context: ExecutionContext): self.begin_test_scenario(context) @@ -49,15 +71,32 @@ def run(self, context: ExecutionContext): self._setup_case() self.begin_test_case("ISA Subscription Interactions") - self.begin_test_step("ISA Subscription Interactions") - self._check_subscription_behaviors() + self.begin_test_step("New Subscription within ISA") + self._new_subscription_in_isa_step() + self.end_test_step() + self.begin_test_step("New subscription within ISA is mutated to ISA boundary") + self._mutate_subscription_towards_isa_boundary_step() self.end_test_step() + + # TODO extend with steps that: + # - create a subscription that barely touches the ISA + # - create a subscription outside of the ISA and then mutate to: + # - move the subscription into the ISA + # - move the subscription such that it barely touches the ISA + # - create a subscription within the ISA and then mutate to: + # - move the subscription entirely outside of the ISA + # - mutate the ISA so entirely outside of the subscription + # - mutate the ISA to the 
subscription boundary + # + # Consider doing the above with and without separate Subscription IDs to increase the chances + # of revealing implementation bugs + self.end_test_case() self.end_test_scenario() - def _check_subscription_behaviors(self): + def _new_subscription_in_isa_step(self): """ - Create an ISA. - Create a subscription, response should include the pre-existing ISA and have a notification_index of 0. @@ -71,14 +110,8 @@ def _check_subscription_behaviors(self): created_isa = self._dss_wrapper.put_isa_expect_response_code( check=check, expected_error_codes={200}, - area_vertices=self._isa_area, - alt_lo=self._isa.altitude_min, - alt_hi=self._isa.altitude_max, - start_time=self._isa_start_time, - end_time=self._isa_end_time, - uss_base_url=self._isa.base_url, - isa_id=self._isa_id, isa_version=None, + **self._isa_params, ) # Create a subscription @@ -88,13 +121,8 @@ def _check_subscription_behaviors(self): created_subscription = self._dss_wrapper.put_sub( check=check, area_vertices=self._isa_area, - alt_lo=self._isa.altitude_min, - alt_hi=self._isa.altitude_max, - start_time=self._isa_start_time, - end_time=self._isa_end_time, - uss_base_url=self._isa.base_url, - sub_id=self._sub_id, sub_version=None, + **self._sub_params, ) # Check the subscription @@ -134,17 +162,13 @@ def _check_subscription_behaviors(self): "Mutate the ISA", [self._dss.participant_id], ) as check: + isa_mutation_params = self._isa_params.copy() + isa_mutation_params["alt_hi"] -= 1 # reduce max altitude by one meter mutated_isa = self._dss_wrapper.put_isa_expect_response_code( check=check, expected_error_codes={200}, - area_vertices=self._isa_area, - alt_lo=self._isa.altitude_min, - alt_hi=self._isa.altitude_max - 1, # reduce max altitude by one meter - start_time=self._isa_start_time, - end_time=self._isa_end_time, - uss_base_url=self._isa.base_url, - isa_id=self._isa_id, isa_version=created_isa.dss_query.isa.version, + **isa_mutation_params, ) # Check that the subscription ID is 
returned in the response @@ -278,6 +302,225 @@ def _check_subscription_behaviors(self): sub_version=created_subscription.subscription.version, ) + def _mutate_subscription_towards_isa_boundary_step(self): + """ + - Create an ISA. + - Create a subscription with the ISA's area, response should include the pre-existing ISA + and have a notification_index of 0. + - Modify the subscription such that its area has a very small overlap with the ISA + - Modify the ISA, response should include the subscription with an incremented notification_index. + - Delete the ISA, response should include the subscription with an incremented notification_index. + - Delete the subscription. + """ + + # Create an ISA + with self.check("Create an ISA", [self._dss.participant_id]) as check: + created_isa = self._dss_wrapper.put_isa_expect_response_code( + check=check, + expected_error_codes={200}, + isa_version=None, + **self._isa_params, + ) + + # Create a subscription on the ISA boundary + with self.check( + "Create a subscription within the ISA footprint", [self._dss.participant_id] + ) as check: + created_subscription = self._dss_wrapper.put_sub( + check=check, + area_vertices=self._isa_area, + **self._sub_params, + ) + + # Mutate the subscription towards the ISA boundary + with self.check( + "Mutate the subscription towards the ISA boundary", + [self._dss.participant_id], + ) as check: + mutated_subscription = self._dss_wrapper.put_sub( + check=check, + area_vertices=self._slight_overlap_area, + sub_version=created_subscription.subscription.version, + **self._sub_params, + ) + + # Check the subscription + with self.check( + "Subscription for the ISA's area mentions the ISA", + [self._dss.participant_id], + ) as check: + if self._isa_id not in [isa.id for isa in mutated_subscription.isas]: + check.record_failed( + summary="Subscription response does not include the freshly created ISA", + severity=Severity.High, + participants=[self._dss.participant_id], + details=f"The subscription 
created for the area {self._isa_area} is expected to contain the ISA created for this same area. The returned subscription did not mention it.", + query_timestamps=[ + created_isa.dss_query.query.request.timestamp, + created_subscription.query.request.timestamp, + ], + ) + + with self.check( + "Mutated subscription has a notification_index of 0", + [self._dss.participant_id], + ) as check: + if created_subscription.subscription.notification_index != 0: + check.record_failed( + summary="Subscription notification_index is not 0", + severity=Severity.High, + participants=[self._dss.participant_id], + details=f"The subscription created for the area {self._isa_area} is expected to have a notification_index of 0. The returned subscription has a notification_index of {created_subscription.subscription.notification_index}.", + query_timestamps=[created_subscription.query.request.timestamp], + ) + + # Modify the ISA + with self.check( + "Mutate the ISA", + [self._dss.participant_id], + ) as check: + mutation_params = self._isa_params.copy() + mutation_params["alt_hi"] -= 1 # reduce max altitude by one meter + mutated_isa = self._dss_wrapper.put_isa_expect_response_code( + check=check, + expected_error_codes={200}, + isa_version=created_isa.dss_query.isa.version, + **mutation_params, + ) + + # Check that the subscription ID is returned in the response + with self.check( + "Response to the mutation of the ISA contains subscription ID", + [self._dss.participant_id], + ) as check: + + subs_to_mutated_isa = {} + for returned_subscriber in mutated_isa.dss_query.subscribers: + for sub_in_subscriber in returned_subscriber.raw.subscriptions: + subs_to_mutated_isa[ + sub_in_subscriber.subscription_id + ] = sub_in_subscriber + + if created_subscription.subscription.id not in subs_to_mutated_isa.keys(): + check.record_failed( + summary="ISA mutation response does not contain expected subscription ID", + severity=Severity.High, + participants=[self._dss.participant_id], + 
details="Mutating an ISA to which a subscription was made and then subsequently moved to the ISA's boundary," + " the DSS failed to return the subscription ID in the response.", + query_timestamps=[ + created_isa.dss_query.query.request.timestamp, + created_subscription.query.request.timestamp, + mutated_subscription.query.request.timestamp, + mutated_isa.dss_query.query.request.timestamp, + ], + ) + + # Check that the subscription index has been incremented by at least 1 + sub_to_mutated_isa = subs_to_mutated_isa.get( + created_subscription.subscription.id + ) + if sub_to_mutated_isa is not None: + with self.check( + "Subscription to an ISA has its notification index incremented after mutation", + [self._dss.participant_id], + ) as check: + if sub_to_mutated_isa.notification_index <= 0: + check.record_failed( + summary="Subscription notification_index has not been increased", + severity=Severity.High, + participants=[self._dss.participant_id], + details=f"The subscription created for the area {self._isa_area} is expected to have a notification_index of 1 or more. 
The returned subscription has a notification_index of {subs_to_mutated_isa[created_subscription.subscription.id].notification_index}.", + query_timestamps=[created_subscription.query.request.timestamp], + ) + + # Delete the ISA + with self.check( + "Delete the ISA", + [self._dss.participant_id], + ) as check: + deleted_isa = self._dss_wrapper.del_isa_expect_response_code( + main_check=check, + expected_error_codes={200}, + isa_id=mutated_isa.dss_query.isa.id, + isa_version=mutated_isa.dss_query.isa.version, + ) + + # Check response to deletion of ISA + with self.check( + "Response to the deletion of the ISA contains subscription ID", + [self._dss.participant_id], + ) as check: + + subs_to_deleted_isa = {} + for returned_subscriber in deleted_isa.dss_query.subscribers: + for sub_in_subscriber in returned_subscriber.raw.subscriptions: + subs_to_deleted_isa[ + sub_in_subscriber.subscription_id + ] = sub_in_subscriber + + if created_subscription.subscription.id not in subs_to_deleted_isa: + check.record_failed( + summary="ISA deletion response does not contain expected subscription ID", + severity=Severity.High, + participants=[self._dss.participant_id], + details="Deleting an ISA to which a subscription was made, the DSS failed to return the subscription ID in the response.", + query_timestamps=[ + created_isa.dss_query.query.request.timestamp, + created_subscription.query.request.timestamp, + deleted_isa.dss_query.query.request.timestamp, + ], + ) + + for subscriber_url, notification in deleted_isa.notifications.items(): + # For checking the notifications, we ignore the request we made for the subscription that we created. 
+ if self._isa.base_url not in subscriber_url: + pid = ( + notification.query.participant_id + if "participant_id" in notification.query + else None + ) + with self.check("Notified subscriber", [pid] if pid else []) as check: + if not notification.success: + check.record_failed( + "Could not notify ISA subscriber", + Severity.Medium, + f"Attempting to notify subscriber for ISA {self._isa_id} at {subscriber_url} resulted in {notification.status_code}", + query_timestamps=[notification.query.request.timestamp], + ) + + subs_after_deletion = subs_to_deleted_isa.get( + created_subscription.subscription.id + ) + if subs_after_deletion is not None: + with self.check( + "Subscription to an ISA has its notification index incremented after deletion", + [self._dss.participant_id], + ) as check: + if ( + subs_after_deletion.notification_index + <= sub_to_mutated_isa.notification_index + ): + check.record_failed( + summary="Subscription notification_index has not been incremented", + severity=Severity.High, + participants=[self._dss.participant_id], + details=f"The subscription created for the area {self._isa_area} is expected to have its notification increased after the ISA was deleted." 
+ f"The returned subscription has a notification_index of {subs_after_deletion.notification_index}, while the previous notification_index for that subscription was {sub_to_mutated_isa.notification_index}", + query_timestamps=[created_subscription.query.request.timestamp], + ) + + # Delete the subscription + with self.check( + "Subscription can be deleted", + [self._dss.participant_id], + ) as check: + self._dss_wrapper.del_sub( + check=check, + sub_id=self._sub_id, + sub_version=mutated_subscription.subscription.version, + ) + def _setup_case(self): self.begin_test_case("Setup") @@ -305,6 +548,9 @@ def _delete_isa_if_exists(self): def _clean_any_sub(self): self._dss_wrapper.cleanup_subs_in_area(self._isa_area) + # Explicitly clean up in the separate area in case the DSS does + # not return that subscription when searching in the ISA's footprint + self._dss_wrapper.cleanup_subs_in_area(self._slight_overlap_area) def cleanup(self): self.begin_cleanup() diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/utils.py b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/utils.py index 036876ed93..54e20a6655 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/utils.py +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/utils.py @@ -1,5 +1,7 @@ from typing import Optional +from s2sphere import LatLng + from monitoring.monitorlib.fetch import rid as fetch from monitoring.monitorlib.infrastructure import UTMClientSession from monitoring.monitorlib.mutate import rid as mutate diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/dss_wrapper.py b/monitoring/uss_qualifier/scenarios/astm/netrid/dss_wrapper.py index f1f1fb2de5..bbc277b0ca 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/dss_wrapper.py +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/dss_wrapper.py @@ -303,6 +303,9 @@ def put_isa_expect_response_code( participant_id=self._dss.participant_id, ) + for notification_query in 
mutated_isa.notifications.values(): + self._scenario.record_query(notification_query.query) + self.handle_query_result( check=check, q=mutated_isa.dss_query, diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/v19/dss/isa_subscription_interactions.md b/monitoring/uss_qualifier/scenarios/astm/netrid/v19/dss/isa_subscription_interactions.md index 6b66615868..9fcbb82edf 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/v19/dss/isa_subscription_interactions.md +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/v19/dss/isa_subscription_interactions.md @@ -51,7 +51,7 @@ When a pre-existing ISA needs to be deleted to ensure a clean workspace, any sub This test case will do the following, using the DSS being tested: 1. Create an ISA with the configured footprint, -2. Create a subscription for the ISA's area, and expect: +2. Do several variants of creating and possibly mutating a subscription, either in or close to the ISA's area, and expect: - to find the created ISA mentioned in the reply - the notification index of the subscription to be 0 3. Modify the ISA, and expect: @@ -62,7 +62,10 @@ This test case will do the following, using the DSS being tested: - the notification index of the subscription to be greater than it was after the mutation 5. Delete the subscription. -### ISA Subscription Interactions test step +### New Subscription within ISA test step + +This test step checks for interactions between ISAs and a subscription that is created within the ISA, then +subsequently mutated to only barely intersect with the ISA. #### Create an ISA check @@ -122,6 +125,78 @@ Failure to do so means that the DSS is not properly implementing **[astm.f3411.v Notifications to any subscriber to the created ISA need to be successful. If a notification cannot be delivered, then the **[astm.f3411.v19.NET0730](../../../../../requirements/astm/f3411/v19.md)** requirement to implement the POST ISAs endpoint isn't met. 
+### New subscription within ISA is mutated to ISA boundary test step + +This test step checks for interactions between ISAs and a subscription that is created within the ISA and then mutated +to only barely overlap with the ISA. + +#### Create an ISA check + +If the ISA cannot be created, the PUT DSS endpoint in **[astm.f3411.v19.DSS0030,a](../../../../../requirements/astm/f3411/v19.md)** is likely not implemented correctly. + +#### Create a subscription within the ISA footprint check + +The DSS should allow the creation of a subscription within the ISA footprint, otherwise it is in violation of **[astm.f3411.v19.DSS0030,c](../../../../../requirements/astm/f3411/v19.md)** + +#### Mutate the subscription towards the ISA boundary check + +The DSS should allow a valid mutation of a subscription's area, otherwise it is in violation of **[astm.f3411.v19.DSS0030,c](../../../../../requirements/astm/f3411/v19.md)** + +#### Subscription for the ISA's area mentions the ISA check + +A subscription that is created for a volume that intersects with the previously created ISA should mention +the previously created ISA. If not, the serving DSS is in violation of **[astm.f3411.v19.DSS0030,c](../../../../../requirements/astm/f3411/v19.md)**. + +#### Mutated subscription has a notification_index of 0 check + +A newly created subscription is expected to have a notification index of 0, otherwise the DSS implementation under +test does not comply with **[astm.f3411.v19.DSS0030,c](../../../../../requirements/astm/f3411/v19.md)** + +#### Mutate the ISA check + +If the ISA cannot be mutated, **[astm.f3411.v19.DSS0030,a](../../../../../requirements/astm/f3411/v19.md)** is likely not implemented correctly. + +#### Response to the mutation of the ISA contains subscription ID check + +When an ISA is mutated, the DSS must return the identifiers for any subscription that was made to the ISA, +or be in violation of **[astm.f3411.v19.DSS0030,a](../../../../../requirements/astm/f3411/v19.md)**. 
+ +#### Subscription to an ISA has its notification index incremented after mutation check + +When an ISA is mutated, the DSS must increment the notification index of any subscription to that ISA, +and return the up-to-date subscription in the response to the query mutating the ISA. + +Failure to do so means that the DSS is not properly implementing **[astm.f3411.v19.DSS0030,a](../../../../../requirements/astm/f3411/v19.md)**. + +#### Subscription that only barely overlaps the ISA contains the ISA check + +A subscription that is created for a volume that only barely overlaps with the previously created ISA should still +contain the ISA in the reply from the server, otherwise the DSS does not comply with **[astm.f3411.v19.DSS0030,c](../../../../../requirements/astm/f3411/v19.md)** + +#### Delete the ISA check + +If that ISA cannot be deleted, the **[astm.f3411.v19.DSS0030,d](../../../../../requirements/astm/f3411/v19.md)** requirement to implement the ISA deletion endpoint might not be met. + +#### Response to the deletion of the ISA contains subscription ID check + +When an ISA is deleted, the DSS must return the identifiers for any subscription that was made to the ISA, +or be in violation of **[astm.f3411.v19.DSS0030,b](../../../../../requirements/astm/f3411/v19.md)**. + +#### Subscription to an ISA has its notification index incremented after deletion check + +When an ISA is deleted, the DSS must increment the notification index of any subscription to that ISA, +and return the up-to-date subscription in the response to the query deleting the ISA. + +Failure to do so means that the DSS is not properly implementing **[astm.f3411.v19.DSS0030,a](../../../../../requirements/astm/f3411/v19.md)**. + +#### Subscription can be deleted check + +**[astm.f3411.v19.DSS0030,d](../../../../../requirements/astm/f3411/v19.md)** requires the implementation of the DSS endpoint to allow callers to delete subscriptions they created. 
+ +#### Notified subscriber check + +Notifications to any subscriber to the created ISA need to be successful. If a notification cannot be delivered, then the **[astm.f3411.v19.NET0730](../../../../../requirements/astm/f3411/v19.md)** requirement to implement the POST ISAs endpoint isn't met. + ## Cleanup The cleanup phase of this test scenario attempts to remove the ISA if the test ended prematurely while diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/v22a/dss/isa_subscription_interactions.md b/monitoring/uss_qualifier/scenarios/astm/netrid/v22a/dss/isa_subscription_interactions.md index 526545c24a..8e78991083 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/v22a/dss/isa_subscription_interactions.md +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/v22a/dss/isa_subscription_interactions.md @@ -51,7 +51,7 @@ When a pre-existing ISA needs to be deleted to ensure a clean workspace, any sub This test case will do the following, using the DSS being tested: 1. Create an ISA with the configured footprint, -2. Create a subscription for the ISA's area, and expect: +2. Do several variants of creating and possibly mutating a subscription, either in or close to the ISA's area, and expect: - to find the created ISA mentioned in the reply - the notification index of the subscription to be 0 3. Modify the ISA, and expect: @@ -62,7 +62,10 @@ This test case will do the following, using the DSS being tested: - the notification index of the subscription to be greater than it was after the mutation 5. Delete the subscription. -### ISA Subscription Interactions test step +### New Subscription within ISA test step + +This test step checks for interactions between ISAs and a subscription that is created within the ISA, then +subsequently mutated to only barely intersect with the ISA. 
#### Create an ISA check @@ -122,6 +125,78 @@ Failure to do so means that the DSS is not properly implementing **[astm.f3411.v Notifications to any subscriber to the created ISA need to be successful. If a notification cannot be delivered, then the **[astm.f3411.v22a.NET0730](../../../../../requirements/astm/f3411/v22a.md)** requirement to implement the POST ISAs endpoint isn't met. +### New subscription within ISA is mutated to ISA boundary test step + +This test step checks for interactions between ISAs and a subscription that is created within the ISA and then mutated +to only barely overlap with the ISA. + +#### Create an ISA check + +If the ISA cannot be created, the PUT DSS endpoint in **[astm.f3411.v22a.DSS0030,a](../../../../../requirements/astm/f3411/v22a.md)** is likely not implemented correctly. + +#### Create a subscription within the ISA footprint check + +The DSS should allow the creation of a subscription within the ISA footprint, otherwise it is in violation of **[astm.f3411.v22a.DSS0030,c](../../../../../requirements/astm/f3411/v22a.md)** + +#### Mutate the subscription towards the ISA boundary check + +The DSS should allow a valid mutation of a subscription's area, otherwise it is in violation of **[astm.f3411.v22a.DSS0030,c](../../../../../requirements/astm/f3411/v22a.md)** + +#### Subscription for the ISA's area mentions the ISA check + +A subscription that is created for a volume that intersects with the previously created ISA should mention +the previously created ISA. If not, the serving DSS is in violation of **[astm.f3411.v22a.DSS0030,c](../../../../../requirements/astm/f3411/v22a.md)**. 
+ +#### Mutated subscription has a notification_index of 0 check + +A newly created subscription is expected to have a notification index of 0, otherwise the DSS implementation under +test does not comply with **[astm.f3411.v22a.DSS0030,c](../../../../../requirements/astm/f3411/v22a.md)** + +#### Mutate the ISA check + +If the ISA cannot be mutated, **[astm.f3411.v22a.DSS0030,a](../../../../../requirements/astm/f3411/v22a.md)** is likely not implemented correctly. + +#### Response to the mutation of the ISA contains subscription ID check + +When an ISA is mutated, the DSS must return the identifiers for any subscription that was made to the ISA, +or be in violation of **[astm.f3411.v22a.DSS0030,a](../../../../../requirements/astm/f3411/v22a.md)**. + +#### Subscription to an ISA has its notification index incremented after mutation check + +When an ISA is mutated, the DSS must increment the notification index of any subscription to that ISA, +and return the up-to-date subscription in the response to the query mutating the ISA. + +Failure to do so means that the DSS is not properly implementing **[astm.f3411.v22a.DSS0030,a](../../../../../requirements/astm/f3411/v22a.md)**. + +#### Subscription that only barely overlaps the ISA contains the ISA check + +A subscription that is created for a volume that only barely overlaps with the previously created ISA should still +contain the ISA in the reply from the server, otherwise the DSS does not comply with **[astm.f3411.v22a.DSS0030,c](../../../../../requirements/astm/f3411/v22a.md)** + +#### Delete the ISA check + +If that ISA cannot be deleted, the **[astm.f3411.v22a.DSS0030,d](../../../../../requirements/astm/f3411/v22a.md)** requirement to implement the ISA deletion endpoint might not be met. 
+ +#### Response to the deletion of the ISA contains subscription ID check + +When an ISA is deleted, the DSS must return the identifiers for any subscription that was made to the ISA, +or be in violation of **[astm.f3411.v22a.DSS0030,b](../../../../../requirements/astm/f3411/v22a.md)**. + +#### Subscription to an ISA has its notification index incremented after deletion check + +When an ISA is deleted, the DSS must increment the notification index of any subscription to that ISA, +and return the up-to-date subscription in the response to the query deleting the ISA. + +Failure to do so means that the DSS is not properly implementing **[astm.f3411.v22a.DSS0030,a](../../../../../requirements/astm/f3411/v22a.md)**. + +#### Subscription can be deleted check + +**[astm.f3411.v22a.DSS0030,d](../../../../../requirements/astm/f3411/v22a.md)** requires the implementation of the DSS endpoint to allow callers to delete subscriptions they created. + +#### Notified subscriber check + +Notifications to any subscriber to the created ISA need to be successful. If a notification cannot be delivered, then the **[astm.f3411.v22a.NET0730](../../../../../requirements/astm/f3411/v22a.md)** requirement to implement the POST ISAs endpoint isn't met. 
+ ## Cleanup The cleanup phase of this test scenario attempts to remove the ISA if the test ended prematurely while diff --git a/monitoring/uss_qualifier/scenarios/astm/utm/__init__.py b/monitoring/uss_qualifier/scenarios/astm/utm/__init__.py index fee5010cb1..e87ac2555b 100644 --- a/monitoring/uss_qualifier/scenarios/astm/utm/__init__.py +++ b/monitoring/uss_qualifier/scenarios/astm/utm/__init__.py @@ -8,3 +8,4 @@ from .dss_interoperability import DSSInteroperability from .aggregate_checks import AggregateChecks from .prep_planners import PrepareFlightPlanners +from .off_nominal_planning.down_uss import DownUSS diff --git a/monitoring/uss_qualifier/scenarios/astm/utm/off_nominal_planning/__init__.py b/monitoring/uss_qualifier/scenarios/astm/utm/off_nominal_planning/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/monitoring/uss_qualifier/scenarios/astm/utm/off_nominal_planning/down_uss.md b/monitoring/uss_qualifier/scenarios/astm/utm/off_nominal_planning/down_uss.md new file mode 100644 index 0000000000..24556a788c --- /dev/null +++ b/monitoring/uss_qualifier/scenarios/astm/utm/off_nominal_planning/down_uss.md @@ -0,0 +1,125 @@ +# Off-Nominal planning: down USS test scenario + +## Description +This test aims to test the strategic coordination requirements that relate to the down USS mechanism: +- **[astm.f3548.v21.SCD0005](../../../../requirements/astm/f3548/v21.md)** +- **[astm.f3548.v21.SCD0010](../../../../requirements/astm/f3548/v21.md)** + +It involves a single tested USS. The USS qualifier acts as a virtual USS that may have its availability set to down. + +## Resources +### flight_intents +FlightIntentsResource that provides the following flight intents: + +
Flight intent ID | +Flight name | +Priority | +State | +Must conflict with | +Must not conflict with | +
---|---|---|---|---|---|
flight_1_planned_vol_A |
+ Flight 1 | +Any | +Accepted | +Flight 2 | +Flight 2m | +
flight_2_planned_vol_A |
+ Flight 2 | +Higher than Flight 1* | +Accepted | +Flight 1 | +N/A | +