From d39e2185787d404a852c950b45108526f50a0f7e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micka=C3=ABl=20Misbach?= Date: Wed, 13 Sep 2023 18:43:03 +0200 Subject: [PATCH] [uss_qualifier/netrid/dss/isa_simple] Refactor create step ; implement update step --- .../mock_uss/riddp/routes_observation.py | 4 +- monitoring/mock_uss/tracer/tracer_poll.py | 4 +- monitoring/monitorlib/fetch/rid.py | 26 +- monitoring/monitorlib/geo.py | 13 + monitoring/monitorlib/mutate/rid.py | 29 +- monitoring/monitorlib/rid.py | 9 + monitoring/monitorlib/schema_validation.py | 6 + .../configurations/dev/library/resources.yaml | 15 +- .../resources/netrid/service_area.py | 26 +- .../astm/netrid/common/dss/isa_simple.py | 356 ++++++++++++------ .../common/dss/subscription_validation.py | 4 +- .../netrid/common/dss_interoperability.py | 2 +- .../scenarios/astm/netrid/dss_wrapper.py | 151 +++++++- .../astm/netrid/v22a/dss/isa_simple.md | 100 ++++- 14 files changed, 564 insertions(+), 181 deletions(-) diff --git a/monitoring/mock_uss/riddp/routes_observation.py b/monitoring/mock_uss/riddp/routes_observation.py index d4a1f3b5e6..3a1c1c8c4b 100644 --- a/monitoring/mock_uss/riddp/routes_observation.py +++ b/monitoring/mock_uss/riddp/routes_observation.py @@ -92,7 +92,9 @@ def riddp_display_data() -> Tuple[str, int]: # Get ISAs in the DSS t = arrow.utcnow().datetime - isa_list: FetchedISAs = fetch.isas(view, t, t, rid_version, utm_client) + isa_list: FetchedISAs = fetch.isas( + geo.get_latlngrect_vertices(view), t, t, rid_version, utm_client + ) if not isa_list.success: msg = f"Error fetching ISAs from DSS: {isa_list.errors}" logger.error(msg) diff --git a/monitoring/mock_uss/tracer/tracer_poll.py b/monitoring/mock_uss/tracer/tracer_poll.py index 0d9eb7e5eb..4e29c6a8d0 100755 --- a/monitoring/mock_uss/tracer/tracer_poll.py +++ b/monitoring/mock_uss/tracer/tracer_poll.py @@ -21,7 +21,7 @@ from monitoring.mock_uss.tracer import context from monitoring.monitorlib.fetch.rid import FetchedISAs from monitoring.monitorlib.fetch.scd import FetchedEntities -from monitoring.monitorlib.geo import make_latlng_rect +from monitoring.monitorlib.geo import make_latlng_rect, get_latlngrect_vertices from monitoring.monitorlib.infrastructure import UTMClientSession from monitoring.monitorlib.multiprocessing import SynchronizedValue @@ -97,7 +97,7 @@ def poll_observation_areas() -> None: def poll_isas(area: ObservationArea, logger: tracerlog.Logger) -> None: rid_client = context.get_client(area.f3411.auth_spec, area.f3411.dss_base_url) - box = make_latlng_rect(area.area.volume) + box = get_latlngrect_vertices(make_latlng_rect(area.area.volume)) log_name = "poll_isas" t0 = datetime.datetime.utcnow() diff --git a/monitoring/monitorlib/fetch/rid.py b/monitoring/monitorlib/fetch/rid.py index 3da7beb610..42273f1667 100644 --- a/monitoring/monitorlib/fetch/rid.py +++ b/monitoring/monitorlib/fetch/rid.py @@ -654,19 +654,23 @@ def has_different_content_than(self, other: Any) -> bool: def isas( - box: s2sphere.LatLngRect, - start_time: datetime.datetime, - end_time: datetime.datetime, + area: List[s2sphere.LatLng], + start_time: Optional[datetime.datetime], + end_time: Optional[datetime.datetime], rid_version: RIDVersion, session: UTMClientSession, dss_base_url: str = "", ) -> FetchedISAs: - t0 = rid_version.format_time(start_time) - t1 = rid_version.format_time(end_time) + url_time_params = "" + if start_time is not None: + url_time_params += f"&earliest_time={rid_version.format_time(start_time)}" + if end_time is not None: + url_time_params += 
f"&latest_time={rid_version.format_time(end_time)}" + if rid_version == RIDVersion.f3411_19: op = v19.api.OPERATIONS[v19.api.OperationID.SearchIdentificationServiceAreas] - area = rid_v1.geo_polygon_string_from_s2(geo.get_latlngrect_vertices(box)) - url = f"{dss_base_url}{op.path}?area={area}&earliest_time={t0}&latest_time={t1}" + area = rid_v1.geo_polygon_string_from_s2(area) + url = f"{dss_base_url}{op.path}?area={area}{url_time_params}" return FetchedISAs( v19_query=fetch.query_and_describe( session, op.verb, url, scope=v19.constants.Scope.Read @@ -674,8 +678,8 @@ def isas( ) elif rid_version == RIDVersion.f3411_22a: op = v22a.api.OPERATIONS[v22a.api.OperationID.SearchIdentificationServiceAreas] - area = rid_v2.geo_polygon_string_from_s2(geo.get_latlngrect_vertices(box)) - url = f"{dss_base_url}{op.path}?area={area}&earliest_time={t0}&latest_time={t1}" + area = rid_v2.geo_polygon_string_from_s2(area) + url = f"{dss_base_url}{op.path}?area={area}{url_time_params}" return FetchedISAs( v22a_query=fetch.query_and_describe( session, op.verb, url, scope=v22a.constants.Scope.DisplayProvider @@ -948,7 +952,9 @@ def all_flights( enhanced_details: bool = False, ) -> FetchedFlights: t = datetime.datetime.utcnow() - isa_list = isas(area, t, t, rid_version, session, dss_base_url) + isa_list = isas( + geo.get_latlngrect_vertices(area), t, t, rid_version, session, dss_base_url + ) uss_flight_queries: Dict[str, FetchedUSSFlights] = {} uss_flight_details_queries: Dict[str, FetchedUSSFlightDetails] = {} diff --git a/monitoring/monitorlib/geo.py b/monitoring/monitorlib/geo.py index dc191d5a6a..f921552bc0 100644 --- a/monitoring/monitorlib/geo.py +++ b/monitoring/monitorlib/geo.py @@ -251,3 +251,16 @@ def to_vertices(self) -> List[s2sphere.LatLng]: s2sphere.LatLng.from_degrees(self.lat_max, self.lng_max), s2sphere.LatLng.from_degrees(self.lat_min, self.lng_max), ] + + +class LatLngVertex(ImplicitDict): + """Vertex in latitude and longitude""" + + lat: float + """Latitude (degrees)""" + + lng: float + """Longitude (degrees)""" + + def as_s2sphere(self) -> s2sphere.LatLng: + return s2sphere.LatLng.from_degrees(self.lat, self.lng) diff --git a/monitoring/monitorlib/mutate/rid.py b/monitoring/monitorlib/mutate/rid.py index 2c8184d4b0..ae9e3900cc 100644 --- a/monitoring/monitorlib/mutate/rid.py +++ b/monitoring/monitorlib/mutate/rid.py @@ -14,7 +14,13 @@ import yaml from yaml.representer import Representer -from monitoring.monitorlib import fetch, infrastructure, rid_v1, rid_v2 +from monitoring.monitorlib import ( + fetch, + infrastructure, + rid_v1, + rid_v2, + schema_validation, +) class ChangedSubscription(RIDQuery): @@ -61,6 +67,8 @@ def errors(self) -> List[str]: f"Error parsing F3411-22a USS PutSubscriptionResponse: {str(e)}" ] + # TODO: add schema validation (like ChangedISA) + return [] @property @@ -308,7 +316,11 @@ def _v22a_response( @property def errors(self) -> List[str]: - if self.status_code != 200: + # Tolerate reasonable-but-technically-incorrect code 201 + if not ( + self.status_code == 200 + or (self.mutation == "create" and self.status_code == 201) + ): return ["Failed to mutate ISA ({})".format(self.status_code)] if self.query.response.json is None: return ["ISA response did not include valid JSON"] @@ -337,6 +349,19 @@ def errors(self) -> List[str]: f"Error parsing F3411-22a USS PutIdentificationServiceAreaResponse: {str(e)}" ] + validation_errors = schema_validation.validate( + self.rid_version.openapi_path, + self.rid_version.openapi_delete_isa_response_path + if self.mutation == 
"delete" + else self.rid_version.openapi_put_isa_response_path, + self.query.response.json, + ) + if validation_errors: + return [ + f"PUT ISA response JSON validation error: [{e.json_path}] {e.message}" + for e in validation_errors + ] + return [] @property diff --git a/monitoring/monitorlib/rid.py b/monitoring/monitorlib/rid.py index 196409a752..d8fe432a5e 100644 --- a/monitoring/monitorlib/rid.py +++ b/monitoring/monitorlib/rid.py @@ -56,6 +56,15 @@ def openapi_put_isa_response_path(self) -> str: else: raise ValueError(f"Unsupported RID version '{self}'") + @property + def openapi_delete_isa_response_path(self) -> str: + if self == RIDVersion.f3411_19: + return schema_validation.F3411_19.DeleteIdentificationServiceAreaResponse + elif self == RIDVersion.f3411_22a: + return schema_validation.F3411_22a.DeleteIdentificationServiceAreaResponse + else: + raise ValueError(f"Unsupported RID version '{self}'") + @property def realtime_period(self) -> timedelta: if self == RIDVersion.f3411_19: diff --git a/monitoring/monitorlib/schema_validation.py b/monitoring/monitorlib/schema_validation.py index 29346ed7cb..56ff259f62 100644 --- a/monitoring/monitorlib/schema_validation.py +++ b/monitoring/monitorlib/schema_validation.py @@ -19,6 +19,9 @@ class F3411_19(str, Enum): PutIdentificationServiceAreaResponse = ( "components.schemas.PutIdentificationServiceAreaResponse" ) + DeleteIdentificationServiceAreaResponse = ( + "components.schemas.DeleteIdentificationServiceAreaResponse" + ) class F3411_22a(str, Enum): @@ -28,6 +31,9 @@ class F3411_22a(str, Enum): PutIdentificationServiceAreaResponse = ( "components.schemas.PutIdentificationServiceAreaResponse" ) + DeleteIdentificationServiceAreaResponse = ( + "components.schemas.DeleteIdentificationServiceAreaResponse" + ) class F3548_21(str, Enum): diff --git a/monitoring/uss_qualifier/configurations/dev/library/resources.yaml b/monitoring/uss_qualifier/configurations/dev/library/resources.yaml index e299b45688..324a0b5d61 100644 --- a/monitoring/uss_qualifier/configurations/dev/library/resources.yaml +++ b/monitoring/uss_qualifier/configurations/dev/library/resources.yaml @@ -32,12 +32,19 @@ net_rid: specification: base_url: https://uss_qualifier.test.utm/dummy_base_url footprint: - lat_min: 37.1853 - lng_min: -80.6140 - lat_max: 37.2148 - lng_max: -80.5440 + - lat: 37.1853 + lng: -80.6140 + - lat: 37.2148 + lng: -80.6140 + - lat: 37.2148 + lng: -80.5440 + - lat: 37.1853 + lng: -80.5440 altitude_min: 0 altitude_max: 3048 + reference_time: '2023-01-10T00:00:00.123456+00:00' + time_start: '2023-01-10T00:00:01.123456+00:00' + time_end: '2023-01-10T01:00:01.123456+00:00' net_rid_sims: adjacent_circular_flights_data: diff --git a/monitoring/uss_qualifier/resources/netrid/service_area.py b/monitoring/uss_qualifier/resources/netrid/service_area.py index d14ce141dc..9e2e01224a 100644 --- a/monitoring/uss_qualifier/resources/netrid/service_area.py +++ b/monitoring/uss_qualifier/resources/netrid/service_area.py @@ -1,5 +1,8 @@ -from implicitdict import ImplicitDict -from monitoring.monitorlib.geo import LatLngBoundingBox +import datetime +from typing import List + +from implicitdict import ImplicitDict, StringBasedDateTime +from monitoring.monitorlib.geo import LatLngVertex from monitoring.uss_qualifier.resources.resource import Resource @@ -12,7 +15,7 @@ class ServiceAreaSpecification(ImplicitDict): This URL will probably not identify a real resource in tests.""" - footprint: LatLngBoundingBox + footprint: List[LatLngVertex] """2D outline of service area""" 
altitude_min: float = 0 @@ -21,6 +24,23 @@ class ServiceAreaSpecification(ImplicitDict): altitude_max: float = 3048 """Upper altitude bound of service area, meters above WGS84 ellipsoid""" + reference_time: StringBasedDateTime + """Reference time used to adjust start and end times at runtime""" + + time_start: StringBasedDateTime + """Start time of service area (relative to reference_time)""" + + time_end: StringBasedDateTime + """End time of service area (relative to reference_time)""" + + def shifted_time_start(self, now: datetime.datetime) -> datetime.datetime: + dt = now - self.reference_time.datetime + return self.time_start.datetime + dt + + def shifted_time_end(self, now: datetime.datetime) -> datetime.datetime: + dt = now - self.reference_time.datetime + return self.time_end.datetime + dt + class ServiceAreaResource(Resource[ServiceAreaSpecification]): specification: ServiceAreaSpecification diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/isa_simple.py b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/isa_simple.py index 0a583a954d..a31d10fd97 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/isa_simple.py +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/isa_simple.py @@ -1,8 +1,9 @@ -from datetime import timedelta +import datetime +from typing import Optional, List import arrow +import s2sphere -from monitoring.monitorlib import schema_validation from monitoring.monitorlib.fetch import rid as fetch from monitoring.monitorlib.mutate import rid as mutate from monitoring.prober.infrastructure import register_resource_type @@ -10,10 +11,15 @@ from monitoring.uss_qualifier.resources.astm.f3411.dss import DSSInstanceResource from monitoring.uss_qualifier.resources.interuss.id_generator import IDGeneratorResource from monitoring.uss_qualifier.resources.netrid.service_area import ServiceAreaResource +from monitoring.uss_qualifier.scenarios.astm.netrid.dss_wrapper import DSSWrapper from monitoring.uss_qualifier.scenarios.scenario import GenericTestScenario - -MAX_SKEW = 1e-6 # seconds maximum difference between expected and actual timestamps +HUGE_VERTICES: List[s2sphere.LatLng] = [ + s2sphere.LatLng.from_degrees(lng=130, lat=-23), + s2sphere.LatLng.from_degrees(lng=130, lat=-24), + s2sphere.LatLng.from_degrees(lng=132, lat=-24), + s2sphere.LatLng.from_degrees(lng=132, lat=-23), +] class ISASimple(GenericTestScenario): @@ -28,10 +34,19 @@ def __init__( isa: ServiceAreaResource, ): super().__init__() - self._dss = dss.dss_instance + self._dss = ( + dss.dss_instance + ) # TODO: delete once _delete_isa_if_exists updated to use dss_wrapper + self._dss_wrapper = DSSWrapper(self, dss.dss_instance) self._isa_id = id_generator.id_factory.make_id(ISASimple.ISA_TYPE) + self._isa_version: Optional[str] = None self._isa = isa.specification + now = arrow.utcnow().datetime + self._isa_start_time = self._isa.shifted_time_start(now) + self._isa_end_time = self._isa.shifted_time_end(now) + self._isa_area = [vertex.as_s2sphere() for vertex in self._isa.footprint] + def run(self): self.begin_test_scenario() @@ -45,7 +60,14 @@ def run(self): def _setup_case(self): self.begin_test_case("Setup") - self._ensure_clean_workspace_step() + def _ensure_clean_workspace_step(): + self.begin_test_step("Ensure clean workspace") + + self._delete_isa_if_exists() + + self.end_test_step() + + _ensure_clean_workspace_step() self.end_test_case() @@ -94,148 +116,234 @@ def _delete_isa_if_exists(self): query_timestamps=[notification.query.request.timestamp], ) 
- def _ensure_clean_workspace_step(self): - self.begin_test_step("Ensure clean workspace") + def _get_isa_by_id_step(self): + self.begin_test_step("Get ISA by ID") - self._delete_isa_if_exists() + with self.check( + "Successful ISA query", [self._dss_wrapper.participant_id] + ) as check: + fetched = self._dss_wrapper.get_isa(check, self._isa_id) + + with self.check( + "ISA version match", [self._dss_wrapper.participant_id] + ) as check: + if ( + self._isa_version is not None + and fetched.isa.version != self._isa_version + ): + check.record_failed( + "DSS returned ISA with incorrect version", + Severity.High, + f"DSS should have returned an ISA with the version {self._isa_version}, but instead the ISA returned had the version {fetched.isa.version}", + query_timestamps=[fetched.query.request.timestamp], + ) self.end_test_step() def _create_and_check_isa_case(self): self.begin_test_case("Create and check ISA") - self._create_isa_step() + def _create_isa_step(): + self.begin_test_step("Create ISA") + + with self.check("ISA created", [self._dss.participant_id]) as check: + isa_change = self._dss_wrapper.put_isa( + check, + area_vertices=self._isa_area, + start_time=self._isa_start_time, + end_time=self._isa_end_time, + uss_base_url=self._isa.base_url, + isa_id=self._isa_id, + isa_version=self._isa_version, + alt_lo=self._isa.altitude_min, + alt_hi=self._isa.altitude_max, + ) + self._isa_version = isa_change.dss_query.isa.version - # TODO: Get ISA by ID + self.end_test_step() + + _create_isa_step() + + self._get_isa_by_id_step() self.end_test_case() - def _create_isa_step(self): - self.begin_test_step("Create ISA") - - start_time = arrow.utcnow().datetime + timedelta(seconds=1) - end_time = start_time + timedelta(minutes=60) - area = self._isa.footprint.to_vertices() - isa_change = mutate.put_isa( - area_vertices=area, - start_time=start_time, - end_time=end_time, - uss_base_url=self._isa.base_url, - isa_id=self._isa_id, - rid_version=self._dss.rid_version, - utm_client=self._dss.client, - isa_version=None, - alt_lo=self._isa.altitude_min, - alt_hi=self._isa.altitude_max, - ) - self.record_query(isa_change.dss_query.query) - for notification_query in isa_change.notifications.values(): - self.record_query(notification_query.query) - t_dss = isa_change.dss_query.query.request.timestamp - - with self.check("ISA created", [self._dss.participant_id]) as check: - if isa_change.dss_query.status_code == 200: - check.record_passed() - elif isa_change.dss_query.status_code == 201: - check.record_failed( - f"PUT ISA returned technically-incorrect 201", - Severity.Low, - "DSS should return 200 from PUT ISA, but instead returned the reasonable-but-technically-incorrect code 201", - query_timestamps=[t_dss], - ) - else: - check.record_failed( - f"PUT ISA returned {isa_change.dss_query.status_code}", - Severity.High, - f"DSS should return 200 from PUT ISA, but instead returned {isa_change.dss_query.status_code}", - query_timestamps=[t_dss], + def _update_and_search_isa_case(self): + self.begin_test_case("Update and search ISA") + + def _update_isa_step(): + self.begin_test_step("Update ISA") + + self._isa_end_time = self._isa_end_time + datetime.timedelta(seconds=1) + with self.check("ISA updated", [self._dss_wrapper.participant_id]) as check: + mutated_isa = self._dss_wrapper.put_isa( + check, + area_vertices=self._isa_area, + start_time=self._isa_start_time, + end_time=self._isa_end_time, + uss_base_url=self._isa.base_url, + isa_id=self._isa_id, + isa_version=self._isa_version, + 
alt_lo=self._isa.altitude_min, + alt_hi=self._isa.altitude_max, ) + self._isa_version = mutated_isa.dss_query.isa.version - with self.check("ISA ID matches", [self._dss.participant_id]) as check: - if isa_change.dss_query.isa.id != self._isa_id: - check.record_failed( - f"PUT ISA returned ISA with incorrect ID", - Severity.High, - f"DSS should have recorded and returned the ISA ID {self._isa_id} as requested in the path, but response body instead specified {isa_change.dss_query.isa.id}", - query_timestamps=[t_dss], + self.end_test_step() + + _update_isa_step() + + self._get_isa_by_id_step() + + def _search_invalid_params_step(): + self.begin_test_step("Search with invalid params") + + with self.check( + "Search request rejected", [self._dss_wrapper.participant_id] + ) as check: + _ = self._dss_wrapper.search_isas_expect_response_code( + check, + expected_error_codes={400}, + area=[], ) - with self.check("ISA URL matches", [self._dss.participant_id]) as check: - expected_flights_url = self._dss.rid_version.flights_url_of( - self._isa.base_url - ) - actual_flights_url = isa_change.dss_query.isa.flights_url - if actual_flights_url != expected_flights_url: - check.record_failed( - f"PUT ISA returned ISA with incorrect URL", - Severity.High, - f"DSS should have returned an ISA with a flights URL of {expected_flights_url}, but instead the ISA returned had a flights URL of {actual_flights_url}", - query_timestamps=[t_dss], + + self.end_test_step() + + _search_invalid_params_step() + + def _search_earliest_incl_step(): + self.begin_test_step("Search by earliest time (included)") + + with self.check( + "ISA returned by search", [self._dss_wrapper.participant_id] + ) as check: + earliest = self._isa_end_time - datetime.timedelta(minutes=1) + isas = self._dss_wrapper.search_isas( + check, + area=self._isa_area, + start_time=earliest, ) - with self.check("ISA start time matches", [self._dss.participant_id]) as check: - if ( - abs((isa_change.dss_query.isa.time_start - start_time).total_seconds()) - > MAX_SKEW - ): - check.record_failed( - "PUT ISA returned ISA with incorrect start time", - Severity.High, - f"DSS should have returned an ISA with a start time of {start_time}, but instead the ISA returned had a start time of {isa_change.dss_query.isa.time_start}", - query_timestamps=[t_dss], + if self._isa_id not in isas.isas.keys(): + check.record_failed( + f"ISAs search did not return expected ISA {self._isa_id}", + severity=Severity.High, + details=f"Search in area {self._isa_area} from time {earliest} returned ISAs {isas.isas.keys()}", + query_timestamps=[isas.dss_query.query.request.timestamp], + ) + + self.end_test_step() + + _search_earliest_incl_step() + + def _search_earliest_excl_step(): + self.begin_test_step("Search by earliest time (excluded)") + + with self.check( + "ISA not returned by search", [self._dss_wrapper.participant_id] + ) as check: + earliest = self._isa_end_time + datetime.timedelta(minutes=1) + isas = self._dss_wrapper.search_isas( + check, + area=self._isa_area, + start_time=earliest, ) - with self.check("ISA end time matches", [self._dss.participant_id]) as check: - if ( - abs((isa_change.dss_query.isa.time_end - end_time).total_seconds()) - > MAX_SKEW - ): - check.record_failed( - "PUT ISA returned ISA with incorrect end time", - Severity.High, - f"DSS should have returned an ISA with an end time of {end_time}, but instead the ISA returned had an end time of {isa_change.dss_query.isa.time_end}", - query_timestamps=[t_dss], + if self._isa_id in isas.isas.keys(): + 
check.record_failed( + f"ISAs search returned unexpected ISA {self._isa_id}", + severity=Severity.High, + details=f"Search in area {self._isa_area} from time {earliest} returned ISAs {isas.isas.keys()}", + query_timestamps=[isas.dss_query.query.request.timestamp], + ) + + self.end_test_step() + + _search_earliest_excl_step() + + def _search_latest_incl_step(): + self.begin_test_step("Search by latest time (included)") + + with self.check( + "ISA returned by search", [self._dss_wrapper.participant_id] + ) as check: + latest = self._isa_start_time + datetime.timedelta(minutes=1) + isas = self._dss_wrapper.search_isas( + check, + area=self._isa_area, + end_time=latest, ) - with self.check("ISA version format", [self._dss.participant_id]) as check: - if not all( - c not in "\0\t\r\n#%/:?@[\]" for c in isa_change.dss_query.isa.version - ): - check.record_failed( - "PUT ISA returned ISA with invalid version format", - Severity.High, - f"DSS returned an ISA with a version that is not URL-safe: {isa_change.dss_query.isa.version}", - query_timestamps=[t_dss], + if self._isa_id not in isas.isas.keys(): + check.record_failed( + f"ISAs search did not return expected ISA {self._isa_id}", + severity=Severity.High, + details=f"Search in area {self._isa_area} to time {latest} returned ISAs {isas.isas.keys()}", + query_timestamps=[isas.dss_query.query.request.timestamp], + ) + + self.end_test_step() + + _search_latest_incl_step() + + def _search_latest_excl_step(): + self.begin_test_step("Search by latest time (excluded)") + + with self.check( + "ISA not returned by search", [self._dss_wrapper.participant_id] + ) as check: + latest = self._isa_start_time - datetime.timedelta(minutes=1) + isas = self._dss_wrapper.search_isas( + check, + area=self._isa_area, + end_time=latest, ) + if self._isa_id in isas.isas.keys(): + check.record_failed( + f"ISAs search returned unexpected ISA {self._isa_id}", + severity=Severity.High, + details=f"Search in area {self._isa_area} to time {latest} returned ISAs {isas.isas.keys()}", + query_timestamps=[isas.dss_query.query.request.timestamp], + ) - with self.check("ISA response format", [self._dss.participant_id]) as check: - errors = schema_validation.validate( - self._dss.rid_version.openapi_path, - self._dss.rid_version.openapi_put_isa_response_path, - isa_change.dss_query.query.response.json, - ) - if errors: - details = "\n".join(f"[{e.json_path}] {e.message}" for e in errors) - check.record_failed( - "PUT ISA response format was invalid", - Severity.Medium, - "Found the following schema validation errors in the DSS response:\n" - + details, - query_timestamps=[t_dss], + self.end_test_step() + + _search_latest_excl_step() + + def _search_area_only_step(): + self.begin_test_step("Search by area only") + + with self.check( + "Successful ISA search", [self._dss_wrapper.participant_id] + ) as check: + isas = self._dss_wrapper.search_isas( + check, + area=self._isa_area, ) + if self._isa_id not in isas.isas.keys(): + check.record_failed( + f"ISAs search did not return expected ISA {self._isa_id}", + severity=Severity.High, + details=f"Search in area {self._isa_area} returned ISAs {isas.isas.keys()}", + query_timestamps=[isas.dss_query.query.request.timestamp], + ) - # TODO: Validate subscriber notifications + self.end_test_step() - self.end_test_step() + _search_area_only_step() - def _update_and_search_isa_case(self): - self.begin_test_case("Update and search ISA") + def _search_huge_area_step(): + self.begin_test_step("Search by huge area") - # TODO: Update ISA - # TODO: 
Get ISA by ID - # TODO: Search with invalid params - # TODO: Search by earliest time (included) - # TODO: Search by earliest time (excluded) - # TODO: Search by latest time (included) - # TODO: Search by latest time (excluded) - # TODO: Search by area only - # TODO: Search by huge area + with self.check( + "Search request rejected", [self._dss_wrapper.participant_id] + ) as check: + _ = self._dss_wrapper.search_isas_expect_response_code( + check, + expected_error_codes={413}, + area=HUGE_VERTICES, + ) + + self.end_test_step() + + _search_huge_area_step() self.end_test_case() diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/subscription_validation.py b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/subscription_validation.py index 1c89883739..fda32ab3e3 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/subscription_validation.py +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss/subscription_validation.py @@ -81,7 +81,7 @@ def _clean_any_sub(self): "Successful subscription query", [self._dss.participant_id] ) as check: fetched = self._dss_wrapper.search_subs( - check, self._isa.footprint.to_vertices() + check, [vertex.as_s2sphere() for vertex in self._isa.footprint] ) for sub_id in fetched.subscriptions.keys(): with self.check( @@ -232,7 +232,7 @@ def _check_properly_truncated( def _default_subscription_params(self, duration: datetime.timedelta) -> Dict: now = datetime.datetime.utcnow() return dict( - area_vertices=self._isa.footprint.to_vertices(), + area_vertices=[vertex.as_s2sphere() for vertex in self._isa.footprint], alt_lo=self._isa.altitude_min, alt_hi=self._isa.altitude_max, start_time=now, diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss_interoperability.py b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss_interoperability.py index 38ad78a2b4..e7b8758d17 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss_interoperability.py +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/common/dss_interoperability.py @@ -27,7 +27,7 @@ def _default_params(duration: datetime.timedelta) -> Dict: - now = datetime.datetime.utcnow() + now = datetime.datetime.now().astimezone() return dict( area_vertices=VERTICES, alt_lo=20, diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/dss_wrapper.py b/monitoring/uss_qualifier/scenarios/astm/netrid/dss_wrapper.py index 8b9cfea7dd..c05b7d16d1 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/dss_wrapper.py +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/dss_wrapper.py @@ -9,6 +9,7 @@ FetchedSubscriptions, RIDQuery, FetchedISA, + FetchedISAs, ) from monitoring.monitorlib.mutate import rid as mutate from monitoring.monitorlib.fetch import rid as fetch @@ -20,6 +21,8 @@ TestScenario, ) +MAX_SKEW = 1e-6 # seconds maximum difference between expected and actual timestamps + class DSSWrapper(object): """Wraps a DSS instance with test checks.""" @@ -90,6 +93,74 @@ def _handle_query_result( query_timestamps=[q.query.request.timestamp], ) + def search_isas( + self, + check: PendingCheck, + area: List[s2sphere.LatLng], + start_time: Optional[datetime.datetime] = None, + end_time: Optional[datetime.datetime] = None, + ) -> FetchedISAs: + """Search for ISAs at the DSS. + A check fail is considered of high severity and as such will raise a ScenarioCannotContinueError. 
+ + :return: the DSS response + """ + try: + isas = fetch.isas( + area=area, + start_time=start_time, + end_time=end_time, + rid_version=self._dss.rid_version, + session=self._dss.client, + ) + + self._handle_query_result(check, isas, f"Failed to search ISAs in {area}") + return isas + + except QueryError as e: + self._handle_query_error(check, e) + raise RuntimeError( + "DSS query was not successful, but a High Severity issue didn't interrupt execution" + ) + + def search_isas_expect_response_code( + self, + check: PendingCheck, + expected_error_codes: Set[int], + area: List[s2sphere.LatLng], + start_time: Optional[datetime.datetime] = None, + end_time: Optional[datetime.datetime] = None, + ) -> FetchedISAs: + """Attempt to search for ISAs at the DSS, and expect the specified HTTP response code. + A check fail is considered of high severity and as such will raise a ScenarioCannotContinueError. + + :return: the DSS response + """ + try: + isas = fetch.isas( + area=area, + start_time=start_time, + end_time=end_time, + rid_version=self._dss.rid_version, + session=self._dss.client, + ) + + self._handle_query_result( + check=check, + q=isas, + required_status_code=expected_error_codes, + fail_msg=f"Searching for ISAs resulted in an HTTP code not in {expected_error_codes}", + fail_details=f"Search area: {area}; from {start_time} to {end_time}", + ) + + return isas + + except QueryError as e: + self._handle_query_error(check, e) + raise RuntimeError( + "DSS query was not successful, but a High Severity issue didn't interrupt execution" + ) + def get_isa( self, check: PendingCheck, @@ -165,39 +236,79 @@ def put_isa( self._handle_query_result( check, mutated_isa.dss_query, f"Failed to insert ISA {isa_id}" ) + for notification_query in mutated_isa.notifications.values(): + self._scenario.record_query(notification_query.query) - dt = abs( - mutated_isa.dss_query.isa.time_end.timestamp() - end_time.timestamp() - ) - if dt > 0.001: + t_dss = mutated_isa.dss_query.query.request.timestamp + dss_isa = mutated_isa.dss_query.isa + + if mutated_isa.dss_query.query.status_code == 201: check.record_failed( - summary=f"DSS did not correctly create or update ISA; mismatched end time", - severity=Severity.Medium, + summary=f"PUT ISA returned technically-incorrect 201", + severity=Severity.Low, participants=[self._dss.participant_id], - details=f"Expected: '{end_time}', received: '{mutated_isa.dss_query.isa.time_end}'", - query_timestamps=[mutated_isa.dss_query.query.request.timestamp], + details="DSS should return 200 from PUT ISA, but instead returned the reasonable-but-technically-incorrect code 201", + query_timestamps=[t_dss], ) - elif isa_id != mutated_isa.dss_query.isa.id: + + if isa_id != dss_isa.id: check.record_failed( summary=f"DSS did not return correct ISA", severity=Severity.High, participants=[self._dss.participant_id], - details=f"Expected ISA ID {isa_id} but got {mutated_isa.dss_query.isa.id}", - query_timestamps=[mutated_isa.dss_query.query.request.timestamp], + details=f"Expected ISA ID {isa_id} but got {dss_isa.id}", + query_timestamps=[t_dss], ) - elif ( - isa_version is not None - and mutated_isa.dss_query.isa.version == isa_version - ): + + if isa_version is not None: + if dss_isa.version == isa_version: + check.record_failed( + summary=f"ISA was not modified", + severity=Severity.High, + participants=[self._dss.participant_id], + details=f"Got old version {isa_version} while expecting new version", + query_timestamps=[t_dss], + ) + if not all(c not in "\0\t\r\n#%/:?@[\]" for c in 
dss_isa.version): + check.record_failed( + summary=f"DSS returned ISA (ID {isa_id}) with invalid version format", + severity=Severity.High, + participants=[self._dss.participant_id], + details=f"DSS returned an ISA with a version that is not URL-safe: {dss_isa.version}", + query_timestamps=[t_dss], + ) + + if abs((dss_isa.time_start - start_time).total_seconds()) > MAX_SKEW: check.record_failed( - summary=f"ISA was not modified", + summary=f"DSS returned ISA (ID {isa_id}) with incorrect start time", severity=Severity.High, participants=[self._dss.participant_id], - details=f"Got old version {isa_version} while expecting new version", - query_timestamps=[mutated_isa.dss_query.query.request.timestamp], + details=f"DSS should have returned an ISA with a start time of {start_time}, but instead the ISA returned had a start time of {dss_isa.time_start}", + query_timestamps=[t_dss], ) - else: - return mutated_isa + if abs((dss_isa.time_end - end_time).total_seconds()) > MAX_SKEW: + check.record_failed( + summary=f"DSS returned ISA (ID {isa_id}) with incorrect end time", + severity=Severity.High, + participants=[self._dss.participant_id], + details=f"DSS should have returned an ISA with an end time of {end_time}, but instead the ISA returned had an end time of {dss_isa.time_end}", + query_timestamps=[t_dss], + ) + + expected_flights_url = self._dss.rid_version.flights_url_of(uss_base_url) + actual_flights_url = dss_isa.flights_url + if actual_flights_url != expected_flights_url: + check.record_failed( + summary=f"DSS returned ISA (ID {isa_id}) with incorrect URL", + severity=Severity.High, + participants=[self._dss.participant_id], + details=f"DSS should have returned an ISA with a flights URL of {expected_flights_url}, but instead the ISA returned had a flights URL of {actual_flights_url}", + query_timestamps=[t_dss], + ) + + # TODO: Validate subscriber notifications + + return mutated_isa except QueryError as e: self._handle_query_error(check, e) diff --git a/monitoring/uss_qualifier/scenarios/astm/netrid/v22a/dss/isa_simple.md b/monitoring/uss_qualifier/scenarios/astm/netrid/v22a/dss/isa_simple.md index f8b0b8d157..b7d971df78 100644 --- a/monitoring/uss_qualifier/scenarios/astm/netrid/v22a/dss/isa_simple.md +++ b/monitoring/uss_qualifier/scenarios/astm/netrid/v22a/dss/isa_simple.md @@ -42,37 +42,113 @@ When a pre-existing ISA needs to be deleted to ensure a clean workspace, any sub ### Create ISA test step -This step attempts to create an ISA with a 60-minute expiration. +This step attempts to create at the DSS the ISA provided as resource. #### ISA created check If the ISA cannot be created, the PUT DSS endpoint in **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is likely not implemented correctly. -#### ISA ID matches check +When the ISA is created, the DSS returns the ID of the ISA in the response body. If this ID does not match the ID in the resource path, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** was not implemented correctly and this check will fail. + +When the ISA is created, the DSS returns the URL of the ISA in the response body. If this URL does not match the URL requested, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail. + +The ISA creation request specified an exact start time slightly past now, so the DSS should have created an ISA starting at exactly that time. 
If the DSS response indicates the ISA start time is not this value, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail.
+
+The ISA creation request specified an exact end time, so the DSS should have created an ISA ending at exactly that time. If the DSS response indicates the ISA end time is not this value, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail.
+
+Because the ISA version must be used in URLs, it must be URL-safe even though the ASTM standards do not explicitly require this. If the indicated ISA version is not URL-safe, this check will fail.
+
+The API for **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** specifies an explicit format that the DSS responses must follow. If the DSS response does not validate against this format, this check will fail.
+
+### Get ISA by ID test step
+
+This step attempts to retrieve from the DSS the ISA that was just created.
+#### Successful ISA query check
+TODO
+ - fails if ID not OK!
+
+#### ISA version match check
+TODO
 When the ISA is created, the DSS returns the ID of the ISA in the response body. If this ID does not match the ID in the resource path, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** was not implemented correctly and this check will fail.
-#### ISA URL matches check
-When the ISA is created, the DSS returns the URL of the ISA in the response body. If this URL does not match the URL requested, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail.
+## Update and search ISA test case
-#### ISA start time matches check
+### Update ISA test step
-The ISA creation request specified an exact start time slightly past now, so the DSS should have created an ISA starting at exactly that time. If the DSS response indicates the ISA start time is not this value, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail.
+This step attempts to update at the DSS the previously created ISA, with a slightly different end time.
-#### ISA end time matches check
+#### ISA updated check
-The ISA creation request specified an exact end time, so the DSS should have created an ISA ending at exactly that time. If the DSS response indicates the ISA end time is not this value, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail.
+If the ISA cannot be updated, the PUT DSS endpoint in **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is likely not implemented correctly.
-#### ISA version format check
+When the ISA is updated, the DSS returns the ID of the ISA in the response body. If this ID does not match the ID in the resource path, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** was not implemented correctly and this check will fail.
-Because the ISA version must be used in URLs, it must be URL-safe even though the ASTM standards do not explicitly require this. If the indicated ISA version is not URL-safe, this check will fail.
+When the ISA is updated, the DSS returns the URL of the ISA in the response body. If this URL does not match the URL requested, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail.
+
+The ISA update request specified an exact start time, so the DSS should have returned an ISA starting at exactly that time. If the DSS response indicates the ISA start time is not this value, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail.
+
+The ISA update request specified an exact end time, so the DSS should have returned an ISA ending at exactly that time. If the DSS response indicates the ISA end time is not this value, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** is not implemented correctly and this check will fail.
+
+Because the ISA version must be used in URLs, it must be URL-safe even though the ASTM standards do not explicitly require this. If the indicated ISA version is not URL-safe, this check will fail.
-#### ISA response format check
 The API for **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** specifies an explicit format that the DSS responses must follow. If the DSS response does not validate against this format, this check will fail.
-## Update and search ISA test case
+
+### Get ISA by ID test step
+
+This step attempts to retrieve from the DSS the ISA that was just updated.
+
+#### Successful ISA query check
+TODO
+ - fails if ID not OK!
+
+#### ISA version match check
+TODO
+When the ISA is created, the DSS returns the ID of the ISA in the response body. If this ID does not match the ID in the resource path, **[astm.f3411.v22a.DSS0030](../../../../../requirements/astm/f3411/v22a.md)** was not implemented correctly and this check will fail.
+
+### Search with invalid params test step
+TODO
+
+#### Search request rejected check
+TODO
+
+### Search by earliest time (included) test step
+TODO
+
+#### ISA returned by search check
+TODO
+
+### Search by earliest time (excluded) test step
+TODO
+
+#### ISA not returned by search check
+TODO
+
+### Search by latest time (included) test step
+TODO
+
+#### ISA returned by search check
+TODO
+
+### Search by latest time (excluded) test step
+TODO
+
+#### ISA not returned by search check
+TODO
+
+### Search by area only test step
+TODO
+
+#### Successful ISA search check
+TODO
+
+### Search by huge area test step
+TODO
+
+#### Search request rejected check
+TODO
+
 ## Delete ISA test case
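A minimal sketch of the time-shifting behaviour that the new `reference_time`/`time_start`/`time_end` resource fields rely on, using plain `datetime` values in place of the `StringBasedDateTime` wrappers carried by `ServiceAreaSpecification`; the field values match the `service_area` entry added to resources.yaml above:

```python
from datetime import datetime, timezone

# Times as declared in the service_area resource (resources.yaml).
reference_time = datetime(2023, 1, 10, 0, 0, 0, 123456, tzinfo=timezone.utc)
time_start = datetime(2023, 1, 10, 0, 0, 1, 123456, tzinfo=timezone.utc)
time_end = datetime(2023, 1, 10, 1, 0, 1, 123456, tzinfo=timezone.utc)


def shifted(t: datetime, now: datetime) -> datetime:
    # Same arithmetic as ServiceAreaSpecification.shifted_time_start/_end:
    # translate t by the offset between the runtime clock and reference_time,
    # so the declared times keep their relative spacing but track "now".
    return t + (now - reference_time)


now = datetime.now(timezone.utc)
isa_start = shifted(time_start, now)  # ~1 second after now
isa_end = shifted(time_end, now)      # ~1 hour and 1 second after now
print(isa_start.isoformat(), isa_end.isoformat())
```

This keeps the resource declarative (fixed timestamps in configuration) while letting the scenario derive a fresh ISA validity window each run, which is why `ISASimple.__init__` computes `_isa_start_time` and `_isa_end_time` from `arrow.utcnow()` at construction time.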