From bafa306ca11542ab215e4431a0b8e85fa4c6d63f Mon Sep 17 00:00:00 2001 From: Doug Mills <110824173+dougmills-DIT@users.noreply.github.com> Date: Wed, 15 Jan 2025 08:30:03 +0000 Subject: [PATCH] Tp2000 1640 new ref doc data importer (#1370) * Updates to tests, alignment checks and UI to show pass and failure details * Updates to tests, alignment checks and UI to show pass and failure details * updates based on pre-commit hooks * updates based on pre-commit hooks * updates to function descriptions * updates based on pre commit hooks * updated based on comments * fix tests --- common/tests/test_util.py | 46 ++ common/util.py | 8 +- reference_documents/check/base.py | 302 +++++++---- reference_documents/check/check_runner.py | 14 +- .../check/ref_order_numbers.py | 10 +- .../check/ref_quota_definitions.py | 10 +- .../check/ref_quota_suspensions.py | 2 +- reference_documents/check/ref_rates.py | 68 ++- reference_documents/csv_importer/importer.py | 468 ++++++++++++++++++ .../reference_document_csv_upload_forms.py | 153 ++++++ .../alignment_reports/rerun_check.jinja | 31 ++ .../jinja2/reference_documents/index.jinja | 6 + .../create.jinja | 16 + .../create_success.jinja | 20 + .../details.jinja | 34 ++ .../reference_document_csv_upload/index.jinja | 34 ++ .../reference_document_versions/checks.jinja | 16 +- ...treportcheck_target_start_date_and_more.py | 147 ++++++ reference_documents/models.py | 165 ++++-- reference_documents/tasks.py | 40 ++ reference_documents/tests/checks/test_base.py | 135 ++++- .../tests/checks/test_check_runner.py | 1 + .../tests/checks/test_ref_order_numbers.py | 4 +- .../checks/test_ref_quota_definitions.py | 14 +- .../checks/test_ref_quota_suspensions.py | 6 +- .../tests/checks/test_ref_rates.py | 77 ++- reference_documents/tests/factories.py | 24 + .../tests/support/test_order_numbers.csv | 4 + .../tests/support/test_preferential_rates.csv | 3 + .../tests/support/test_quota_definitions.csv | 4 + .../tests/test_alignment_report_model.py | 74 ++- .../tests/test_csv_upload_model.py | 44 ++ .../tests/test_csv_upload_views.py | 146 ++++++ reference_documents/tests/test_importer.py | 324 ++++++++++++ reference_documents/urls.py | 42 ++ .../views/alignment_report_views.py | 113 ++++- .../views/reference_document_csv_upload.py | 103 ++++ 37 files changed, 2510 insertions(+), 198 deletions(-) create mode 100644 reference_documents/csv_importer/importer.py create mode 100644 reference_documents/forms/reference_document_csv_upload_forms.py create mode 100644 reference_documents/jinja2/reference_documents/alignment_reports/rerun_check.jinja create mode 100644 reference_documents/jinja2/reference_documents/reference_document_csv_upload/create.jinja create mode 100644 reference_documents/jinja2/reference_documents/reference_document_csv_upload/create_success.jinja create mode 100644 reference_documents/jinja2/reference_documents/reference_document_csv_upload/details.jinja create mode 100644 reference_documents/jinja2/reference_documents/reference_document_csv_upload/index.jinja create mode 100644 reference_documents/migrations/0003_csvupload_alignmentreportcheck_target_start_date_and_more.py create mode 100644 reference_documents/tests/support/test_order_numbers.csv create mode 100644 reference_documents/tests/support/test_preferential_rates.csv create mode 100644 reference_documents/tests/support/test_quota_definitions.csv create mode 100644 reference_documents/tests/test_csv_upload_model.py create mode 100644 reference_documents/tests/test_csv_upload_views.py create mode 100644 
reference_documents/tests/test_importer.py create mode 100644 reference_documents/views/reference_document_csv_upload.py diff --git a/common/tests/test_util.py b/common/tests/test_util.py index 33be1ddd6..388d3b2d5 100644 --- a/common/tests/test_util.py +++ b/common/tests/test_util.py @@ -1,3 +1,4 @@ +import datetime import json import os from unittest import mock @@ -11,6 +12,7 @@ from common.tests import models from common.tests.util import Dates from common.tests.util import wrap_numbers_over_max_digits +from common.util import TaricDateRange from common.validators import UpdateType from geo_areas.models import GeographicalArea from geo_areas.models import GeographicalAreaDescription @@ -582,3 +584,47 @@ def test_make_real_edit_create_delete(): # since the FK to geo area on description has on_delete=models.CASCADE this will delete the description as well assert deleted_geo_area == None assert workbasket.tracked_models.count() == 0 + + +@pytest.mark.parametrize( + "date_range,compared_date_range,expected", + ( + ( + TaricDateRange(datetime.date(2020, 1, 1), datetime.date(2020, 1, 2)), + TaricDateRange(datetime.date(2020, 1, 1), datetime.date(2020, 1, 2)), + True, + ), + ( + TaricDateRange(datetime.date(2020, 1, 1), datetime.date(2020, 1, 2)), + TaricDateRange(datetime.date(2020, 1, 1), datetime.date(2020, 1, 1)), + True, + ), + ( + TaricDateRange(datetime.date(2020, 1, 1), datetime.date(2020, 1, 1)), + TaricDateRange(datetime.date(2020, 1, 1), datetime.date(2020, 1, 2)), + False, + ), + ( + TaricDateRange(datetime.date(2020, 1, 1)), + TaricDateRange(datetime.date(2020, 1, 1), datetime.date(2020, 1, 2)), + True, + ), + ( + TaricDateRange(datetime.date(2020, 1, 1), datetime.date(2020, 1, 1)), + TaricDateRange(datetime.date(2020, 1, 1)), + False, + ), + ( + TaricDateRange(datetime.date(2020, 1, 1)), + TaricDateRange(datetime.date(2020, 1, 1)), + True, + ), + ( + TaricDateRange(datetime.date(2020, 1, 2)), + TaricDateRange(datetime.date(2020, 1, 1)), + False, + ), + ), +) +def test_contains(date_range, compared_date_range, expected): + assert date_range.contains(compared_date_range) == expected diff --git a/common/util.py b/common/util.py index a9c98089c..bbdecf610 100644 --- a/common/util.py +++ b/common/util.py @@ -205,7 +205,13 @@ def contains(self, compared_date_range: TaricDateRange): bool: True if the date range is fully contained within a provided date range, else False. """ lower_contained = self.lower <= compared_date_range.lower - upper_contained = self.upper_inf or self.upper >= compared_date_range.upper + if compared_date_range.upper_inf: + if self.upper_inf: + upper_contained = True + else: + upper_contained = False + else: + upper_contained = self.upper_inf or self.upper >= compared_date_range.upper return lower_contained and upper_contained diff --git a/reference_documents/check/base.py b/reference_documents/check/base.py index 31bcc14e3..72ae02b99 100644 --- a/reference_documents/check/base.py +++ b/reference_documents/check/base.py @@ -69,6 +69,83 @@ def run_check(self) -> (AlignmentReportCheckStatus, str): that the class represents. """ + @abc.abstractmethod + def get_area_id(self) -> (AlignmentReportCheckStatus, str): + """ + Abstract method, required to be implemented on inheriting classes. + + gets the area_id for the object being checked. + """ + + @abc.abstractmethod + def get_validity(self) -> (AlignmentReportCheckStatus, str): + """ + Abstract method, required to be implemented on inheriting classes. + + gets validity for the object being checked. 
+ """ + + def tap_geo_areas(self): + """ + Finds the geo areas / geo area groups in TAP for a given preferential + quota using data from the associated reference document. + + returns: + [GeographicalAreas] or None + """ + geo_areas_to_search = [] + + try: + # get geo area + geo_area = GeographicalArea.objects.latest_approved().get( + area_id=self.get_area_id(), + valid_between__contains=self.get_validity().lower, + ) + + geo_areas_to_search.append(geo_area) + + if geo_area.is_group(): + # if it's a group collect members + members = list( + x.member for x in geo_area.members.latest_approved().all() + ) + else: + # if not a group - get single member + members = [geo_area] + + # find all groups matching the validity range + query = Q( + valid_between__contains=self.get_validity().lower, + ) + + # filter groups by ends with - if available + if self.get_validity().upper: + query = query & Q(valid_between__contains=self.get_validity().upper) + + geo_area_groups = GeographicalArea.objects.latest_approved().filter(query) + + # filter the groups by members in group / single member + if len(members) > 0: + for geo_area_group in geo_area_groups: + match = True + for member in members: + if ( + not geo_area_group.members.all() + .filter(member=member) + .exists() + ): + match = False + break + + if match: + geo_areas_to_search.append(geo_area_group) + + # return results + return geo_areas_to_search + + except GeographicalArea.DoesNotExist: + return [] + class BaseQuotaDefinitionCheck(BaseCheck, abc.ABC): """ @@ -97,6 +174,16 @@ def __init__(self, ref_quota_definition: RefQuotaDefinition): ) self.reference_document = self.reference_document_version.reference_document + def get_area_id(self) -> (AlignmentReportCheckStatus, str): + """Returns the related area id from the reference_document.""" + + return self.reference_document.area_id + + def get_validity(self) -> (AlignmentReportCheckStatus, str): + """Gets validity for the object being checked.""" + + return self.ref_quota_definition.valid_between + def tap_order_number(self, order_number: str = None): """ Finds order number in TAP for a given preferential quota. @@ -112,23 +199,6 @@ def tap_order_number(self, order_number: str = None): return super().tap_order_number(order_number) - def geo_area(self): - """ - Finds the geo area in TAP for a given preferential quota using data from - the associated reference document. - - returns: - GeographicalArea - """ - geo_area = ( - GeographicalArea.objects.latest_approved() - .filter( - area_id=self.reference_document_version.reference_document.area_id, - ) - .first() - ) - return geo_area - def geo_area_description(self): """ Gets the geo area description for a given preferential quota. 
@@ -138,7 +208,7 @@ def geo_area_description(self): """ geo_area_desc = ( GeographicalAreaDescription.objects.latest_approved() - .filter(described_geographicalarea=self.geo_area()) + .filter(described_geographicalarea__area_id=self.reference_document.area_id) .last() ) return geo_area_desc.description @@ -205,7 +275,7 @@ def measures(self): valid_between__endswith__gte=self.ref_quota_definition.valid_between.lower, order_number=self.tap_order_number(), goods_nomenclature=self.commodity_code(), - geographical_area=self.geo_area(), + geographical_area__in=self.tap_geo_areas(), measure_type__sid=143, ) .order_by("valid_between") @@ -389,6 +459,19 @@ def __init__(self, ref_order_number: RefOrderNumber): """ super().__init__() self.ref_order_number = ref_order_number + self.reference_document = ( + ref_order_number.reference_document_version.reference_document + ) + + def get_area_id(self) -> (AlignmentReportCheckStatus, str): + """Returns the related area id from the reference_document.""" + + return self.reference_document.area_id + + def get_validity(self) -> (AlignmentReportCheckStatus, str): + """Gets validity for the object being checked.""" + + return self.ref_order_number.valid_between def tap_order_number(self, order_number: str = None): """ @@ -425,6 +508,19 @@ def __init__(self, ref_quota_suspension: RefQuotaSuspension): """ super().__init__() self.ref_quota_suspension = ref_quota_suspension + self.reference_document = ( + ref_quota_suspension.ref_quota_definition.ref_order_number.reference_document_version.reference_document + ) + + def get_area_id(self) -> (AlignmentReportCheckStatus, str): + """Returns the related area id from the reference_document.""" + + return self.reference_document.area_id + + def get_validity(self) -> (AlignmentReportCheckStatus, str): + """Gets validity for the object being checked.""" + + return self.ref_quota_suspension.valid_between def tap_quota_definition(self): """ @@ -504,6 +600,17 @@ def __init__(self, ref_rate: RefRate): """ super().__init__() self.ref_rate = ref_rate + self.reference_document = ref_rate.reference_document_version.reference_document + + def get_area_id(self) -> (AlignmentReportCheckStatus, str): + """Returns the related area id from the reference_document.""" + + return self.reference_document.area_id + + def get_validity(self) -> (AlignmentReportCheckStatus, str): + """Gets validity for the object being checked.""" + + return self.ref_rate.valid_between def get_snapshot(self, comm_code=None) -> Optional[CommodityTreeSnapshot]: """ @@ -527,6 +634,9 @@ def get_snapshot(self, comm_code=None) -> Optional[CommodityTreeSnapshot]: else: return None + # removes pairs of zeros until it's just the comm code, no trailing zeros + # e.g. 
0101010000 > 010101 + # this is required for CommodityCollectionLoader while item_id[-2:] == "00": item_id = item_id[0 : len(item_id) - 2] @@ -534,7 +644,11 @@ def get_snapshot(self, comm_code=None) -> Optional[CommodityTreeSnapshot]: prefix=item_id, ).load(current_only=True) - latest_transaction = Transaction.objects.order_by("created_at").last() + latest_transaction = ( + Transaction.objects.filter(workbasket__status="PUBLISHED") + .order_by("created_at") + .last() + ) snapshot = CommodityTreeSnapshot( commodities=commodities_collection.commodities, @@ -555,35 +669,21 @@ def tap_comm_code(self): return None goods = GoodsNomenclature.objects.latest_approved().filter( - ( - Q(valid_between__contains=self.ref_rate.valid_between.lower) - & Q(valid_between__contains=self.ref_rate.valid_between.upper) - ), + valid_between__contains=self.ref_rate.valid_between.lower, item_id=self.ref_rate.commodity_code, suffix=80, ) + if self.ref_rate.valid_between.upper: + goods = goods.filter( + valid_between__contains=self.ref_rate.valid_between.upper, + ) + if len(goods) == 0: return None return goods.first() - def tap_geo_area(self): - """ - Finds the latest approved version of a geographical area in TAP for a - given a preferential rate. - - returns: - GeographicalArea or None - """ - try: - return GeographicalArea.objects.latest_approved().get( - area_id=self.ref_rate.reference_document_version.reference_document.area_id, - ) - - except GeographicalArea.DoesNotExist: - return None - def tap_geo_area_description(self) -> Optional[str]: """ Finds the latest approved version of a geographical area description in @@ -592,14 +692,21 @@ def tap_geo_area_description(self) -> Optional[str]: returns: string (the description of a geographical area) or None """ - geo_area = ( - GeographicalAreaDescription.objects.latest_approved() - .filter(described_geographicalarea=self.tap_geo_area()) - .last() - ) + try: + geo_area_description = ( + GeographicalAreaDescription.objects.latest_approved() + .filter( + described_geographicalarea__area_id=self.reference_document.area_id, + ) + .last() + ) + except GeographicalAreaDescription.DoesNotExist: + return None + except AttributeError: + return None - if geo_area: - return geo_area.description + if geo_area_description: + return geo_area_description.description else: return None @@ -632,62 +739,59 @@ def tap_related_measures(self, comm_code_item_id: str = None): depending on query results """ if comm_code_item_id: - good = GoodsNomenclature.objects.latest_approved().filter( + query = Q( ( Q( valid_between__contains=self.ref_rate.valid_between.lower, ) - & Q( - valid_between__contains=self.ref_rate.valid_between.upper, - ) ), item_id=comm_code_item_id, suffix=80, ) + if self.ref_rate.valid_between.upper: + query = query & Q( + valid_between__contains=self.ref_rate.valid_between.upper, + ) + + good = GoodsNomenclature.objects.latest_approved().filter(query) + if len(good) == 1: - return ( - good.first() - .measures.latest_approved() - .filter( - ( - Q( - valid_between__contains=self.ref_rate.valid_between.lower, - ) - & Q( - valid_between__contains=self.ref_rate.valid_between.upper, - ) - ), - geographical_area=self.tap_geo_area(), - measure_type__sid__in=[ - 142, - ], # note : these are the measure types used to identify preferential tariffs - ) + query = Q( + valid_between__contains=self.ref_rate.valid_between.lower, + geographical_area__in=self.tap_geo_areas(), + measure_type__sid__in=[ + 142, + ], ) + + if self.ref_rate.valid_between.upper: + query = query & Q( + 
valid_between__contains=self.ref_rate.valid_between.upper, + ) + + return good.first().measures.latest_approved().filter(query) else: return [] + else: tap_comm_code = self.tap_comm_code() if tap_comm_code: - return ( - self.tap_comm_code() - .measures.latest_approved() - .filter( - ( - Q( - valid_between__contains=self.ref_rate.valid_between.lower, - ) - & Q( - valid_between__contains=self.ref_rate.valid_between.upper, - ) - ), - geographical_area=self.tap_geo_area(), - measure_type__sid__in=[ - 142, - ], # note : these are the measure types used to identify preferential tariffs - ) + query = Q( + valid_between__contains=self.ref_rate.valid_between.lower, + geographical_area__in=self.tap_geo_areas(), + measure_type__sid__in=[ + 142, + ], ) + + if self.ref_rate.valid_between.upper: + query = query & Q( + valid_between__contains=self.ref_rate.valid_between.upper, + ) + + return self.tap_comm_code().measures.latest_approved().filter(query) else: return [] @@ -695,6 +799,7 @@ def tap_recursive_comm_code_check( self, snapshot: CommodityTreeSnapshot, parent_item_id: str, + parent_item_suffix: str, level: int = 1, ): """ @@ -710,6 +815,7 @@ def tap_recursive_comm_code_check( Args: snapshot: CommodityTreeSnapshot, A snapshot from self.ref_rate.comm_code and children parent_item_id: str, The parent comm code item_id to check + parent_item_suffix: str, The parent comm code suffix to check level: int, the numeric level below the parent comm code that is currently being performed at @@ -719,9 +825,24 @@ def tap_recursive_comm_code_check( # find comm code from snapshot child_commodities = [] for commodity in snapshot.commodities: - if commodity.item_id == parent_item_id and commodity.suffix == "80": - child_commodities = snapshot.get_children(commodity) - break + if ( + commodity.item_id == parent_item_id + and commodity.suffix == parent_item_suffix + ): + # check validity + + if commodity.valid_between.contains(self.ref_rate.valid_between): + + tmp_child_commodities = snapshot.get_children(commodity) + # strange bug in get_children - sometimes it returns the parent. 
+ for child_comm in tmp_child_commodities: + if ( + child_comm.item_id == parent_item_id + and child_comm.suffix == parent_item_suffix + ): + continue + else: + child_commodities.append(child_comm) if len(child_commodities) == 0: print(f'{"-" * level} no more children') @@ -732,11 +853,14 @@ def tap_recursive_comm_code_check( related_measures = self.tap_related_measures(child_commodity.item_id) if len(related_measures) == 0: - print(f'{"-" * level} FAIL : {child_commodity.item_id}') + print( + f'{"-" * level} FAIL : {child_commodity.item_id} : {child_commodity.suffix}', + ) results.append( self.tap_recursive_comm_code_check( snapshot, child_commodity.item_id, + child_commodity.suffix, level + 1, ), ) diff --git a/reference_documents/check/check_runner.py b/reference_documents/check/check_runner.py index 935b1c8d9..be6e8e6f4 100644 --- a/reference_documents/check/check_runner.py +++ b/reference_documents/check/check_runner.py @@ -102,7 +102,11 @@ def run(self): logger.info(f"starting checks for rate {ref_rate.commodity_code}") for ref_rate_check in Checks.get_checks_for(BaseRateCheck): logger.info(f"starting run: check {ref_rate_check.__class__.__name__}") - self.capture_check_result(ref_rate_check(ref_rate), ref_rate=ref_rate) + self.capture_check_result( + ref_rate_check(ref_rate), + ref_rate=ref_rate, + target_start_date=ref_rate.valid_between.lower, + ) # Order number checks for ref_order_number in self.reference_document_version.ref_order_numbers.all(): @@ -118,6 +122,7 @@ def run(self): self.capture_check_result( order_number_check(ref_order_number), ref_order_number=ref_order_number, + target_start_date=ref_order_number.valid_between.lower, ), ) @@ -142,6 +147,7 @@ def run(self): parent_has_failed_or_skipped_result=self.status_contains_failed_or_skipped( order_number_check_statuses, ), + target_start_date=ref_quota_definition.valid_between.lower, ), ) @@ -164,6 +170,7 @@ def run(self): parent_has_failed_or_skipped_result=self.status_contains_failed_or_skipped( pref_quota_check_statuses, ), + target_start_date=ref_quota_suspension.valid_between.lower, ) # Quota definition checks (range) for ( @@ -183,6 +190,7 @@ def run(self): parent_has_failed_or_skipped_result=self.status_contains_failed_or_skipped( order_number_check_statuses, ), + target_start_date=ref_quota_definition.valid_between.lower, ), ) @@ -206,6 +214,7 @@ def run(self): parent_has_failed_or_skipped_result=self.status_contains_failed_or_skipped( pref_quota_check_statuses, ), + target_start_date=pref_suspension.valid_between.lower, ) self.alignment_report.complete() self.alignment_report.save() @@ -221,6 +230,7 @@ def capture_check_result( ref_quota_suspension=None, ref_quota_suspension_range=None, parent_has_failed_or_skipped_result=None, + target_start_date=None, ) -> AlignmentReportCheckStatus: """ Captures the result if a single check and stores it in the database as a @@ -235,6 +245,7 @@ def capture_check_result( ref_quota_suspension: RefQuotaSuspension if available or None ref_quota_suspension_range: RefQuotaSuspensionRange if available or None parent_has_failed_or_skipped_result: boolean + target_start_date: datetime Returns: AlignmentReportCheckStatus: the status of the check @@ -256,6 +267,7 @@ def capture_check_result( "ref_quota_suspension_range": ref_quota_suspension_range, "status": status, "message": message, + "target_start_date": target_start_date, } AlignmentReportCheck.objects.create(**kwargs) diff --git a/reference_documents/check/ref_order_numbers.py b/reference_documents/check/ref_order_numbers.py index 
454b93f7f..610c2dae4 100644 --- a/reference_documents/check/ref_order_numbers.py +++ b/reference_documents/check/ref_order_numbers.py @@ -19,16 +19,16 @@ def run_check(self): """ # handle incomplete order number dates (from import) if self.ref_order_number.valid_between is None: - message = f"order number {self.ref_order_number.order_number} cant be checked, no validity date range" + message = f"order number {self.ref_order_number.order_number} cant be checked, no validity date range provided on reference document data" print("FAIL", message) return AlignmentReportCheckStatus.FAIL, message # Verify that the order number exists in TAP elif not self.tap_order_number(): - message = ( - f"order number not found matching {self.ref_order_number.order_number}" - ) + message = f"order number not found matching {self.ref_order_number.order_number} validity {self.ref_order_number.valid_between}" print("FAIL", message) return AlignmentReportCheckStatus.FAIL, message else: - print(f"PASS - order number {self.tap_order_number()} found") + print( + f"PASS - order number {self.tap_order_number()} with validity {self.ref_order_number.valid_between} matched", + ) return AlignmentReportCheckStatus.PASS, "" diff --git a/reference_documents/check/ref_quota_definitions.py b/reference_documents/check/ref_quota_definitions.py index b72f3344a..3dcbd6896 100644 --- a/reference_documents/check/ref_quota_definitions.py +++ b/reference_documents/check/ref_quota_definitions.py @@ -18,17 +18,15 @@ def run_check(self): string: corresponding message for the status. """ if not self.commodity_code(): - message = f"FAIL - commodity code not found" + message = f"FAIL - commodity code {self.ref_quota_definition.commodity_code} not found" return AlignmentReportCheckStatus.FAIL, message elif not self.quota_definition(): - message = f"FAIL - quota definition not found" + message = f"FAIL - quota definition for order number {self.ref_order_number.order_number} and validity {self.ref_quota_definition.valid_between} not found" return AlignmentReportCheckStatus.FAIL, message elif not self.measures(): - message = ( - f"FAIL - measure(s) spanning whole quota definition period not found" - ) + message = f"FAIL - measure(s) spanning whole quota definition period not found for quota definition with order number {self.ref_order_number.order_number} and validity {self.ref_quota_definition.valid_between} " return AlignmentReportCheckStatus.FAIL, message elif not self.duty_rate_matches(): @@ -44,7 +42,7 @@ def run_check(self): if condition.duty_sentence != "": duty_sentences.append(condition.duty_sentence) - message = f"FAIL - duty rate does not match, expected {self.ref_quota_definition.duty_rate} to be in ({' or '.join(duty_sentences)})" + message = f"FAIL - duty rate does not match, expected {self.ref_quota_definition.duty_rate} to be in ({' or '.join(duty_sentences)}) for quota definition with order number {self.ref_order_number.order_number} and validity {self.ref_quota_definition.valid_between} " return AlignmentReportCheckStatus.FAIL, message else: return AlignmentReportCheckStatus.PASS, "" diff --git a/reference_documents/check/ref_quota_suspensions.py b/reference_documents/check/ref_quota_suspensions.py index 6c518bead..4c25e0e1c 100644 --- a/reference_documents/check/ref_quota_suspensions.py +++ b/reference_documents/check/ref_quota_suspensions.py @@ -18,7 +18,7 @@ def run_check(self): string: corresponding message for the status. 
""" if not self.tap_suspension(): - message = f"FAIL - quota suspension not found" + message = f"FAIL - quota suspension not found for quota linked to order number {self.ref_quota_suspension.ref_quota_definition.ref_order_number.order_number} and quota validity {self.ref_quota_suspension.ref_quota_definition.valid_between} " print(message) return AlignmentReportCheckStatus.FAIL, message else: diff --git a/reference_documents/check/ref_rates.py b/reference_documents/check/ref_rates.py index ecbfe7a90..df7dccda6 100644 --- a/reference_documents/check/ref_rates.py +++ b/reference_documents/check/ref_rates.py @@ -18,7 +18,7 @@ def run_check(self): """ # comm code live on EIF date if not self.tap_comm_code(): - message = f"{self.ref_rate.commodity_code} {self.tap_geo_area_description()} comm code not live" + message = f"Rate {self.ref_rate.commodity_code} {self.ref_rate.valid_between}: commodity code not found for period." print("FAIL", message) return AlignmentReportCheckStatus.FAIL, message @@ -27,21 +27,65 @@ def run_check(self): # this is ok - there is a single measure matching the expected query if len(measures) == 1: - return AlignmentReportCheckStatus.PASS, "" + return ( + AlignmentReportCheckStatus.PASS, + f"{self.tap_comm_code()} {self.ref_rate.valid_between}: rate for commodity code matched", + ) # this is not inline with expected measures presence - check comm code children elif len(measures) == 0: - # check 1 level down for presence of measures - match = self.tap_recursive_comm_code_check( - self.get_snapshot(), - self.ref_rate.commodity_code, - 80, - ) + # check parents from chapter level snapshot + parent_snapshot = self.get_snapshot(self.ref_rate.commodity_code[0:4]) + parent_commodities = [] + child_commodity = None + match_parents = False + match_children = False + + for commodity in parent_snapshot.commodities: + if ( + commodity.item_id == self.ref_rate.commodity_code + and commodity.suffix == "80" + ): + child_commodity = commodity + break + + if child_commodity is not None: + next_parent = child_commodity + while True: + next_parent = parent_snapshot.get_parent(next_parent) + if next_parent is None: + break + + parent_commodities.append(next_parent) + related_measures = self.tap_related_measures(next_parent.item_id) + if len(related_measures) > 0: + match_parents = True + break + if ( + next_parent.item_id + == self.ref_rate.commodity_code[0:4] + "000000" + and next_parent.suffix == "80" + ): + break - message = f"{self.tap_comm_code()} : " + if not match_parents: + # children recursively + match_children = self.tap_recursive_comm_code_check( + self.get_snapshot(), + self.ref_rate.commodity_code, + "80", + 1, + ) - if match: - message += f"matched with children" + message = f"Rate {self.tap_comm_code()} {self.ref_rate.valid_between}: " + + if match_children: + message += f"matched (against commodity code children)" + print("PASS", message) + + return AlignmentReportCheckStatus.PASS, message + if match_parents: + message += f"matched (against commodity code parent)" print("PASS", message) return AlignmentReportCheckStatus.PASS, message @@ -52,6 +96,6 @@ def run_check(self): return AlignmentReportCheckStatus.FAIL, message else: - message = f"{self.tap_comm_code()} : multiple measures match" + message = f"Rate {self.tap_comm_code()} {self.ref_rate.valid_between} : multiple measures match" print("WARNING", message) return AlignmentReportCheckStatus.WARNING, message diff --git a/reference_documents/csv_importer/importer.py b/reference_documents/csv_importer/importer.py new file 
mode 100644 index 000000000..857f5a26b --- /dev/null +++ b/reference_documents/csv_importer/importer.py @@ -0,0 +1,468 @@ +import csv +import re +from datetime import datetime +from io import StringIO +from logging import getLogger + +from django.db import transaction + +from common.util import TaricDateRange +from geo_areas.models import GeographicalArea +from reference_documents.models import CSVUpload +from reference_documents.models import ReferenceDocument +from reference_documents.models import ReferenceDocumentCsvUploadStatus +from reference_documents.models import ReferenceDocumentVersion +from reference_documents.models import ReferenceDocumentVersionStatus +from reference_documents.models import RefOrderNumber +from reference_documents.models import RefQuotaDefinition +from reference_documents.models import RefRate + +logger = getLogger(__name__) + + +class ReferenceDocumentCSVImporter: + def __init__(self, csv_upload: CSVUpload): + self.csv_upload = csv_upload + + def run(self): + """ + Runs the import process. + + Executes the parsing and database population from the provided CSVUpload + object + """ + if ( + not self.csv_upload.preferential_rates_csv_data + and not self.csv_upload.order_number_csv_data + and not self.csv_upload.quota_definition_csv_data + ): + # mark csv upload as errored, and add message + self.csv_upload.status = ReferenceDocumentCsvUploadStatus.ERRORED + self.csv_upload.error_details = "No CSV data to process, exiting." + self.csv_upload.save() + else: + # process preferential rates + try: + self.csv_upload.processing() + self.csv_upload.save() + # make all changes or none, atomic transaction + with transaction.atomic(): + if self.csv_upload.preferential_rates_csv_data: + self.import_preferential_rates_csv_data() + + # process order numbers + if self.csv_upload.order_number_csv_data: + self.import_order_number_csv_data() + + # process quota definitions + if self.csv_upload.quota_definition_csv_data: + self.import_quota_definition_csv_data() + + self.csv_upload.completed() + self.csv_upload.save() + except Exception as ex: + self.csv_upload.errored() + # add error to CSV upload + if hasattr(ex, "message"): + ex_message = ex.message + else: + ex_message = ex + self.csv_upload.error_details = f"{ex.__class__.__name__}:{ex_message}" + self.csv_upload.save() + + def find_reference_document(self, area_id): + """ + Checks the database to see if a reference document exists matching the area_id + Args: + area_id: the area id the reference document is associated with as a string + + Returns: ReferenceDocument, or none if not matched + + """ + ref_doc_ver_query = ReferenceDocument.objects.filter( + area_id=area_id, + ) + + if ref_doc_ver_query.exists(): + return ref_doc_ver_query.first() + + return None + + def find_reference_document_version( + self, + reference_document_version: float, + reference_document: ReferenceDocument, + status=None, + ): + """ + + Args: + reference_document_version: float, the version of the reference document + reference_document: The reference document as a ReferenceDocument + status: Optional, the status of the reference document from the choices available from ReferenceDocumentVersionStatus.choices + + Returns: boolean, True if matched to a reference document version that is editable, otherwise false + + """ + if status: + ref_doc_ver_query = ReferenceDocumentVersion.objects.filter( + version=reference_document_version, + reference_document=reference_document, + status=status, + ) + else: + ref_doc_ver_query = 
ReferenceDocumentVersion.objects.filter( + version=reference_document_version, + reference_document=reference_document, + ) + + if ref_doc_ver_query.exists(): + return ref_doc_ver_query.first() + + return None + + @staticmethod + def verify_area_id_exists(area_id): + """ + Verifies that an area id exists in the database. + + Args: + area_id: the area id, e.g. 'JP' + + Returns: + None or raises exception + """ + if ( + not GeographicalArea.objects.latest_approved() + .filter(area_id=area_id) + .exists() + ): + raise ValueError(f"Area ID does not exist in TAP data: {area_id}") + + def verify_comm_code(self, comm_code): + """ + Verifies that a comm code exists in the database. + + Args: + comm_code: the comm code as a string + + Returns: + None or raises exception + """ + if not bool(re.match("^[0123456789]+$", comm_code)): + raise ValueError( + f"{comm_code} is not a valid comm code, it can only contain numbers", + ) + + if len(comm_code) != 10: + raise ValueError( + f"{comm_code} is not a valid comm code, it should be 10 characters long", + ) + + def import_preferential_rates_csv_data(self): + """ + Imports preferential rates data from CSV files. + + Returns: + None or raises exception + """ + logger.info(f" -- IMPORTING PREFERENTIAL RATES") + data = self.get_dictionary_from_csv_data( + self.csv_upload.preferential_rates_csv_data, + ) + + # verify headers + expected_headers = [ + "comm_code", + "rate", + "validity_start", + "validity_end", + "area_id", + "document_version", + ] + + for header in expected_headers: + if header not in data[0].keys(): + raise ValueError( + f"CSV data for preferential rates missing header {header}", + ) + + for row in data: + self.verify_area_id_exists(row["area_id"]) + self.verify_comm_code(row["comm_code"]) + reference_document_version = self.get_or_create_reference_document_version( + row, + ) + + # check if data row exists, use comm code and start date + matching_row = reference_document_version.ref_rates.filter( + commodity_code=row["comm_code"], + valid_between__startswith=row["validity_start"], + ) + + start_date = datetime( + *[int(x) for x in row["validity_start"].split("-")], + ).date() + end_date = row["validity_end"] + + if end_date == "": + end_date = None + else: + end_date = datetime( + *[int(x) for x in row["validity_end"].split("-")], + ).date() + + if matching_row.exists(): + raise Exception( + f"Preferential Rate already exists, details : {row}, matched on commodity_code and start_date.", + ) + else: + RefRate.objects.create( + reference_document_version=reference_document_version, + commodity_code=row["comm_code"], + duty_rate=row["rate"], + valid_between=TaricDateRange(start_date, end_date), + ) + logger.info(f" -- COMPLETED IMPORTING PREFERENTIAL RATES : count: {len(data)}") + + def get_or_create_reference_document_version(self, row): + """ + Gets or creates the reference document version based on the CSV row. 
+ + Args: + row: dict, key value pairs of data from the CSV row + + Returns: + ReferenceDocumentVersion or raises exception + """ + # check if reference document exists + if self.find_reference_document(row["area_id"]): + # use existing reference document + reference_document = ReferenceDocument.objects.get( + area_id=row["area_id"], + ) + else: + # create new reference document + reference_document = ReferenceDocument.objects.create( + area_id=row["area_id"], + title=f'Reference document for area ID {row["area_id"]}', + ) + # check if reference document version is available and editable + reference_document_version = self.find_reference_document_version( + float(row["document_version"]), + reference_document, + ) + # raise exception if the version exists but is not in editing + if reference_document_version: + if ( + reference_document_version.status + != ReferenceDocumentVersionStatus.EDITING + ): + raise Exception( + f"Reference document version {reference_document_version.reference_document.area_id}:{reference_document_version.version} has status {reference_document_version.status} and can not be altered.", + ) + else: + reference_document_version = ReferenceDocumentVersion.objects.create( + version=row["document_version"], + reference_document=reference_document, + ) + return reference_document_version + + def import_order_number_csv_data(self): + """ + Imports order numbers data from CSV files. + + Returns: + None or raises exception + """ + data = self.get_dictionary_from_csv_data(self.csv_upload.order_number_csv_data) + logger.info(f" -- IMPORTING ORDER NUMBERS") + + expected_headers = [ + "order_number", + "validity_start", + "validity_end", + "parent_order_number", + "coefficient", + "relationship_type", + "area_id", + "document_version", + ] + + for header in expected_headers: + if header not in data[0].keys(): + raise ValueError(f"CSV data for order numbers missing header {header}") + + # only ones without parents + for row in data: + self.verify_area_id_exists(row["area_id"]) + + if row["parent_order_number"] != "": + continue + self.process_order_number(row) + + # process order numbers with parents + for row in data: + if row["parent_order_number"] == "": + continue + + self.process_order_number(row) + logger.info(f" -- COMPLETED IMPORTING ORDER NUMBERS : count: {len(data)}") + + def process_order_number(self, row): + """ + Processes order numbers data from CSV files. 
+ + Args: + row: dict, key value pairs of data from the CSV row + + Returns: + None or raises exception + """ + reference_document_version = self.get_or_create_reference_document_version(row) + start_date = datetime( + *[int(x) for x in row["validity_start"].split("-")], + ).date() + end_date = row["validity_end"] + if end_date == "": + end_date = None + else: + end_date = datetime( + *[int(x) for x in row["validity_end"].split("-")], + ).date() + # check if data row exists, use comm code and start date + matching_row = reference_document_version.ref_order_numbers.filter( + order_number=row["order_number"], + valid_between__startswith=row["validity_start"], + ) + if matching_row.exists(): + raise Exception( + f"Order Number already exists, details : {row}, matched on order number and start_date.", + ) + else: + coefficient = row["coefficient"] + parent_order_number = row["parent_order_number"] + relationship_type = row["relationship_type"] + + if coefficient == "": + coefficient = None + + if parent_order_number == "": + parent_order_number = None + else: + parent_order_number_query = ( + reference_document_version.ref_order_numbers.filter( + order_number=parent_order_number, + ) + ) + if parent_order_number_query.exists(): + parent_order_number = parent_order_number_query.first() + else: + raise Exception( + f"Parent Order Number {parent_order_number} does not exist.", + ) + + if relationship_type == "": + relationship_type = None + + RefOrderNumber.objects.create( + reference_document_version=reference_document_version, + order_number=row["order_number"], + valid_between=TaricDateRange(start_date, end_date), + coefficient=coefficient, + main_order_number=parent_order_number, + relation_type=relationship_type, + ) + + def import_quota_definition_csv_data(self): + """ + Imports quota definition data from CSV files. 
+ + Returns: + None or raises exception + """ + data = self.get_dictionary_from_csv_data( + self.csv_upload.quota_definition_csv_data, + ) + logger.info(f" -- IMPORTING QUOTA DEFINITIONS") + + expected_headers = [ + "order_number", + "comm_code", + "duty_rate", + "initial_volume", + "measurement", + "validity_start", + "validity_end", + "area_id", + "document_version", + ] + + for header in expected_headers: + if header not in data[0].keys(): + raise ValueError( + f"CSV data for quota definitions missing header {header}", + ) + + for row in data: + self.verify_area_id_exists(row["area_id"]) + self.verify_comm_code(row["comm_code"]) + reference_document_version = self.get_or_create_reference_document_version( + row, + ) + + # check if data row exists, use comm code and start date + matching_row = reference_document_version.ref_quota_definitions().filter( + commodity_code=row["comm_code"], + ref_order_number__order_number=row["order_number"], + valid_between__startswith=row["validity_start"], + ) + + start_date = datetime( + *[int(x) for x in row["validity_start"].split("-")], + ).date() + end_date = row["validity_end"] + + if end_date == "": + end_date = None + else: + end_date = datetime( + *[int(x) for x in row["validity_end"].split("-")], + ).date() + + order_number = reference_document_version.ref_order_numbers.filter( + order_number=row["order_number"], + ) + + if not order_number.exists(): + raise Exception(f'Order Number {row["order_number"]} does not exist.') + + volume = float(row["initial_volume"]) + measurement = row["measurement"] + + if matching_row.exists(): + raise Exception( + f"Quota Definition already exists, details : {row}, matched on commodity_code, order number and start_date.", + ) + else: + RefQuotaDefinition.objects.create( + commodity_code=row["comm_code"], + duty_rate=row["duty_rate"], + valid_between=TaricDateRange(start_date, end_date), + ref_order_number=order_number.first(), + volume=volume, + measurement=measurement, + ) + logger.info(f" -- COMPLETED IMPORTING QUOTA DEFINITIONS : count: {len(data)}") + + def get_dictionary_from_csv_data(self, string): + """ + Returns a dictionary from CSV string. 
+ + Returns: + dictionary or raises exception + """ + csv_string_io = StringIO(string) + csv_reader = csv.DictReader(csv_string_io) + data = [row for row in csv_reader] + return data diff --git a/reference_documents/forms/reference_document_csv_upload_forms.py b/reference_documents/forms/reference_document_csv_upload_forms.py new file mode 100644 index 000000000..0e2bff82a --- /dev/null +++ b/reference_documents/forms/reference_document_csv_upload_forms.py @@ -0,0 +1,153 @@ +from crispy_forms_gds.helper import FormHelper +from crispy_forms_gds.layout import Layout +from crispy_forms_gds.layout import Size +from crispy_forms_gds.layout import Submit +from django import forms +from django.core.exceptions import ValidationError + +from reference_documents.models import CSVUpload +from reference_documents.tasks import import_reference_document_data + + +class ReferenceDocumentCreateCsvUploadForm(forms.ModelForm): + preferential_rates_csv_data = forms.FileField(required=False) + order_number_csv_data = forms.FileField(required=False) + quota_definition_csv_data = forms.FileField(required=False) + + class Meta: + model = CSVUpload + fields = [ + "preferential_rates_csv_data", + "order_number_csv_data", + "quota_definition_csv_data", + ] + + def __init__( + self, + *args, + **kwargs, + ): + super().__init__(*args, **kwargs) + self.helper = FormHelper(self) + self.helper.label_size = Size.SMALL + self.helper.legend_size = Size.SMALL + self.helper.layout = Layout( + "preferential_rates_csv_data", + "order_number_csv_data", + "quota_definition_csv_data", + Submit( + "submit", + "Save", + data_module="govuk-button", + data_prevent_double_click="true", + ), + ) + + def clean(self): + pass + + # check at least one file has been uploaded + check_fields = [ + "preferential_rates_csv_data", + "order_number_csv_data", + "quota_definition_csv_data", + ] + + at_least_one = False + for field in check_fields: + if field in self.cleaned_data.keys(): + at_least_one = True + + if not at_least_one: + self.add_error( + "preferential_rates_csv_data", + "Upload at least one CSV file in any of the file fields", + ) + + if len(self.errors): + raise forms.ValidationError(" & ".join(self.errors)) + + def clean_preferential_rates_csv_data(self): + expected_headers = [ + "comm_code", + "rate", + "validity_start", + "validity_end", + "area_id", + "document_version", + ] + if self.cleaned_data.get("preferential_rates_csv_data"): + headers = ( + self.cleaned_data.get("preferential_rates_csv_data") + .file.readline() + .decode("utf-8") + ) + headers_list = headers.strip().split(",") + if headers_list == expected_headers: + return headers + self.cleaned_data.get( + "preferential_rates_csv_data", + ).file.read().decode("utf-8") + else: + raise ValidationError( + f"Headers not correct, expected {expected_headers} got {headers_list}", + ) + + def clean_order_number_csv_data(self): + expected_headers = [ + "order_number", + "validity_start", + "validity_end", + "parent_order_number", + "coefficient", + "relationship_type", + "area_id", + "document_version", + ] + if self.cleaned_data.get("order_number_csv_data"): + headers = ( + self.cleaned_data.get("order_number_csv_data") + .file.readline() + .decode("utf-8") + ) + headers_list = headers.strip().split(",") + if headers_list == expected_headers: + return headers + self.cleaned_data.get( + "order_number_csv_data", + ).file.read().decode("utf-8") + else: + raise ValidationError( + f"Headers not correct, expected {expected_headers} got {headers_list}", + ) + + def 
clean_quota_definition_csv_data(self): + expected_headers = [ + "order_number", + "comm_code", + "duty_rate", + "initial_volume", + "measurement", + "validity_start", + "validity_end", + "area_id", + "document_version", + ] + if self.cleaned_data.get("quota_definition_csv_data"): + headers = ( + self.cleaned_data.get("quota_definition_csv_data") + .file.readline() + .decode("utf-8") + ) + headers_list = headers.strip().split(",") + if headers_list == expected_headers: + return headers + self.cleaned_data.get( + "quota_definition_csv_data", + ).file.read().decode("utf-8") + else: + raise ValidationError( + f"Headers not correct, expected {expected_headers} got {headers_list}", + ) + + def save(self, **kwargs): + self.instance.save() + import_reference_document_data.delay(self.instance.pk) + return diff --git a/reference_documents/jinja2/reference_documents/alignment_reports/rerun_check.jinja b/reference_documents/jinja2/reference_documents/alignment_reports/rerun_check.jinja new file mode 100644 index 000000000..0328f7bb5 --- /dev/null +++ b/reference_documents/jinja2/reference_documents/alignment_reports/rerun_check.jinja @@ -0,0 +1,31 @@ +{% extends "layouts/layout.jinja" %} + +{% from "components/table/macro.njk" import govukTable %} +{% from "components/tabs/macro.njk" import govukTabs %} +{% from "components/breadcrumbs/macro.njk" import govukBreadcrumbs %} + +{% set page_title = "Rerun check for Alignment Report details for " ~ reference_document_version.reference_document.area_id ~ " version " ~ reference_document_version.version %} + +{% block breadcrumb %} + {{ govukBreadcrumbs({ + "items": [{"text": "Home", "href": url("home")}, + {"text": "View reference documents", "href": url("reference_documents:index")}, + {"text": "Reference document " ~ reference_document_version.reference_document.area_id, "href": url("reference_documents:details", kwargs={"pk":reference_document_version.reference_document.pk})}, + {"text": "Version " ~ reference_document_version.version}] + }) }} +{% endblock %} + +{% block content %} +
+ Created at : {{ object.created_at.strftime("%Y/%m/%d, %H:%M:%S") }} +
++ Status : {{ object.status }} +
++ Content : {{ object.csv_content_types() }} +
++ Error details : {{ object.error_details }} +
+{% endblock %} + + + diff --git a/reference_documents/jinja2/reference_documents/reference_document_csv_upload/index.jinja b/reference_documents/jinja2/reference_documents/reference_document_csv_upload/index.jinja new file mode 100644 index 000000000..bb95b0f69 --- /dev/null +++ b/reference_documents/jinja2/reference_documents/reference_document_csv_upload/index.jinja @@ -0,0 +1,34 @@ +{% extends "layouts/layout.jinja" %} +{% from "components/table/macro.njk" import govukTable %} +{% from "components/breadcrumbs/macro.njk" import govukBreadcrumbs %} + +{% set page_title = 'CSV upload index' %} +{% set create_url = "create" %} + +{% block breadcrumb %} + {{ govukBreadcrumbs({ + "items": [{"text": "Home", "href": url("home")}, + {"text": "View reference documents"}], + }) }} +{% endblock %} + +{% block content %} ++ {% if request.user.has_perm('reference_documents.add_referencedocument') %} + + Upload reference document CSV data + + {% endif %} +
+Run a check to see how this reference document's data compares to the data held in TAP. Any discrepancies will be flagged.
{% else %} -If you believe you should have permission to run this check, please contact someone on the TAP team for support.
+Run a check to see how this reference document's data compares to the data held in TAP. Any discrepancies will be flagged.
+Version {{ object.version }} of the reference document does not have an entry into + force date. Update the reference document version with the correct entry into force + date to enable alignment checks.
+ {% endif %} {% else %} -Reference document version must be in a published state to run checks against TAP dat.
+If you believe you should have permission to run this check, please contact someone on the TAP team for support.
{% endif %} +
{% if last_run %}
Last Run: ({{ last_run }})
diff --git a/reference_documents/migrations/0003_csvupload_alignmentreportcheck_target_start_date_and_more.py b/reference_documents/migrations/0003_csvupload_alignmentreportcheck_target_start_date_and_more.py
new file mode 100644
index 000000000..1351dda26
--- /dev/null
+++ b/reference_documents/migrations/0003_csvupload_alignmentreportcheck_target_start_date_and_more.py
@@ -0,0 +1,147 @@
+# Generated by Django 4.2.15 on 2024-12-19 14:51
+
+import datetime
+
+import django.db.models.deletion
+import django_fsm
+from django.db import migrations
+from django.db import models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ (
+ "reference_documents",
+ "0002_alignmentreport_alignmentreportcheck_refordernumber_and_more",
+ ),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="CSVUpload",
+ fields=[
+ (
+ "id",
+ models.AutoField(
+ auto_created=True,
+ primary_key=True,
+ serialize=False,
+ verbose_name="ID",
+ ),
+ ),
+ ("created_at", models.DateTimeField(auto_now_add=True)),
+ ("updated_at", models.DateTimeField(auto_now=True)),
+ (
+ "status",
+ django_fsm.FSMField(
+ choices=[
+ ("PENDING", "Pending"),
+ ("PROCESSING", "Processing"),
+ ("COMPLETE", "Complete"),
+ ("ERRORED", "Errored"),
+ ],
+ db_index=True,
+ default="PENDING",
+ editable=False,
+ max_length=50,
+ ),
+ ),
+ (
+ "preferential_rates_csv_data",
+ models.TextField(blank=True, null=True),
+ ),
+ ("order_number_csv_data", models.TextField(blank=True, null=True)),
+ ("quota_definition_csv_data", models.TextField(blank=True, null=True)),
+ ("error_details", models.TextField(blank=True, null=True)),
+ ],
+ options={
+ "abstract": False,
+ },
+ ),
+ migrations.AddField(
+ model_name="alignmentreportcheck",
+ name="target_start_date",
+ field=models.DateTimeField(default=datetime.date(2024, 1, 1)),
+ preserve_default=False,
+ ),
+ migrations.AlterField(
+ model_name="alignmentreportcheck",
+ name="ref_order_number",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ref_order_number_checks",
+ to="reference_documents.refordernumber",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="alignmentreportcheck",
+ name="ref_quota_definition",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ref_quota_definition_checks",
+ to="reference_documents.refquotadefinition",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="alignmentreportcheck",
+ name="ref_quota_definition_range",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ref_quota_definition_range_checks",
+ to="reference_documents.refquotadefinitionrange",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="alignmentreportcheck",
+ name="ref_quota_suspension",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ref_quota_suspension_checks",
+ to="reference_documents.refquotasuspension",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="alignmentreportcheck",
+ name="ref_quota_suspension_range",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ref_quota_suspension_range_checks",
+ to="reference_documents.refquotasuspensionrange",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="alignmentreportcheck",
+ name="ref_rate",
+ field=models.ForeignKey(
+ blank=True,
+ null=True,
+ on_delete=django.db.models.deletion.CASCADE,
+ related_name="ref_rate_checks",
+ to="reference_documents.refrate",
+ ),
+ ),
+ migrations.AlterField(
+ model_name="refordernumber",
+ name="relation_type",
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ("EQ", "Equivalent to main quota"),
+ ("NM", "Normal (restrictive to main quota)"),
+ ],
+ max_length=2,
+ null=True,
+ ),
+ ),
+ ]
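For orientation, a minimal sketch of the CSVUpload status lifecycle this migration introduces, assuming the django-fsm transitions (`processing()`, `completed()`, `errored()`) added to `reference_documents/models.py` later in this patch; the control flow mirrors `ReferenceDocumentCSVImporter.run()`, and the header string is the one the importer expects for preferential rates.

```python
# Sketch only: walk a CSVUpload through PENDING -> PROCESSING -> COMPLETE,
# using the FSM transitions defined on the model further down in this patch.
from reference_documents.models import CSVUpload

csv_upload = CSVUpload.objects.create(
    preferential_rates_csv_data=(
        "comm_code,rate,validity_start,validity_end,area_id,document_version\n"
    ),
)

csv_upload.processing()  # PENDING -> PROCESSING
csv_upload.save()
# ... the importer populates reference document data here ...
csv_upload.completed()  # PROCESSING -> COMPLETE
# on failure the importer records error_details and calls errored() instead
csv_upload.save()
```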
diff --git a/reference_documents/models.py b/reference_documents/models.py
index 7b1e18c55..0b0ccef60 100644
--- a/reference_documents/models.py
+++ b/reference_documents/models.py
@@ -1,7 +1,9 @@
from datetime import date
from django.db import models
+from django.db.models import Count
from django.db.models import fields
+from django.db.models.functions import TruncYear
from django_fsm import FSMField
from django_fsm import transition
@@ -48,6 +50,19 @@ class AlignmentReportStatus(models.TextChoices):
ERRORED = "ERRORED", "Errored"
+class ReferenceDocumentCsvUploadStatus(models.TextChoices):
+ """Choices for alignment report state."""
+
+ # The CSV upload has not started and is queued
+ PENDING = "PENDING", "Pending"
+ # the CSV upload is in progress, and currently running
+ PROCESSING = "PROCESSING", "Processing"
+ # The CSV upload has completed
+ COMPLETE = "COMPLETE", "Complete"
+ # The CSV upload unexpectedly errored during processing
+ ERRORED = "ERRORED", "Errored"
+
+
class ReferenceDocument(TimestampedMixin):
"""
This model represents a reference document, a container / parent for
@@ -350,6 +365,8 @@ class RefOrderNumber(models.Model):
default=None,
)
relation_type = models.CharField(
+ null=True,
+ blank=True,
max_length=2,
choices=validators.SubQuotaType.choices,
)
@@ -793,16 +810,35 @@ def errored(self):
"""The alignment check has errored during execution."""
return
- def unique_check_names(self):
+ def target_start_date_years(self):
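+ """Returns a sorted list of the distinct years (as integers) found in
+ the target_start_date values of this report's alignment report checks."""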
+ years = []
+ years_query = (
+ self.alignment_report_checks.annotate(
+ year=TruncYear("target_start_date"),
+ )
+ .values("year")
+ .annotate(count=Count("id"))
+ .values("year", "count")
+ )
+ for year in years_query:
+ if year["year"] not in years:
+ years.append(year["year"].year)
+ return sorted(years)
+
+ def unique_check_names(self, year: int):
"""
Collect all unique check names associated with the AlignmentReport.
Returns:
list(str): a list of unique check names
"""
- return self.alignment_report_checks.distinct("check_name").values_list(
- "check_name",
- flat=True,
+ return (
+ self.alignment_report_checks.filter(target_start_date__year=year)
+ .distinct("check_name")
+ .values_list(
+ "check_name",
+ flat=True,
+ )
)
def check_stats(self):
@@ -814,28 +850,34 @@ def check_stats(self):
"""
stats = {}
- for check_name in self.unique_check_names():
- stats[check_name] = {
- "total": self.alignment_report_checks.filter(
- check_name=check_name,
- ).count(),
- "failed": self.alignment_report_checks.filter(
- check_name=check_name,
- status=AlignmentReportCheckStatus.FAIL,
- ).count(),
- "passed": self.alignment_report_checks.filter(
- check_name=check_name,
- status=AlignmentReportCheckStatus.PASS,
- ).count(),
- "warning": self.alignment_report_checks.filter(
- check_name=check_name,
- status=AlignmentReportCheckStatus.WARNING,
- ).count(),
- "skipped": self.alignment_report_checks.filter(
- check_name=check_name,
- status=AlignmentReportCheckStatus.SKIPPED,
- ).count(),
- }
+ for year in self.target_start_date_years():
+ for check_name in self.unique_check_names(year):
+ stats[check_name + " " + str(year)] = {
+ "total": self.alignment_report_checks.filter(
+ check_name=check_name,
+ target_start_date__year=year,
+ ).count(),
+ "failed": self.alignment_report_checks.filter(
+ check_name=check_name,
+ status=AlignmentReportCheckStatus.FAIL,
+ target_start_date__year=year,
+ ).count(),
+ "passed": self.alignment_report_checks.filter(
+ check_name=check_name,
+ status=AlignmentReportCheckStatus.PASS,
+ target_start_date__year=year,
+ ).count(),
+ "warning": self.alignment_report_checks.filter(
+ check_name=check_name,
+ status=AlignmentReportCheckStatus.WARNING,
+ target_start_date__year=year,
+ ).count(),
+ "skipped": self.alignment_report_checks.filter(
+ check_name=check_name,
+ status=AlignmentReportCheckStatus.SKIPPED,
+ target_start_date__year=year,
+ ).count(),
+ }
return stats
@@ -887,7 +929,7 @@ class AlignmentReportCheck(TimestampedMixin):
ref_quota_definition = models.ForeignKey(
"reference_documents.RefQuotaDefinition",
- on_delete=models.PROTECT,
+ on_delete=models.CASCADE,
related_name="ref_quota_definition_checks",
blank=True,
null=True,
@@ -895,7 +937,7 @@ class AlignmentReportCheck(TimestampedMixin):
ref_order_number = models.ForeignKey(
"reference_documents.RefOrderNumber",
- on_delete=models.PROTECT,
+ on_delete=models.CASCADE,
related_name="ref_order_number_checks",
blank=True,
null=True,
@@ -903,7 +945,7 @@ class AlignmentReportCheck(TimestampedMixin):
ref_rate = models.ForeignKey(
"reference_documents.RefRate",
- on_delete=models.PROTECT,
+ on_delete=models.CASCADE,
related_name="ref_rate_checks",
blank=True,
null=True,
@@ -911,7 +953,7 @@ class AlignmentReportCheck(TimestampedMixin):
ref_quota_definition_range = models.ForeignKey(
"reference_documents.RefQuotaDefinitionRange",
- on_delete=models.PROTECT,
+ on_delete=models.CASCADE,
related_name="ref_quota_definition_range_checks",
blank=True,
null=True,
@@ -919,7 +961,7 @@ class AlignmentReportCheck(TimestampedMixin):
ref_quota_suspension = models.ForeignKey(
"reference_documents.RefQuotaSuspension",
- on_delete=models.PROTECT,
+ on_delete=models.CASCADE,
related_name="ref_quota_suspension_checks",
blank=True,
null=True,
@@ -927,8 +969,67 @@ class AlignmentReportCheck(TimestampedMixin):
ref_quota_suspension_range = models.ForeignKey(
"reference_documents.RefQuotaSuspensionRange",
- on_delete=models.PROTECT,
+ on_delete=models.CASCADE,
related_name="ref_quota_suspension_range_checks",
blank=True,
null=True,
)
+
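+ # The start date of the validity period the check targeted; used to group
+ # report stats and unique check names by year.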
+ target_start_date = models.DateTimeField(blank=False, null=False)
+
+
+class CSVUpload(TimestampedMixin):
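+ """
+ Holds the raw CSV data uploaded for a reference document data import,
+ along with the processing status and any error details recorded while
+ the importer runs.
+
+ Expected status lifecycle, as exercised by ReferenceDocumentCSVImporter.run():
+ PENDING -> processing() -> PROCESSING -> completed() -> COMPLETE
+ PROCESSING -> errored() -> ERRORED (error_details records the failure)
+ """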
+ status = FSMField(
+ default=ReferenceDocumentCsvUploadStatus.PENDING,
+ choices=ReferenceDocumentCsvUploadStatus.choices,
+ db_index=True,
+ protected=False,
+ editable=False,
+ )
+
+ preferential_rates_csv_data = models.TextField(blank=True, null=True)
+ order_number_csv_data = models.TextField(blank=True, null=True)
+ quota_definition_csv_data = models.TextField(blank=True, null=True)
+ error_details = models.TextField(blank=True, null=True)
+
+ def csv_content_types(self):
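+ """Return a comma separated, human readable summary of the CSV data types included in this upload."""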
+ csv_upload_content = []
+ if self.preferential_rates_csv_data:
+ csv_upload_content.append("Preferential rates")
+ if self.order_number_csv_data:
+ csv_upload_content.append("Order numbers")
+ if self.quota_definition_csv_data:
+ csv_upload_content.append("Quota definitions")
+ return ", ".join(csv_upload_content)
+
+ @transition(
+ field=status,
+ source=ReferenceDocumentCsvUploadStatus.PROCESSING,
+ target=ReferenceDocumentCsvUploadStatus.ERRORED,
+ custom={
+ "label": "Mark the reference document CSV import as failed with errors.",
+ },
+ )
+ def errored(self):
+ """The reference document csv import has errored during execution."""
+ return
+
+ @transition(
+ field=status,
+ source=ReferenceDocumentCsvUploadStatus.PENDING,
+ target=ReferenceDocumentCsvUploadStatus.PROCESSING,
+ custom={"label": "Mark the reference document CSV import as in processing."},
+ )
+ def processing(self):
+ """The reference document csv import is processing the import."""
+ return
+
+ @transition(
+ field=status,
+ source=ReferenceDocumentCsvUploadStatus.PROCESSING,
+ target=ReferenceDocumentCsvUploadStatus.COMPLETE,
+ custom={"label": "Mark the reference document CSV import as complete."},
+ )
+ def completed(self):
+ """The reference document csv import has completed the import."""
+ return
diff --git a/reference_documents/tasks.py b/reference_documents/tasks.py
index a1dcd5d8b..43bf60d97 100644
--- a/reference_documents/tasks.py
+++ b/reference_documents/tasks.py
@@ -2,6 +2,8 @@
from common.celery import app
from reference_documents.check.check_runner import Checks
+from reference_documents.csv_importer.importer import ReferenceDocumentCSVImporter
+from reference_documents.models import CSVUpload
from reference_documents.models import ReferenceDocumentVersion
logger = getLogger(__name__)
@@ -38,3 +40,41 @@ def run_alignment_check(
logger.info(
f"COMPLETED ALIGNMENT CHECKS : ReferenceDocumentVersion: {reference_document_version_id}",
)
+
+
+@app.task
+def import_reference_document_data(
+ csv_upload_id: int,
+):
+ """
+ Task for running alignment check.
+
+ The task executes alignment checks against a reference document version and
+ records the results in the TAP database for later review.
+ """
+
+ logger.info(
+ f"RUNNING REFERENCE DOCUMENT CSV UPLOAD : csv_upload_id: {csv_upload_id}",
+ )
+
+ csv_upload = CSVUpload.objects.get(pk=csv_upload_id)
+
+ uploaded_data_types = []
+ if csv_upload.preferential_rates_csv_data:
+ uploaded_data_types.append("Preferential rates")
+ if csv_upload.order_number_csv_data:
+ uploaded_data_types.append("Order numbers")
+ if csv_upload.quota_definition_csv_data:
+ uploaded_data_types.append("Quota definitions")
+
+ logger.info(
+ f"Reference Document CSV upload details:\n"
+ + f" - Upload includes the following : {', '.join(uploaded_data_types)}",
+ )
+
+ csv_importer = ReferenceDocumentCSVImporter(csv_upload)
+ csv_importer.run()
+
+ logger.info(
+ f"COMPLETED REFERENCE DOCUMENT CSV UPLOAD : csv_upload_id: {csv_upload_id}",
+ )
diff --git a/reference_documents/tests/checks/test_base.py b/reference_documents/tests/checks/test_base.py
index cb1f5ef97..a8eaae7df 100644
--- a/reference_documents/tests/checks/test_base.py
+++ b/reference_documents/tests/checks/test_base.py
@@ -6,6 +6,8 @@
from commodities.models.dc import CommodityTreeSnapshot
from common.tests.factories import GeographicalAreaDescriptionFactory
from common.tests.factories import GeographicalAreaFactory
+from common.tests.factories import GeographicalMembershipFactory
+from common.tests.factories import GeoGroupFactory
from common.tests.factories import GoodsNomenclatureFactory
from common.tests.factories import MeasureFactory
from common.tests.factories import QuotaAssociationFactory
@@ -33,7 +35,7 @@ def test_init(self):
with pytest.raises(TypeError) as e:
BaseCheck()
assert (
- "Can't instantiate abstract class BaseCheck without an implementation for abstract method 'run_check'"
+ "Can't instantiate abstract class BaseCheck without an implementation for abstract methods 'get_area_id', 'get_validity', 'run_check'"
in str(e.value)
)
@@ -42,6 +44,12 @@ class Target(BaseCheck):
def run_check(self) -> (AlignmentReportCheckStatus, str):
super().run_check()
+ def get_area_id(self) -> ():
+ pass
+
+ def get_validity(self):
+ pass
+
target = Target()
assert target.run_check() is None
@@ -84,26 +92,30 @@ def test_order_number_match(self):
def test_geo_area_no_match(self):
pref_quota = factories.RefQuotaDefinitionFactory.create()
target = self.Target(pref_quota)
- assert target.geo_area() is None
+ assert target.tap_geo_areas() == []
def test_geo_area_match(self):
- tap_geo_area = GeographicalAreaFactory.create()
+ validity = TaricDateRange(date(2022, 1, 1), date(2024, 1, 1))
+ tap_geo_area = GeographicalAreaFactory.create(valid_between=validity)
pref_quota = factories.RefQuotaDefinitionFactory.create(
ref_order_number__reference_document_version__reference_document__area_id=tap_geo_area.area_id,
+ valid_between=validity,
)
target = self.Target(pref_quota)
- assert target.geo_area() == tap_geo_area
+ assert target.tap_geo_areas() == [tap_geo_area]
def test_geo_area_description_match(self):
- tap_geo_area = GeographicalAreaFactory.create()
+ validity = TaricDateRange(date(2022, 1, 1), date(2024, 1, 1))
+ tap_geo_area = GeographicalAreaFactory.create(valid_between=validity)
pref_quota = factories.RefQuotaDefinitionFactory.create(
ref_order_number__reference_document_version__reference_document__area_id=tap_geo_area.area_id,
+ valid_between=validity,
)
target = self.Target(pref_quota)
description = (
GeographicalAreaDescription.objects.latest_approved()
- .filter(described_geographicalarea=target.geo_area())
+ .filter(described_geographicalarea=target.tap_geo_areas()[0])
.last()
)
@@ -114,7 +126,7 @@ def test_geo_area_description_no_match(self):
ref_order_number__reference_document_version__reference_document__area_id="",
)
target = self.Target(pref_quota)
- assert target.geo_area() is None
+ assert target.tap_geo_areas() == []
def test_commodity_code_no_match(self):
pref_quota = factories.RefQuotaDefinitionFactory.create()
@@ -1040,27 +1052,92 @@ def test_tap_comm_code_does_not_match_if_validity_out(self):
target = self.Target(ref_rate)
assert target.tap_comm_code() is None
- def test_tap_geo_area_matches(self):
- ref_rate = factories.RefRateFactory.create()
+ def test_tap_geo_areas_match(self):
+ ref_rate = factories.RefRateFactory.create(
+ valid_between=TaricDateRange(date(2022, 1, 1), date(2023, 1, 1)),
+ )
tap_geo_area = GeographicalAreaFactory.create(
area_id=ref_rate.reference_document_version.reference_document.area_id,
+ valid_between=TaricDateRange(date(2022, 1, 1), date(2023, 1, 1)),
+ )
+
+ target = self.Target(ref_rate)
+ assert target.tap_geo_areas() == [tap_geo_area]
+
+ def test_tap_geo_areas_match_multiple(self):
+
+ valid_range_wide = TaricDateRange(date(1990, 1, 1))
+ valid_range = TaricDateRange(date(2022, 1, 1), date(2023, 1, 1))
+ valid_range_2 = TaricDateRange(date(2021, 1, 1), date(2024, 1, 1))
+ invalid_range = TaricDateRange(date(2022, 1, 1), date(2022, 6, 1))
+
+ valid_geo_group = GeoGroupFactory.create(
+ area_id="ZZG",
+ valid_between=valid_range,
+ )
+
+ tap_geo_area = GeographicalAreaFactory.create(
+ area_id="ZZ",
+ valid_between=valid_range_wide,
+ )
+
+ tab_geo_area_member = GeographicalMembershipFactory.create(
+ member=tap_geo_area,
+ geo_group=valid_geo_group,
+ valid_between=valid_range,
+ )
+
+ valid_geo_group_2 = GeoGroupFactory.create(
+ area_id="ZZG2",
+ valid_between=valid_range_2,
+ )
+
+ tab_geo_area_member_2 = GeographicalMembershipFactory.create(
+ member=tap_geo_area,
+ geo_group=valid_geo_group_2,
+ valid_between=valid_range_2,
+ )
+
+ invalid_geo_group = GeoGroupFactory.create(
+ area_id="ZZG3",
+ valid_between=invalid_range,
+ )
+
+ tab_geo_area_member_3 = GeographicalMembershipFactory.create(
+ member=tap_geo_area,
+ geo_group=invalid_geo_group,
+ valid_between=invalid_range,
+ )
+
+ ref_rate = factories.RefRateFactory.create(
+ valid_between=TaricDateRange(date(2022, 1, 1), date(2023, 1, 1)),
+ reference_document_version__reference_document__area_id="ZZG",
)
target = self.Target(ref_rate)
- assert target.tap_geo_area() == tap_geo_area
+ assert valid_geo_group in target.tap_geo_areas()
+ assert valid_geo_group_2 in target.tap_geo_areas()
- def test_tap_geo_area_no_match(self):
+ def test_tap_geo_areas_no_match(self):
ref_rate = factories.RefRateFactory.create()
target = self.Target(ref_rate)
- assert target.tap_geo_area() is None
+ assert target.tap_geo_areas() == []
def test_tap_geo_area_description_exists(self):
- ref_rate = factories.RefRateFactory.create()
+ ref_rate = factories.RefRateFactory.create(
+ reference_document_version__reference_document__area_id="AA",
+ valid_between=TaricDateRange(date(2022, 1, 1), date(2023, 1, 1)),
+ )
tap_geo_area_description = GeographicalAreaDescriptionFactory.create(
- described_geographicalarea__area_id=ref_rate.reference_document_version.reference_document.area_id,
+ described_geographicalarea__area_id="AA",
+ validity_start=date(2022, 1, 1),
+ described_geographicalarea__valid_between=TaricDateRange(
+ date(2022, 1, 1),
+ None,
+ ),
)
target = self.Target(ref_rate)
@@ -1100,10 +1177,11 @@ def test_tap_related_measures_match_rate_comm_code(self):
valid_between=validity_range,
goods_nomenclature__item_id=item_id,
goods_nomenclature__valid_between=validity_range,
+ geographical_area__valid_between=TaricDateRange(date(2022, 1, 1)),
)
ref_rate = factories.RefRateFactory.create(
- reference_document_version__entry_into_force_date=None,
+ reference_document_version__entry_into_force_date=date(2022, 1, 1),
reference_document_version__reference_document__area_id=tap_measure.geographical_area.area_id,
valid_between=validity_range,
commodity_code=item_id,
@@ -1114,7 +1192,6 @@ def test_tap_related_measures_match_rate_comm_code(self):
assert tap_measure in target.tap_related_measures()
assert len(target.tap_related_measures()) == 1
assert len(target.tap_related_measures(item_id)) == 1
- assert len(target.tap_related_measures("9876543210")) == 0
def test_tap_related_measures_when_comm_code_not_on_tap(self):
ref_rate = factories.RefRateFactory.create(
@@ -1170,17 +1247,20 @@ def test_get_snapshot_returns_none_when_comm_code_does_not_exist(self):
"direct children covered",
{
"item_id": "0101010000",
+ "suffix": "80",
"add_measure": False,
"indent": 1,
"children": [
{
"item_id": "0101010100",
+ "suffix": "80",
"add_measure": True,
"indent": 2,
"children": [],
},
{
"item_id": "0101010200",
+ "suffix": "80",
"add_measure": True,
"indent": 2,
"children": [],
@@ -1193,6 +1273,7 @@ def test_get_snapshot_returns_none_when_comm_code_does_not_exist(self):
"not covered",
{
"item_id": "0101010000",
+ "suffix": "80",
"add_measure": False,
"indent": 1,
"children": [],
@@ -1203,22 +1284,26 @@ def test_get_snapshot_returns_none_when_comm_code_does_not_exist(self):
"mix, children and grandchildren covered",
{
"item_id": "0101000000",
+ "suffix": "80",
"add_measure": False,
"indent": 1,
"children": [
{
"item_id": "0101010000",
+ "suffix": "80",
"add_measure": False,
"indent": 2,
"children": [
{
"item_id": "0101010100",
+ "suffix": "80",
"add_measure": True,
"indent": 3,
"children": [],
},
{
"item_id": "0101010200",
+ "suffix": "80",
"add_measure": True,
"indent": 3,
"children": [],
@@ -1227,6 +1312,7 @@ def test_get_snapshot_returns_none_when_comm_code_does_not_exist(self):
},
{
"item_id": "0101020000",
+ "suffix": "80",
"add_measure": True,
"indent": 2,
"children": [],
@@ -1239,22 +1325,26 @@ def test_get_snapshot_returns_none_when_comm_code_does_not_exist(self):
"mix partial grandchildren covered",
{
"item_id": "0101000000",
+ "suffix": "80",
"add_measure": False,
"indent": 1,
"children": [
{
"item_id": "0101010000",
+ "suffix": "80",
"add_measure": False,
"indent": 2,
"children": [
{
"item_id": "0101010100",
+ "suffix": "80",
"add_measure": False,
"indent": 3,
"children": [],
},
{
"item_id": "0101010200",
+ "suffix": "80",
"add_measure": True,
"indent": 3,
"children": [],
@@ -1263,6 +1353,7 @@ def test_get_snapshot_returns_none_when_comm_code_does_not_exist(self):
},
{
"item_id": "0101020000",
+ "suffix": "80",
"add_measure": True,
"indent": 2,
"children": [],
@@ -1275,11 +1366,13 @@ def test_get_snapshot_returns_none_when_comm_code_does_not_exist(self):
"direct children covered, multiple measures",
{
"item_id": "0101010000",
+ "suffix": "80",
"add_measure": False,
"indent": 1,
"children": [
{
"item_id": "0101010100",
+ "suffix": "80",
"add_measure": True,
"measure_count": 2,
"indent": 2,
@@ -1287,6 +1380,7 @@ def test_get_snapshot_returns_none_when_comm_code_does_not_exist(self):
},
{
"item_id": "0101010200",
+ "suffix": "80",
"add_measure": True,
"indent": 2,
"children": [],
@@ -1305,10 +1399,12 @@ def test_tap_recursive_comm_code_check(
):
validity_range = TaricDateRange(date(2022, 1, 1), date(2022, 6, 1))
- tap_geo_area = GeographicalAreaFactory.create()
+ tap_geo_area = GeographicalAreaFactory.create(
+ valid_between=TaricDateRange(date(2022, 1, 1)),
+ )
ref_rate = factories.RefRateFactory.create(
- reference_document_version__entry_into_force_date=None,
+ reference_document_version__entry_into_force_date=date(2022, 1, 1),
reference_document_version__reference_document__area_id=tap_geo_area.area_id,
valid_between=validity_range,
commodity_code=comm_code_structure["item_id"],
@@ -1317,7 +1413,7 @@ def test_tap_recursive_comm_code_check(
def create_comm_code_and_measure_if_required(data, validity, geo_area):
tap_comm_code = GoodsNomenclatureFactory(
item_id=data["item_id"],
- suffix=80,
+ suffix=data["suffix"],
indent__indent=data["indent"],
valid_between=validity,
)
@@ -1351,6 +1447,7 @@ def recurse_comm_code_structure(structure, validity, geo_area):
target.tap_recursive_comm_code_check(
target.get_snapshot(),
comm_code_structure["item_id"],
+ comm_code_structure["suffix"],
)
is expected_result
)
diff --git a/reference_documents/tests/checks/test_check_runner.py b/reference_documents/tests/checks/test_check_runner.py
index bfa24d82c..29a1faf43 100644
--- a/reference_documents/tests/checks/test_check_runner.py
+++ b/reference_documents/tests/checks/test_check_runner.py
@@ -171,6 +171,7 @@ def test_capture_check_result(self):
RateChecks(ref_rate),
ref_rate=ref_rate,
parent_has_failed_or_skipped_result=True,
+ target_start_date=date.today(),
)
assert result == AlignmentReportCheckStatus.SKIPPED
diff --git a/reference_documents/tests/checks/test_ref_order_numbers.py b/reference_documents/tests/checks/test_ref_order_numbers.py
index cc2c0d2bc..478320495 100644
--- a/reference_documents/tests/checks/test_ref_order_numbers.py
+++ b/reference_documents/tests/checks/test_ref_order_numbers.py
@@ -62,7 +62,7 @@ def test_run_check_fail_no_order_validity_range(self):
target = OrderNumberChecks(ref_order_number=ref_order_number)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- f"order number {tap_order_number.order_number} cant be checked, no validity date range",
+ f"order number {tap_order_number.order_number} cant be checked, no validity date range provided on reference document data",
)
def test_run_check_fail_order_number_does_not_exist(self):
@@ -83,5 +83,5 @@ def test_run_check_fail_order_number_does_not_exist(self):
target = OrderNumberChecks(ref_order_number=ref_order_number)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- f"order number not found matching {ref_order_number.order_number}",
+ f"order number not found matching {ref_order_number.order_number} validity {valid_between}",
)
diff --git a/reference_documents/tests/checks/test_ref_quota_definitions.py b/reference_documents/tests/checks/test_ref_quota_definitions.py
index 4e6ba8c43..d2f00dd77 100644
--- a/reference_documents/tests/checks/test_ref_quota_definitions.py
+++ b/reference_documents/tests/checks/test_ref_quota_definitions.py
@@ -60,6 +60,7 @@ def test_run_check_passed(self):
goods_nomenclature=tap_goods_nomenclature,
order_number=tap_quota_definition.order_number,
geographical_area__area_id=area_id,
+ geographical_area__valid_between=valid_between,
transaction=tap_approved_transaction,
)
@@ -142,7 +143,9 @@ def test_run_check_fails_no_measure(self):
target = QuotaDefinitionChecks(ref_quota_definition=ref_quota_definition)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- "FAIL - measure(s) spanning whole quota definition period not found",
+ f"FAIL - measure(s) spanning whole quota definition period not found "
+ f"for quota definition with order number {ref_quota_definition.ref_order_number.order_number} "
+ f"and validity {valid_between} ",
)
def test_run_check_fails_no_quota_definition(self):
@@ -169,7 +172,7 @@ def test_run_check_fails_no_quota_definition(self):
target = QuotaDefinitionChecks(ref_quota_definition=ref_quota_definition)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- "FAIL - quota definition not found",
+ f"FAIL - quota definition for order number {ref_quota_definition.ref_order_number.order_number} and validity {valid_between} not found",
)
def test_run_check_fails_no_goods_nomenclature(self):
@@ -191,7 +194,7 @@ def test_run_check_fails_no_goods_nomenclature(self):
target = QuotaDefinitionChecks(ref_quota_definition=ref_quota_definition)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- "FAIL - commodity code not found",
+ f"FAIL - commodity code {ref_quota_definition.commodity_code} not found",
)
def test_run_check_fail_duty_sentence(self):
@@ -231,11 +234,14 @@ def test_run_check_fail_duty_sentence(self):
goods_nomenclature=tap_goods_nomenclature,
order_number=tap_quota_definition.order_number,
geographical_area__area_id=area_id,
+ geographical_area__valid_between=valid_between,
transaction=tap_approved_transaction,
)
target = QuotaDefinitionChecks(ref_quota_definition=ref_quota_definition)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- "FAIL - duty rate does not match, expected wonky duty rate to be in ()",
+ f"FAIL - duty rate does not match, expected wonky duty rate to be in () for quota "
+ f"definition with order number {ref_quota_definition.ref_order_number.order_number} "
+ f"and validity {valid_between} ",
)
diff --git a/reference_documents/tests/checks/test_ref_quota_suspensions.py b/reference_documents/tests/checks/test_ref_quota_suspensions.py
index 44f77e81e..d5a909a59 100644
--- a/reference_documents/tests/checks/test_ref_quota_suspensions.py
+++ b/reference_documents/tests/checks/test_ref_quota_suspensions.py
@@ -112,7 +112,8 @@ def test_run_check_failed_no_suspension(self):
target = QuotaSuspensionChecks(ref_quota_suspension=ref_quota_suspension)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- "FAIL - quota suspension not found",
+ f"FAIL - quota suspension not found for quota linked to order number {ref_quota_definition.ref_order_number.order_number} "
+ f"and quota validity {valid_between} ",
)
def test_run_check_failed_suspension_valid_between_different(self):
@@ -169,5 +170,6 @@ def test_run_check_failed_suspension_valid_between_different(self):
target = QuotaSuspensionChecks(ref_quota_suspension=ref_quota_suspension)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- "FAIL - quota suspension not found",
+ f"FAIL - quota suspension not found for quota linked to order number {ref_quota_definition.ref_order_number.order_number}"
+ f" and quota validity {valid_between} ",
)
diff --git a/reference_documents/tests/checks/test_ref_rates.py b/reference_documents/tests/checks/test_ref_rates.py
index 715dfb8fa..820ea502e 100644
--- a/reference_documents/tests/checks/test_ref_rates.py
+++ b/reference_documents/tests/checks/test_ref_rates.py
@@ -43,6 +43,7 @@ def test_run_check_pass(self):
valid_between=valid_between,
goods_nomenclature=tap_goods_nomenclature,
geographical_area__area_id=area_id,
+ geographical_area__valid_between=TaricDateRange(date(2000, 1, 1)),
)
tap_duty_expression = DutyExpressionFactory.create(
@@ -61,7 +62,10 @@ def test_run_check_pass(self):
)
target = RateChecks(ref_rate=ref_rate)
- assert target.run_check() == (AlignmentReportCheckStatus.PASS, "")
+ assert target.run_check() == (
+ AlignmentReportCheckStatus.PASS,
+ f"{ref_rate.commodity_code} {valid_between}: rate for commodity code matched",
+ )
def test_run_check_fail_no_comm_code(self):
valid_between = TaricDateRange(date(2020, 1, 1), date(2020, 12, 31))
@@ -81,7 +85,7 @@ def test_run_check_fail_no_comm_code(self):
target = RateChecks(ref_rate=ref_rate)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- f"{ref_rate.commodity_code} None comm code not live",
+ f"Rate {ref_rate.commodity_code} {ref_rate.valid_between}: commodity code not found for period.",
)
def test_run_check_pass_but_defined_on_child_com_codes(self):
@@ -123,6 +127,7 @@ def test_run_check_pass_but_defined_on_child_com_codes(self):
tap_geo_area = GeographicalAreaFactory.create(
area_id=area_id,
+ valid_between=TaricDateRange(date(2010, 1, 1)),
)
# Child_1
@@ -174,7 +179,7 @@ def test_run_check_pass_but_defined_on_child_com_codes(self):
target = RateChecks(ref_rate=ref_rate)
assert target.run_check() == (
AlignmentReportCheckStatus.PASS,
- f"{comm_code} : matched with children",
+ f"Rate {comm_code} {valid_between}: matched (against commodity code children)",
)
def test_run_check_fai_partially_defined_on_child_com_code(self):
@@ -244,7 +249,7 @@ def test_run_check_fai_partially_defined_on_child_com_code(self):
target = RateChecks(ref_rate=ref_rate)
assert target.run_check() == (
AlignmentReportCheckStatus.FAIL,
- f"{comm_code} : no expected measures found on good code or children",
+ f"Rate {comm_code} {valid_between}: no expected measures found on good code or children",
)
def test_run_check_warning_multiple_matches(self):
@@ -269,6 +274,7 @@ def test_run_check_warning_multiple_matches(self):
tap_geo_area = GeographicalAreaFactory.create(
area_id=area_id,
+ valid_between=TaricDateRange(date(2010, 1, 1)),
)
tap_measure = MeasureFactory.create(
@@ -309,5 +315,66 @@ def test_run_check_warning_multiple_matches(self):
target = RateChecks(ref_rate=ref_rate)
assert target.run_check() == (
AlignmentReportCheckStatus.WARNING,
- f"{ref_rate.commodity_code} : multiple measures match",
+ f"Rate {ref_rate.commodity_code} {valid_between} : multiple measures match",
+ )
+
+ def test_run_check_match_parent(self):
+ valid_between = TaricDateRange(date(2020, 1, 1), date(2020, 12, 31))
+ area_id = "ZZ"
+
+ # setup ref doc & version
+ ref_doc_ver = factories.ReferenceDocumentVersionFactory.create(
+ reference_document__area_id=area_id,
+ )
+
+ ref_rate = RefRateFactory.create(
+ duty_rate="12%",
+ commodity_code="0101010000",
+ reference_document_version=ref_doc_ver,
+ valid_between=valid_between,
+ )
+
+ tap_goods_nomenclature = GoodsNomenclatureFactory.create(
+ item_id="0101010000",
+ valid_between=valid_between,
+ indent__indent=2,
+ )
+
+ tap_goods_nomenclature_parent = GoodsNomenclatureFactory.create(
+ item_id="0101000000",
+ valid_between=valid_between,
+ indent__indent=1,
+ )
+
+ tap_geo_area = GeographicalAreaFactory.create(
+ area_id=area_id,
+ valid_between=TaricDateRange(date(2010, 1, 1)),
+ )
+
+ tap_measure = MeasureFactory.create(
+ measure_type__sid=142,
+ valid_between=valid_between,
+ goods_nomenclature=tap_goods_nomenclature_parent,
+ geographical_area=tap_geo_area,
+ )
+
+ tap_duty_expression = DutyExpressionFactory.create(
+ duty_amount_applicability_code=1,
+ valid_between=TaricDateRange(date(2000, 1, 1)),
+ prefix="",
+ measurement_unit_applicability_code=0,
+ monetary_unit_applicability_code=0,
+ description="% or amount",
+ )
+
+ tap_measure_component = MeasureComponentFactory.create(
+ component_measure=tap_measure,
+ duty_amount=12.0,
+ duty_expression=tap_duty_expression,
+ )
+
+ target = RateChecks(ref_rate=ref_rate)
+ assert target.run_check() == (
+ AlignmentReportCheckStatus.PASS,
+ f"Rate {ref_rate.commodity_code} {valid_between}: matched (against commodity code parent)",
)
diff --git a/reference_documents/tests/factories.py b/reference_documents/tests/factories.py
index d395909cd..a2169e6dd 100644
--- a/reference_documents/tests/factories.py
+++ b/reference_documents/tests/factories.py
@@ -11,6 +11,7 @@
from common.util import TaricDateRange
from reference_documents.models import AlignmentReportCheckStatus
from reference_documents.models import AlignmentReportStatus
+from reference_documents.models import ReferenceDocumentCsvUploadStatus
from reference_documents.models import ReferenceDocumentVersionStatus
@@ -288,3 +289,26 @@ class Meta:
status = AlignmentReportCheckStatus.PASS
message = FuzzyText(length=10)
ref_rate = None
+ target_start_date = datetime.now() + timedelta(days=-1)
+
+
+class CSVUploadFactory(factory.django.DjangoModelFactory):
+ class Meta:
+ model = "reference_documents.CSVUpload"
+
+ error_details = ""
+ status = ReferenceDocumentCsvUploadStatus.PENDING
+ preferential_rates_csv_data = ""
+ order_number_csv_data = ""
+ quota_definition_csv_data = ""
+
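+ # Traits allow tests to create uploads already in a given processing state.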
+ class Params:
+ errored = factory.Trait(
+ status=ReferenceDocumentCsvUploadStatus.ERRORED,
+ )
+ complete = factory.Trait(
+ status=ReferenceDocumentCsvUploadStatus.COMPLETE,
+ )
+ processing = factory.Trait(
+ status=ReferenceDocumentCsvUploadStatus.PROCESSING,
+ )
diff --git a/reference_documents/tests/support/test_order_numbers.csv b/reference_documents/tests/support/test_order_numbers.csv
new file mode 100644
index 000000000..73a1bdae8
--- /dev/null
+++ b/reference_documents/tests/support/test_order_numbers.csv
@@ -0,0 +1,4 @@
+order_number,validity_start,validity_end,parent_order_number,coefficient,relationship_type,area_id,document_version,
+059001,2023-01-01,,,,,NZ,1.0
+059002,2023-01-01,,059001,1.3,EQ,NZ,1.0
+059003,2023-01-01,2024-01-01,,,,NZ,1.0
\ No newline at end of file
diff --git a/reference_documents/tests/support/test_preferential_rates.csv b/reference_documents/tests/support/test_preferential_rates.csv
new file mode 100644
index 000000000..cbe490ba1
--- /dev/null
+++ b/reference_documents/tests/support/test_preferential_rates.csv
@@ -0,0 +1,3 @@
+comm_code,rate,validity_start,validity_end,area_id,document_version
+0100000000,0.00%,2024-01-01,,NZ,1.0
+0200000000,5.00% + 147.00 GBP / 100 kg,2024-01-01,2028-12-31,NZ,1.0
\ No newline at end of file
diff --git a/reference_documents/tests/support/test_quota_definitions.csv b/reference_documents/tests/support/test_quota_definitions.csv
new file mode 100644
index 000000000..19d552c9e
--- /dev/null
+++ b/reference_documents/tests/support/test_quota_definitions.csv
@@ -0,0 +1,4 @@
+order_number,comm_code,duty_rate,initial_volume,measurement,validity_start,validity_end,area_id,document_version
+059001,0100000000,0.00%,200,tonnes,2023-01-01,2023-12-31,NZ,1.0
+059001,0100000000,0.00%,400,tonnes,2024-01-01,2024-12-31,NZ,1.0
+059001,0100000000,0.00%,400,tonnes,2025-01-01,,NZ,1.0
\ No newline at end of file
diff --git a/reference_documents/tests/test_alignment_report_model.py b/reference_documents/tests/test_alignment_report_model.py
index d078805c9..dc945db1a 100644
--- a/reference_documents/tests/test_alignment_report_model.py
+++ b/reference_documents/tests/test_alignment_report_model.py
@@ -1,3 +1,6 @@
+import datetime
+from datetime import timedelta
+
import django_fsm
import pytest
@@ -91,7 +94,7 @@ def test_state_transition_from_processing(self):
def test_unique_check_names_default(self):
target = factories.AlignmentReportFactory()
- assert list(target.unique_check_names()) == []
+ assert list(target.unique_check_names(datetime.date.today().year)) == []
def test_unique_check_names_populated(self):
target = factories.AlignmentReportFactory()
@@ -99,10 +102,28 @@ def test_unique_check_names_populated(self):
check_name="test1",
alignment_report=target,
status=AlignmentReportCheckStatus.PASS,
+ target_start_date=datetime.date.today(),
)
- AlignmentReportCheckFactory(check_name="test2", alignment_report=target)
- AlignmentReportCheckFactory(check_name="test3", alignment_report=target)
- assert list(target.unique_check_names()) == ["test1", "test2", "test3"]
+ AlignmentReportCheckFactory(
+ check_name="test2",
+ alignment_report=target,
+ target_start_date=datetime.date.today(),
+ )
+ AlignmentReportCheckFactory(
+ check_name="test3",
+ alignment_report=target,
+ target_start_date=datetime.date.today() + timedelta(days=365),
+ )
+ AlignmentReportCheckFactory(
+ check_name="test4",
+ alignment_report=target,
+ target_start_date=datetime.date.today(),
+ )
+ assert list(target.unique_check_names(datetime.date.today().year)) == [
+ "test1",
+ "test2",
+ "test4",
+ ]
def test_check_stats_default(self):
target = factories.AlignmentReportFactory()
@@ -129,17 +150,26 @@ def test_check_stats_populated(self):
check_name="test1",
alignment_report=target,
status=AlignmentReportCheckStatus.SKIPPED,
+ target_start_date=datetime.date.today(),
+ )
+ AlignmentReportCheckFactory(
+ check_name="test2",
+ alignment_report=target,
+ target_start_date=datetime.date.today(),
+ )
+ AlignmentReportCheckFactory(
+ check_name="test3",
+ alignment_report=target,
+ target_start_date=datetime.date.today(),
)
- AlignmentReportCheckFactory(check_name="test2", alignment_report=target)
- AlignmentReportCheckFactory(check_name="test3", alignment_report=target)
stats = target.check_stats()
- assert stats["test1"]["total"] == 4
- assert stats["test1"]["failed"] == 1
- assert stats["test1"]["passed"] == 1
- assert stats["test1"]["warning"] == 1
- assert stats["test1"]["skipped"] == 1
- assert stats["test2"]["total"] == 1
- assert stats["test3"]["total"] == 1
+ assert stats[f"test1 {datetime.date.today().year}"]["total"] == 4
+ assert stats[f"test1 {datetime.date.today().year}"]["failed"] == 1
+ assert stats[f"test1 {datetime.date.today().year}"]["passed"] == 1
+ assert stats[f"test1 {datetime.date.today().year}"]["warning"] == 1
+ assert stats[f"test1 {datetime.date.today().year}"]["skipped"] == 1
+ assert stats[f"test2 {datetime.date.today().year}"]["total"] == 1
+ assert stats[f"test3 {datetime.date.today().year}"]["total"] == 1
def test_error_count(self):
target = factories.AlignmentReportFactory()
@@ -192,3 +222,21 @@ def test_warning_count(self):
AlignmentReportCheckFactory(check_name="test2", alignment_report=target)
AlignmentReportCheckFactory(check_name="test3", alignment_report=target)
assert target.warning_count() == 1
+
+ def test_target_start_date_years(self):
+ target = factories.AlignmentReportFactory()
+
+ for i in range(20):
+ factories.AlignmentReportCheckFactory.create(
+ alignment_report=target,
+ status=AlignmentReportCheckStatus.PASS,
+ target_start_date=datetime.date(
+ datetime.date.today().year + (i - 5),
+ 1,
+ 1,
+ ),
+ )
+
+ assert datetime.date.today().year - 5 in target.target_start_date_years()
+ assert datetime.date.today().year + 14 in target.target_start_date_years()
+ assert len(target.target_start_date_years()) == 20
diff --git a/reference_documents/tests/test_csv_upload_model.py b/reference_documents/tests/test_csv_upload_model.py
new file mode 100644
index 000000000..a47d4d880
--- /dev/null
+++ b/reference_documents/tests/test_csv_upload_model.py
@@ -0,0 +1,44 @@
+import pytest
+
+from reference_documents.models import CSVUpload
+from reference_documents.tests.factories import CSVUploadFactory
+
+pytestmark = pytest.mark.django_db
+
+
+@pytest.mark.reference_documents
+class TestCsvUpload:
+ def test_init(self):
+ target = CSVUpload()
+
+ assert target.error_details is None
+ assert target.status == "PENDING"
+ assert target.preferential_rates_csv_data is None
+ assert target.order_number_csv_data is None
+ assert target.quota_definition_csv_data is None
+
+ def test_errored(self):
+ target = CSVUploadFactory.create(processing=True)
+ target.errored()
+ assert target.status == "ERRORED"
+
+ def test_processing(self):
+ target = CSVUploadFactory.create()
+ target.processing()
+ assert target.status == "PROCESSING"
+
+ def test_completed(self):
+ target = CSVUploadFactory.create(processing=True)
+ target.completed()
+ assert target.status == "COMPLETE"
+
+ def test_csv_content_types(self):
+ target = CSVUploadFactory.create(
+ preferential_rates_csv_data="some data",
+ order_number_csv_data="some data",
+ quota_definition_csv_data="some data",
+ )
+ assert (
+ target.csv_content_types()
+ == "Preferential rates, Order numbers, Quota definitions"
+ )
diff --git a/reference_documents/tests/test_csv_upload_views.py b/reference_documents/tests/test_csv_upload_views.py
new file mode 100644
index 000000000..61975b964
--- /dev/null
+++ b/reference_documents/tests/test_csv_upload_views.py
@@ -0,0 +1,146 @@
+import os
+
+import pytest
+from django.urls import reverse
+
+from reference_documents.tests.factories import CSVUploadFactory
+
+pytestmark = pytest.mark.django_db
+
+
+def open_support_file(file_name, from_file):
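+ """Open a CSV fixture from the tests/support directory, relative to the given test module path."""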
+ path_to_current_file = os.path.realpath(from_file)
+ current_directory = os.path.split(path_to_current_file)[0]
+ return open(os.path.join(current_directory, "support", file_name), "r")
+
+
+@pytest.mark.reference_documents
+class TestReferenceDocumentCsvUploadList:
+ def test_get_without_permissions(self, valid_user_client):
+ resp = valid_user_client.get(
+ reverse(
+ "reference_documents:reference-document-csv-index",
+ ),
+ )
+ assert resp.status_code == 403
+
+ def test_get_with_permissions(self, superuser_client):
+ resp = superuser_client.get(
+ reverse(
+ "reference_documents:reference-document-csv-index",
+ ),
+ )
+ assert resp.status_code == 200
+
+
+@pytest.mark.reference_documents
+class TestReferenceDocumentCsvUploadDetails:
+ def test_get_without_permissions(self, valid_user_client):
+ csv_upload = CSVUploadFactory.create()
+
+ resp = valid_user_client.get(
+ reverse(
+ "reference_documents:reference-document-csv-upload-details",
+ kwargs={"pk": csv_upload.pk},
+ ),
+ )
+ assert resp.status_code == 403
+
+ def test_get_with_permissions(self, superuser_client):
+ csv_upload = CSVUploadFactory.create()
+
+ resp = superuser_client.get(
+ reverse(
+ "reference_documents:reference-document-csv-upload-details",
+ kwargs={"pk": csv_upload.pk},
+ ),
+ )
+ assert resp.status_code == 200
+
+
+@pytest.mark.reference_documents
+class TestReferenceDocumentCsvUploadCreate:
+ def test_get_without_permissions(self, valid_user_client):
+ resp = valid_user_client.get(
+ reverse(
+ "reference_documents:reference-document-csv-upload",
+ ),
+ )
+ assert resp.status_code == 403
+
+ def test_get_with_permissions(self, superuser_client):
+ resp = superuser_client.get(
+ reverse(
+ "reference_documents:reference-document-csv-upload",
+ ),
+ )
+ assert resp.status_code == 200
+
+ def test_post_without_permissions(self, valid_user_client):
+ preferential_rates_csv_file = open_support_file(
+ "test_preferential_rates.csv",
+ __file__,
+ )
+ order_number_csv_file = open_support_file("test_order_numbers.csv", __file__)
+ quota_definition_csv_file = open_support_file(
+ "test_quota_definitions.csv",
+ __file__,
+ )
+
+ post_data = {
+ "preferential_rates_csv_data": preferential_rates_csv_file,
+ "order_number_csv_data": order_number_csv_file,
+ "quota_definition_csv_data": quota_definition_csv_file,
+ }
+
+ resp = valid_user_client.post(
+ reverse(
+ "reference_documents:reference-document-csv-upload",
+ ),
+ post_data,
+ )
+ assert resp.status_code == 403
+
+ def test_post_with_permissions(self, superuser_client):
+ preferential_rates_csv_file = open_support_file(
+ "test_preferential_rates.csv",
+ __file__,
+ )
+ order_number_csv_file = open_support_file("test_order_numbers.csv", __file__)
+ quota_definition_csv_file = open_support_file(
+ "test_quota_definitions.csv",
+ __file__,
+ )
+
+ post_data = {
+ "preferential_rates_csv_data": preferential_rates_csv_file,
+ "order_number_csv_data": order_number_csv_file,
+ "quota_definition_csv_data": quota_definition_csv_file,
+ }
+
+ resp = superuser_client.post(
+ reverse(
+ "reference_documents:reference-document-csv-upload",
+ ),
+ post_data,
+ )
+ assert resp.status_code == 200
+
+
+@pytest.mark.reference_documents
+class TestReferenceDocumentCsvUploadCreateSuccess:
+ def test_get_without_permissions(self, valid_user_client):
+ resp = valid_user_client.get(
+ reverse(
+ "reference_documents:reference-document-csv-upload-success",
+ ),
+ )
+ assert resp.status_code == 403
+
+ def test_get_with_permissions(self, superuser_client):
+ resp = superuser_client.get(
+ reverse(
+ "reference_documents:reference-document-csv-upload-success",
+ ),
+ )
+ assert resp.status_code == 200
diff --git a/reference_documents/tests/test_importer.py b/reference_documents/tests/test_importer.py
new file mode 100644
index 000000000..3d523d121
--- /dev/null
+++ b/reference_documents/tests/test_importer.py
@@ -0,0 +1,324 @@
+import pytest
+
+from common.tests.factories import GeographicalAreaFactory
+from reference_documents.csv_importer.importer import ReferenceDocumentCSVImporter
+from reference_documents.models import ReferenceDocumentCsvUploadStatus
+from reference_documents.tests.factories import CSVUploadFactory
+from reference_documents.tests.factories import ReferenceDocumentVersionFactory
+
+pytestmark = pytest.mark.django_db
+
+
+# preferential rates CSV data
+def mock_preferential_rates_csv_data():
+ return """comm_code,rate,validity_start,validity_end,area_id,document_version
+0100000000,0.00%,2024-01-01,,NZ,1.0
+0200000000,5.00% + 147.00 GBP / 100 kg,2024-01-01,2028-12-31,NZ,1.0"""
+
+
+def mock_preferential_rates_csv_data_invalid_date():
+ return """comm_code,rate,validity_start,validity_end,area_id,document_version
+0100000000,0.00%,2024-01-32,,NZ,1.0"""
+
+
+def mock_preferential_rates_csv_data_invalid_area_id():
+ return """comm_code,rate,validity_start,validity_end,area_id,document_version
+0100000000,0.00%,2024-01-01,,XX,1.0"""
+
+
+def mock_preferential_rates_csv_data_invalid_comm_code():
+ return """comm_code,rate,validity_start,validity_end,area_id,document_version
+ABC,0.00%,2024-01-01,,NZ,1.0"""
+
+
+def mock_preferential_rates_csv_data_invalid_document_version():
+ return """comm_code,rate,validity_start,validity_end,area_id,document_version
+0100000000,0.00%,2024-01-01,,NZ,4.z"""
+
+
+def mock_preferential_rates_csv_data_invalid_headers():
+ return """aa,rate,validity_start,validity_end,area_id,document_version
+0100000000,0.00%,2024-01-01,,NZ,4.4"""
+
+
+# order number CSV data
+def mock_order_number_csv_data():
+ return """order_number,validity_start,validity_end,parent_order_number,coefficient,relationship_type,area_id,document_version,
+059001,2023-01-01,,,,,NZ,1.0
+059002,2023-01-01,,059001,1.3,EQ,NZ,1.0
+059003,2023-01-01,2024-01-01,,,,NZ,1.0"""
+
+
+def mock_order_number_csv_data_invalid_date():
+ return """order_number,validity_start,validity_end,parent_order_number,coefficient,relationship_type,area_id,document_version,
+059001,2023-01-41,,,,,NZ,1.0"""
+
+
+def mock_order_number_csv_data_invalid_area_id():
+ return """order_number,validity_start,validity_end,parent_order_number,coefficient,relationship_type,area_id,document_version,
+059001,2023-01-01,,,,,AA,1.0"""
+
+
+def mock_order_number_csv_data_invalid_document_version():
+ return """order_number,validity_start,validity_end,parent_order_number,coefficient,relationship_type,area_id,document_version,
+059001,2023-01-01,,,,,NZ,1.a"""
+
+
+def mock_order_number_csv_data_invalid_headers():
+ return """banana,validity_start,validity_end,parent_order_number,coefficient,relationship_type,area_id,document_version,
+059001,2023-01-01,,,,,NZ,1.0"""
+
+
+def mock_order_number_already_exists_csv_data():
+ return """order_number,validity_start,validity_end,parent_order_number,coefficient,relationship_type,area_id,document_version,
+059001,2023-01-01,,,,,NZ,1.0
+059001,2023-01-01,,,,,NZ,1.0"""
+
+
+def mock_order_number_parent_does_not_exist_csv_data():
+ return """order_number,validity_start,validity_end,parent_order_number,coefficient,relationship_type,area_id,document_version,
+059002,2023-01-01,,059001,1.3,EQ,NZ,1.0"""
+
+
+# Quota definition CSV data
+def mock_quota_definition_csv_data():
+ return """order_number,comm_code,duty_rate,initial_volume,measurement,validity_start,validity_end,area_id,document_version
+059001,0100000000,0.00%,200,tonnes,2023-01-01,2023-12-31,NZ,1.0
+059001,0100000000,0.00%,400,tonnes,2024-01-01,2024-12-31,NZ,1.0
+059001,0100000000,0.00%,400,tonnes,2025-01-01,,NZ,1.0"""
+
+
+def mock_quota_definition_csv_data_invalid_date():
+ return """order_number,comm_code,duty_rate,initial_volume,measurement,validity_start,validity_end,area_id,document_version
+059001,0100000000,0.00%,200,tonnes,2023-01-41,2023-12-31,NZ,1.0"""
+
+
+def mock_quota_definition_csv_data_invalid_area_id():
+ return """order_number,comm_code,duty_rate,initial_volume,measurement,validity_start,validity_end,area_id,document_version
+059001,0100000000,0.00%,200,tonnes,2023-01-01,2023-12-31,AA,1.0"""
+
+
+def mock_quota_definition_csv_data_invalid_comm_code_content():
+ return """order_number,comm_code,duty_rate,initial_volume,measurement,validity_start,validity_end,area_id,document_version
+059001,AAAAAAAAAA,0.00%,200,tonnes,2023-01-01,2023-12-31,NZ,1.0"""
+
+
+def mock_quota_definition_csv_data_invalid_comm_code_length():
+ return """order_number,comm_code,duty_rate,initial_volume,measurement,validity_start,validity_end,area_id,document_version
+059001,010000000,0.00%,200,tonnes,2023-01-01,2023-12-31,NZ,1.0"""
+
+
+def mock_quota_definition_csv_data_invalid_document_version():
+ return """order_number,comm_code,duty_rate,initial_volume,measurement,validity_start,validity_end,area_id,document_version
+059001,0100000000,0.00%,200,tonnes,2023-01-01,2023-12-31,NZ,1.z"""
+
+
+def mock_quota_definition_csv_data_invalid_headers():
+ return """boop,comm_code,duty_rate,initial_volume,measurement,validity_start,validity_end,area_id,document_version
+059001,0100000000,0.00%,200,tonnes,2023-01-01,2023-12-31,NZ,1.0"""
+
+
+@pytest.mark.reference_documents
+class TestReferenceDocumentCSVImporter:
+ def test_init(self):
+ csv_upload = CSVUploadFactory.create()
+ target = ReferenceDocumentCSVImporter(csv_upload)
+
+ assert target.csv_upload == csv_upload
+
+ def test_run_empty_csv_upload_marks_csv_upload_as_errored(self):
+ csv_upload = CSVUploadFactory.create()
+ target = ReferenceDocumentCSVImporter(csv_upload)
+ target.run()
+ assert csv_upload.status == ReferenceDocumentCsvUploadStatus.ERRORED
+ assert csv_upload.error_details == "No CSV data to process, exiting."
+
+ @pytest.mark.parametrize(
+ "preferential_rates_csv_data,order_number_csv_data,quota_definition_csv_data,expected_status,error_details_contains",
+ [
+ # preferential rates CSV data
+ (
+ mock_preferential_rates_csv_data(),
+ None,
+ None,
+ ReferenceDocumentCsvUploadStatus.COMPLETE,
+ "",
+ ),
+ (
+ mock_preferential_rates_csv_data_invalid_date(),
+ None,
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValidationError:ā%(value)sā value has the correct format (YYYY-MM-DD) but it is an invalid date.",
+ ),
+ (
+ mock_preferential_rates_csv_data_invalid_area_id(),
+ None,
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:Area ID does not exist in TAP data: XX",
+ ),
+ (
+ mock_preferential_rates_csv_data_invalid_comm_code(),
+ None,
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:ABC is not a valid comm code, it can only contain numbers",
+ ),
+ (
+ mock_preferential_rates_csv_data_invalid_document_version(),
+ None,
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:could not convert string to float: '4.z'",
+ ),
+ (
+ mock_preferential_rates_csv_data_invalid_headers(),
+ None,
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:CSV data for preferential rates missing header comm_code",
+ ),
+ # Order Number CSV data
+ (
+ None,
+ mock_order_number_csv_data(),
+ None,
+ ReferenceDocumentCsvUploadStatus.COMPLETE,
+ "",
+ ),
+ (
+ None,
+ mock_order_number_csv_data_invalid_date(),
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:day is out of range for month",
+ ),
+ (
+ None,
+ mock_order_number_csv_data_invalid_area_id(),
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:Area ID does not exist in TAP data: AA",
+ ),
+ (
+ None,
+ mock_order_number_csv_data_invalid_document_version(),
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:could not convert string to float: '1.a'",
+ ),
+ (
+ None,
+ mock_order_number_csv_data_invalid_headers(),
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:CSV data for order numbers missing header order_number",
+ ),
+ (
+ None,
+ mock_order_number_already_exists_csv_data(),
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "Exception:Order Number already exists, details : {'order_number': '059001', 'validity_start': '2023-01-01', 'validity_end': '', 'parent_order_number': '', 'coefficient': '', 'relationship_type': '', 'area_id': 'NZ', 'document_version': '1.0', '': None}, matched on order number and start_date.",
+ ),
+ (
+ None,
+ mock_order_number_parent_does_not_exist_csv_data(),
+ None,
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "Exception:Parent Order Number 059001 does not exist.",
+ ),
+ # Quota Definition CSV data
+ (
+ None,
+ mock_order_number_csv_data(),
+ mock_quota_definition_csv_data(),
+ ReferenceDocumentCsvUploadStatus.COMPLETE,
+ "",
+ ),
+ (
+ None,
+ mock_order_number_csv_data(),
+ mock_quota_definition_csv_data_invalid_date(),
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValidationError:ā%(value)sā value has the correct format (YYYY-MM-DD) but it is an invalid date.",
+ ),
+ (
+ None,
+ mock_order_number_csv_data(),
+ mock_quota_definition_csv_data_invalid_area_id(),
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:Area ID does not exist in TAP data: AA",
+ ),
+ (
+ None,
+ mock_order_number_csv_data(),
+ mock_quota_definition_csv_data_invalid_comm_code_content(),
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:AAAAAAAAAA is not a valid comm code, it can only contain numbers",
+ ),
+ (
+ None,
+ mock_order_number_csv_data(),
+ mock_quota_definition_csv_data_invalid_comm_code_length(),
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:010000000 is not a valid comm code, it should be 10 characters long",
+ ),
+ (
+ None,
+ mock_order_number_csv_data(),
+ mock_quota_definition_csv_data_invalid_document_version(),
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:could not convert string to float: '1.z'",
+ ),
+ (
+ None,
+ mock_order_number_csv_data(),
+ mock_quota_definition_csv_data_invalid_headers(),
+ ReferenceDocumentCsvUploadStatus.ERRORED,
+ "ValueError:CSV data for quota definitions missing header order_number",
+ ),
+ ],
+ )
+ def test_run_csv_upload_with_csv_data(
+ self,
+ preferential_rates_csv_data,
+ order_number_csv_data,
+ quota_definition_csv_data,
+ expected_status,
+ error_details_contains,
+ ):
+ csv_upload = CSVUploadFactory.create(
+ preferential_rates_csv_data=preferential_rates_csv_data,
+ order_number_csv_data=order_number_csv_data,
+ quota_definition_csv_data=quota_definition_csv_data,
+ )
+ # add geoarea
+ GeographicalAreaFactory.create(area_id="NZ")
+ target = ReferenceDocumentCSVImporter(csv_upload)
+
+ target.run()
+ assert csv_upload.status == expected_status
+ assert csv_upload.error_details == error_details_contains
+
+ def test_fails_when_ref_doc_version_not_editable(self):
+ csv_upload = CSVUploadFactory.create(
+ preferential_rates_csv_data=mock_preferential_rates_csv_data(),
+ order_number_csv_data=mock_order_number_csv_data(),
+ quota_definition_csv_data=mock_quota_definition_csv_data(),
+ )
+ # add geoarea
+ GeographicalAreaFactory.create(area_id="NZ")
+ ReferenceDocumentVersionFactory.create(
+ reference_document__area_id="NZ",
+ published=True,
+ version="1.0",
+ )
+ target = ReferenceDocumentCSVImporter(csv_upload)
+ target.run()
+ assert csv_upload.status == ReferenceDocumentCsvUploadStatus.ERRORED
+ assert (
+ csv_upload.error_details
+ == "Exception:Reference document version NZ:1.0 has status PUBLISHED and can not be altered."
+ )
diff --git a/reference_documents/urls.py b/reference_documents/urls.py
index 81d8aba7b..3e08f38b0 100644
--- a/reference_documents/urls.py
+++ b/reference_documents/urls.py
@@ -2,6 +2,9 @@
from rest_framework import routers
from reference_documents.views.alignment_report_views import AlignmentReportDetails
+from reference_documents.views.alignment_report_views import (
+ AlignmentReportRerunCheckDetails,
+)
from reference_documents.views.order_number_views import RefOrderNumberCreate
from reference_documents.views.order_number_views import RefOrderNumberDelete
from reference_documents.views.order_number_views import RefOrderNumberEdit
@@ -36,6 +39,18 @@
from reference_documents.views.rate_views import RefRateCreate
from reference_documents.views.rate_views import RefRateDelete
from reference_documents.views.rate_views import RefRateEdit
+from reference_documents.views.reference_document_csv_upload import (
+ ReferenceDocumentCsvUploadCreate,
+)
+from reference_documents.views.reference_document_csv_upload import (
+ ReferenceDocumentCsvUploadCreateSuccess,
+)
+from reference_documents.views.reference_document_csv_upload import (
+ ReferenceDocumentCsvUploadDetails,
+)
+from reference_documents.views.reference_document_csv_upload import (
+ ReferenceDocumentCsvUploadList,
+)
from reference_documents.views.reference_document_version_views import (
ReferenceDocumentVersionAlignmentCheck,
)
@@ -104,6 +119,28 @@
ReferenceDocumentCreate.as_view(),
name="create",
),
+ # reference document data CSV Upload
+ path(
+ "reference_documents/csv_uploads/",
+ ReferenceDocumentCsvUploadList.as_view(),
+ name="reference-document-csv-index",
+ ),
+ path(
+ "reference_documents/csv_uploads/
'
+ )
row_data = [
{
"text": alignment_report_check.check_name,
},
+ {
+ "text": alignment_report_check.updated_at,
+ },
+ {
+ "text": alignment_report_check.target_start_date.year,
+ },
{
"text": alignment_report_check.message,
},
{
"text": alignment_report_check.status,
},
+ {
+ "html": actions,
+ },
]
rows.append(row_data)
@@ -57,3 +94,77 @@ def get_context_data(self, *args, **kwargs):
context["alignment_check_table_headers"] = alignment_report_ctx.headers()
context["alignment_check_table_rows"] = alignment_report_ctx.rows()
return context
+
+
+class AlignmentReportRerunCheckDetails(PermissionRequiredMixin, DetailView):
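+ """
+ Re-runs a single alignment report check and stores the refreshed result,
+ so an individual check can be refreshed from the alignment report detail
+ page without re-running the whole report.
+ """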
+ template_name = "reference_documents/alignment_reports/rerun_check.jinja"
+ permission_required = "reference_documents.view_view_alignmentreport"
+ model = AlignmentReportCheck
+
+ def get(self, request, *args, **kwargs):
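+ """
+ Re-run the check recorded on this AlignmentReportCheck against current
+ TAP data, save the refreshed status and message, then redirect back to
+ the parent alignment report.
+ """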
+ self.object = self.get_object()
+ context = self.get_context_data(object=self.object)
+
+ # get check
+ check_class = self.get_check_class_by_name(self.object.check_name)
+ args = {}
+ if self.object.ref_rate:
+ args["ref_rate"] = self.object.ref_rate
+
+ if self.object.ref_order_number:
+ args["ref_order_number"] = self.object.ref_order_number
+
+ if self.object.ref_quota_definition:
+ args["ref_quota_definition"] = self.object.ref_quota_definition
+
+ if self.object.ref_quota_suspension:
+ args["ref_quota_suspension"] = self.object.ref_quota_suspension
+
+ check = check_class(**args)
+
+ status, message = check.run_check()
+
+ self.object.status = status
+ self.object.message = message
+ self.object.save()
+
+ return redirect(
+ "reference_documents:alignment-report-details",
+ version_pk=self.object.alignment_report.reference_document_version.pk,
+ pk=self.object.alignment_report.pk,
+ )
+
+ def get_context_data(self, *args, **kwargs):
+ context = super(AlignmentReportRerunCheckDetails, self).get_context_data(
+ *args,
+ **kwargs,
+ )
+
+ # row data
+ context["reference_document_version"] = kwargs[
+ "object"
+ ].alignment_report.reference_document_version
+ return context
+
+ def get_check_class_by_name(self, name):
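+ """
+ Return the check class whose name matches the stored check name,
+ searching rate, order number, quota definition and quota suspension
+ checks in turn; returns None when no class matches.
+ """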
+ for ref_rate_check in Checks.get_checks_for(BaseRateCheck):
+ if ref_rate_check.name == name:
+ return ref_rate_check
+
+ for order_number_check in Checks.get_checks_for(BaseOrderNumberCheck):
+ if order_number_check.name == name:
+ return order_number_check
+
+ for quota_definition_check in Checks.get_checks_for(
+ BaseQuotaDefinitionCheck,
+ ):
+ if quota_definition_check.name == name:
+ return quota_definition_check
+
+ for quota_suspension_check in Checks.get_checks_for(
+ BaseQuotaSuspensionCheck,
+ ):
+ if quota_suspension_check.name == name:
+ return quota_suspension_check
+
+ return None
diff --git a/reference_documents/views/reference_document_csv_upload.py b/reference_documents/views/reference_document_csv_upload.py
new file mode 100644
index 000000000..3d6731a94
--- /dev/null
+++ b/reference_documents/views/reference_document_csv_upload.py
@@ -0,0 +1,103 @@
+from django.contrib.auth.mixins import PermissionRequiredMixin
+from django.http import HttpResponseRedirect
+from django.views.generic import CreateView
+from django.views.generic import DetailView
+from django.views.generic import ListView
+from django.views.generic import TemplateView
+
+from common.views import WithPaginationListMixin
+from reference_documents.forms.reference_document_csv_upload_forms import (
+ ReferenceDocumentCreateCsvUploadForm,
+)
+from reference_documents.models import CSVUpload
+
+
+class ReferenceDocumentCsvUploadContext:
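+ """Builds the table headers and rows used to render the list of reference document CSV uploads."""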
+
+ def __init__(self, object_list):
+ self.object_list = object_list
+
+ def headers(self):
+ return [
+ {"text": "Date and time uploaded"},
+ {"text": "status"},
+ {"text": "CSV data for"},
+ {"text": "Actions"},
+ ]
+
+ def rows(self):
+ csv_uploads = []
+ for csv_upload in self.object_list:
+ actions = f'Details
'
+
+ csv_uploads.append(
+ [
+ {
+ "text": csv_upload.created_at.strftime("%Y/%m/%d, %H:%M:%S"),
+ },
+ {
+ "text": f"{csv_upload.status}",
+ },
+ {
+ "text": f"{csv_upload.csv_content_types()}",
+ },
+ {
+ "html": actions,
+ },
+ ],
+ )
+ return csv_uploads
+
+ def get_context(self):
+ return {
+ "reference_documents": self.rows(),
+ "reference_document_headers": self.headers(),
+ }
+
+
+class ReferenceDocumentCsvUploadList(
+ PermissionRequiredMixin,
+ WithPaginationListMixin,
+ ListView,
+):
+ template_name = "reference_documents/reference_document_csv_upload/index.jinja"
+ permission_required = "reference_documents.view_csvupload"
+ model = CSVUpload
+ paginate_by = 20
+
+ def get_queryset(self):
+ return CSVUpload.objects.all().order_by("-created_at")
+
+ def get_context_data(self, **kwargs):
+ context = super().get_context_data(**kwargs)
+ context.update(
+ ReferenceDocumentCsvUploadContext(
+ context["object_list"],
+ ).get_context(),
+ )
+ return context
+
+
+class ReferenceDocumentCsvUploadDetails(PermissionRequiredMixin, DetailView):
+ template_name = "reference_documents/reference_document_csv_upload/details.jinja"
+ permission_required = "reference_documents.view_csvupload"
+ model = CSVUpload
+
+
+class ReferenceDocumentCsvUploadCreate(PermissionRequiredMixin, CreateView):
+ template_name = "reference_documents/reference_document_csv_upload/create.jinja"
+ permission_required = "reference_documents.add_csvupload"
+ form_class = ReferenceDocumentCreateCsvUploadForm
+ success_url = "/reference_documents/csv_upload_succeeded/"
+
+ def form_valid(self, form):
+ # read files to string
+ form.save()
+ return HttpResponseRedirect(self.success_url)
+
+
+class ReferenceDocumentCsvUploadCreateSuccess(PermissionRequiredMixin, TemplateView):
+ template_name = (
+ "reference_documents/reference_document_csv_upload/create_success.jinja"
+ )
+ permission_required = "reference_documents.add_csvupload"