diff --git a/geonode/geoserver/management/commands/importlayers.py b/geonode/geoserver/management/commands/importlayers.py index b92ac407396..c6eb9e233b3 100644 --- a/geonode/geoserver/management/commands/importlayers.py +++ b/geonode/geoserver/management/commands/importlayers.py @@ -149,6 +149,7 @@ def execute(self): params[name] = os.path.basename(value.name) params["non_interactive"] = 'true' + params["action"] = 'upload' response = client.post( urljoin(self.host, "/api/v2/uploads/upload/"), auth=HTTPBasicAuth(self.username, self.password), diff --git a/geonode/resource/api/tests.py b/geonode/resource/api/tests.py index 1deaa3f9a47..72756f2179a 100644 --- a/geonode/resource/api/tests.py +++ b/geonode/resource/api/tests.py @@ -157,9 +157,7 @@ def test_endpoint_should_raise_error_if_pk_is_not_passed(self): def test_endpoint_should_return_the_source(self): # creating dummy execution request - obj = ExecutionRequest.objects.create( - user=self.superuser, func_name="import_new_resource", action="import", source="upload_workflow" - ) + obj = ExecutionRequest.objects.create(user=self.superuser, func_name="import_new_resource", action="upload") self.client.force_login(self.superuser) _url = f"{reverse('executionrequest-list')}/{obj.exec_id}" diff --git a/geonode/resource/enumerator.py b/geonode/resource/enumerator.py index 8ace0c22100..4d96b0aa291 100644 --- a/geonode/resource/enumerator.py +++ b/geonode/resource/enumerator.py @@ -22,6 +22,7 @@ class ExecutionRequestAction(enum.Enum): IMPORT = _("import") + UPLOAD = _("upload") CREATE = _("create") COPY = _("copy") DELETE = _("delete") diff --git a/geonode/resource/migrations/0009_remove_executionrequest_source_and_more.py b/geonode/resource/migrations/0009_remove_executionrequest_source_and_more.py new file mode 100644 index 00000000000..c547801136b --- /dev/null +++ b/geonode/resource/migrations/0009_remove_executionrequest_source_and_more.py @@ -0,0 +1,37 @@ +# Generated by Django 4.2.9 on 2024-10-18 10:41 + +from 
django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("resource", "0008_executionrequest_source"), + ] + + operations = [ + migrations.RemoveField( + model_name="executionrequest", + name="source", + ), + migrations.AlterField( + model_name="executionrequest", + name="action", + field=models.CharField( + choices=[ + ("import", "import"), + ("upload", "upload"), + ("create", "create"), + ("copy", "copy"), + ("delete", "delete"), + ("permissions", "permissions"), + ("update", "update"), + ("ingest", "ingest"), + ("unknown", "unknown"), + ], + default="unknown", + max_length=50, + null=True, + ), + ), + ] diff --git a/geonode/resource/models.py b/geonode/resource/models.py index 791b9f65d0f..f6993492225 100644 --- a/geonode/resource/models.py +++ b/geonode/resource/models.py @@ -60,5 +60,3 @@ class ExecutionRequest(models.Model): action = models.CharField( max_length=50, choices=ACTION_CHOICES, default=ExecutionRequestAction.UNKNOWN.value, null=True ) - - source = models.CharField(max_length=250, null=True, default=None) diff --git a/geonode/security/tests.py b/geonode/security/tests.py index d2707531bad..2c634ecabea 100644 --- a/geonode/security/tests.py +++ b/geonode/security/tests.py @@ -752,9 +752,7 @@ def test_dataset_permissions(self): bobby = get_user_model().objects.get(username="bobby") self.client.force_login(get_user_model().objects.get(username="admin")) - payload = { - "base_file": open(f"{project_dir}/tests/fixture/valid.geojson", "rb"), - } + payload = {"base_file": open(f"{project_dir}/tests/fixture/valid.geojson", "rb"), "action": "upload"} response = self.client.post(reverse("importer_upload"), data=payload) layer = ResourceHandlerInfo.objects.filter(execution_request=response.json()["execution_id"]).first().resource if layer is None: diff --git a/geonode/settings.py b/geonode/settings.py index 03807b16dbc..2be775af909 100644 --- a/geonode/settings.py +++ b/geonode/settings.py @@ -2240,56 +2240,6 @@ 
def get_geonode_catalogue_service(): "document_upload", ) -SUPPORTED_DATASET_FILE_TYPES = [ - { - "id": "shp", - "label": "ESRI Shapefile", - "format": "vector", - "ext": ["shp"], - "requires": ["shp", "prj", "dbf", "shx"], - "optional": ["xml", "sld"], - }, - { - "id": "tiff", - "label": "GeoTIFF", - "format": "raster", - "ext": ["tiff", "tif", "geotiff", "geotif"], - "mimeType": ["image/tiff"], - "optional": ["xml", "sld"], - }, - { - "id": "csv", - "label": "Comma Separated Value (CSV)", - "format": "vector", - "ext": ["csv"], - "mimeType": ["text/csv"], - "optional": ["xml", "sld"], - }, - { - "id": "zip", - "label": "Zip Archive", - "format": "archive", - "ext": ["zip"], - "mimeType": ["application/zip"], - "optional": ["xml", "sld"], - }, - { - "id": "xml", - "label": "XML Metadata File", - "format": "metadata", - "ext": ["xml"], - "mimeType": ["application/json"], - "needsFiles": ["shp", "prj", "dbf", "shx", "csv", "tiff", "zip", "sld"], - }, - { - "id": "sld", - "label": "Styled Layer Descriptor (SLD)", - "format": "metadata", - "ext": ["sld"], - "mimeType": ["application/json"], - "needsFiles": ["shp", "prj", "dbf", "shx", "csv", "tiff", "zip", "xml"], - }, -] INSTALLED_APPS += ( "dynamic_models", # "importer", diff --git a/geonode/storage/tests.py b/geonode/storage/tests.py index 43adc794ffa..9a4401c36d5 100644 --- a/geonode/storage/tests.py +++ b/geonode/storage/tests.py @@ -573,23 +573,6 @@ def test_zip_file_should_correctly_index_file_extensions(self): # extensions found more than once get indexed self.assertIsNotNone(_files.get("csv_file_1")) - @override_settings( - SUPPORTED_DATASET_FILE_TYPES=[ - {"id": "kmz", "label": "kmz", "format": "vector", "ext": ["kmz"]}, - {"id": "kml", "label": "kml", "format": "vector", "ext": ["kml"]}, - ] - ) - def test_zip_file_should_correctly_recognize_main_extension_with_kmz(self): - # reinitiate the storage manager with the zip file - storage_manager = self.sut( - remote_files={"base_file": 
os.path.join(f"{self.project_root}", "tests/data/Italy.kmz")} - ) - storage_manager.clone_remote_files() - - self.assertIsNotNone(storage_manager.data_retriever.temporary_folder) - _files = storage_manager.get_retrieved_paths() - self.assertTrue("doc.kml" in _files.get("base_file"), msg=f"files available: {_files}") - def test_zip_file_should_correctly_recognize_main_extension_with_shp(self): # zipping files storage_manager = self.sut(remote_files=self.local_files_paths) diff --git a/geonode/tests/test_utils.py b/geonode/tests/test_utils.py index e39600b60f2..20ff8b67f4c 100644 --- a/geonode/tests/test_utils.py +++ b/geonode/tests/test_utils.py @@ -18,7 +18,6 @@ ######################################################################### import copy from unittest import TestCase -from django.test import override_settings from unittest.mock import patch from datetime import datetime, timedelta @@ -32,8 +31,7 @@ from geonode.geoserver.helpers import set_attributes from geonode.tests.base import GeoNodeBaseTestSupport from geonode.br.management.commands.utils.utils import ignore_time -from geonode.utils import copy_tree, get_supported_datasets_file_types, bbox_to_wkt -from geonode import settings +from geonode.utils import copy_tree, bbox_to_wkt class TestCopyTree(GeoNodeBaseTestSupport): @@ -205,39 +203,6 @@ def setUp(self): }, ] - @override_settings( - ADDITIONAL_DATASET_FILE_TYPES=[ - {"id": "dummy_type", "label": "Dummy Type", "format": "dummy", "ext": ["dummy"]}, - ] - ) - def test_should_append_additional_type_if_config_is_provided(self): - prev_count = len(settings.SUPPORTED_DATASET_FILE_TYPES) - supported_types = get_supported_datasets_file_types() - supported_keys = [t.get("id") for t in supported_types] - self.assertIn("dummy_type", supported_keys) - self.assertEqual(len(supported_keys), prev_count + 1) - - @override_settings( - ADDITIONAL_DATASET_FILE_TYPES=[ - { - "id": "shp", - "label": "Replaced type", - "format": "vector", - "ext": ["shp"], - "requires": 
["shp", "prj", "dbf", "shx"], - "optional": ["xml", "sld"], - }, - ] - ) - def test_should_replace_the_type_id_if_already_exists(self): - prev_count = len(settings.SUPPORTED_DATASET_FILE_TYPES) - supported_types = get_supported_datasets_file_types() - supported_keys = [t.get("id") for t in supported_types] - self.assertIn("shp", supported_keys) - self.assertEqual(len(supported_keys), prev_count) - shp_type = [t for t in supported_types if t["id"] == "shp"][0] - self.assertEqual(shp_type["label"], "Replaced type") - class TestRegionsCrossingDateLine(TestCase): def setUp(self): diff --git a/geonode/upload/api/serializer.py b/geonode/upload/api/serializer.py index 4657ddb9f3d..9d807c44bb1 100644 --- a/geonode/upload/api/serializer.py +++ b/geonode/upload/api/serializer.py @@ -34,7 +34,7 @@ class Meta: "sld_file", "store_spatial_files", "skip_existing_layers", - "source", + "action", ) base_file = serializers.FileField() @@ -42,7 +42,7 @@ class Meta: sld_file = serializers.FileField(required=False) store_spatial_files = serializers.BooleanField(required=False, default=True) skip_existing_layers = serializers.BooleanField(required=False, default=False) - source = serializers.CharField(required=False, default="upload") + action = serializers.CharField(required=True) class OverwriteImporterSerializer(ImporterSerializer): diff --git a/geonode/upload/api/tests.py b/geonode/upload/api/tests.py index 5d6aa237c6f..95f722715c9 100644 --- a/geonode/upload/api/tests.py +++ b/geonode/upload/api/tests.py @@ -62,9 +62,7 @@ def test_upload_method_not_allowed(self): def test_raise_exception_if_file_is_not_a_handled(self): self.client.force_login(get_user_model().objects.get(username="admin")) - payload = { - "base_file": SimpleUploadedFile(name="file.invalid", content=b"abc"), - } + payload = {"base_file": SimpleUploadedFile(name="file.invalid", content=b"abc"), "action": "upload"} response = self.client.post(self.url, data=payload) self.assertEqual(500, response.status_code) @@ -76,6 
+74,7 @@ def test_gpkg_raise_error_with_invalid_payload(self): content=b'{"type": "FeatureCollection", "content": "some-content"}', ), "store_spatial_files": "invalid", + "action": "upload", } expected = { "success": False, @@ -99,6 +98,7 @@ def test_gpkg_task_is_called(self, patch_upload): content=b'{"type": "FeatureCollection", "content": "some-content"}', ), "store_spatial_files": True, + "action": "upload", } response = self.client.post(self.url, data=payload) @@ -116,6 +116,7 @@ def test_geojson_task_is_called(self, patch_upload): content=b'{"type": "FeatureCollection", "content": "some-content"}', ), "store_spatial_files": True, + "action": "upload", } response = self.client.post(self.url, data=payload) @@ -133,6 +134,7 @@ def test_zip_file_is_unzip_and_the_handler_is_found(self, patch_upload): "base_file": open(f"{project_dir}/tests/fixture/valid.zip", "rb"), "zip_file": open(f"{project_dir}/tests/fixture/valid.zip", "rb"), "store_spatial_files": True, + "action": "upload", } response = self.client.post(self.url, data=payload) @@ -191,6 +193,7 @@ def test_asset_is_created_before_the_import_start(self, patch_upload): content=b'{"type": "FeatureCollection", "content": "some-content"}', ), "store_spatial_files": True, + "action": "upload", } response = self.client.post(self.url, data=payload) @@ -221,6 +224,7 @@ def test_asset_should_be_deleted_if_created_during_with_exception( content=b'{"type": "FeatureCollection", "content": "some-content"}', ), "store_spatial_files": True, + "action": "upload", } response = self.client.post(self.url, data=payload) diff --git a/geonode/upload/api/views.py b/geonode/upload/api/views.py index 53f86bab425..67fafe6a300 100644 --- a/geonode/upload/api/views.py +++ b/geonode/upload/api/views.py @@ -176,7 +176,6 @@ def create(self, request, *args, **kwargs): ) handler = orchestrator.get_handler(_data) - # not file but handler means that is a remote resource if handler: asset = None @@ -191,8 +190,6 @@ def create(self, request, 
*args, **kwargs): self.validate_upload(request, storage_manager) - action = ExecutionRequestAction.IMPORT.value - input_params = { **{"files": files, "handler_module_path": str(handler)}, **extracted_params, @@ -205,7 +202,7 @@ def create(self, request, *args, **kwargs): "asset_module_path": f"{asset.__module__}.{asset.__class__.__name__}", } ) - + action = input_params.get("action") execution_id = orchestrator.create_execution_request( user=request.user, func_name=next(iter(handler.get_task_list(action=action))), @@ -213,7 +210,6 @@ def create(self, request, *args, **kwargs): input_params=input_params, action=action, name=_file.name if _file else extracted_params.get("title", None), - source=extracted_params.get("source"), ) sig = import_orchestrator.s(files, str(execution_id), handler=str(handler), action=action) @@ -234,7 +230,7 @@ def create(self, request, *args, **kwargs): logger.exception(e) raise ImportException(detail=e.args[0] if len(e.args) > 0 else e) - raise ImportException(detail="No handlers found for this dataset type") + raise ImportException(detail="No handlers found for this dataset type/action") def _handle_asset(self, request, asset_dir, storage_manager, _data, handler): if storage_manager is None: @@ -328,7 +324,6 @@ def copy(self, request, *args, **kwargs): **{"handler_module_path": handler_module_path}, **extracted_params, }, - source="importer_copy", ) sig = import_orchestrator.s( diff --git a/geonode/upload/celery_tasks.py b/geonode/upload/celery_tasks.py index 6a270d70133..2aeb9170cf2 100644 --- a/geonode/upload/celery_tasks.py +++ b/geonode/upload/celery_tasks.py @@ -90,7 +90,7 @@ def import_orchestrator( step="start_import", layer_name=None, alternate=None, - action=exa.IMPORT.value, + action=exa.UPLOAD.value, **kwargs, ): """ @@ -179,7 +179,7 @@ def import_resource(self, execution_id, /, handler_module_path, action, **kwargs call_rollback_function( execution_id, handlers_module_path=handler_module_path, - prev_action=exa.IMPORT.value, + 
prev_action=exa.UPLOAD.value, layer=None, alternate=None, error=e, @@ -309,7 +309,7 @@ def create_geonode_resource( layer_name: Optional[str] = None, alternate: Optional[str] = None, handler_module_path: str = None, - action: str = exa.IMPORT.value, + action: str = exa.UPLOAD.value, **kwargs, ): """ diff --git a/geonode/upload/handlers/README.md b/geonode/upload/handlers/README.md index 255c2b08331..1c85117d55e 100644 --- a/geonode/upload/handlers/README.md +++ b/geonode/upload/handlers/README.md @@ -32,7 +32,7 @@ class BaseVectorFileHandler(BaseHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { + TASKS = { exa.IMPORT.value: (), # define the list of the step (celery task) needed to execute the action for the resource exa.COPY.value: (), exa.DELETE.value: (), @@ -242,7 +242,7 @@ class NewVectorFileHandler(BaseVectorFileHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { + TASKS = { exa.IMPORT.value: ( "start_import", "geonode.upload.import_resource", diff --git a/geonode/upload/handlers/apps.py b/geonode/upload/handlers/apps.py index 7e50e59aae5..72e26b7c585 100644 --- a/geonode/upload/handlers/apps.py +++ b/geonode/upload/handlers/apps.py @@ -41,21 +41,3 @@ def run_setup_hooks(*args, **kwargs): for item in _handlers: item.register() logger.info(f"The following handlers have been registered: {', '.join(available_handlers)}") - - _available_settings = [ - import_string(module_path)().supported_file_extension_config - for module_path in available_handlers - if import_string(module_path)().supported_file_extension_config - ] - # injecting the new config required for FE - supported_type = [] - supported_type.extend(_available_settings) - if not getattr(settings, "ADDITIONAL_DATASET_FILE_TYPES", None): - setattr(settings, "ADDITIONAL_DATASET_FILE_TYPES", supported_type) - elif "gpkg" not in [x.get("id") for x in settings.ADDITIONAL_DATASET_FILE_TYPES]: - 
settings.ADDITIONAL_DATASET_FILE_TYPES.extend(supported_type) - setattr( - settings, - "ADDITIONAL_DATASET_FILE_TYPES", - settings.ADDITIONAL_DATASET_FILE_TYPES, - ) diff --git a/geonode/upload/handlers/base.py b/geonode/upload/handlers/base.py index 5afb57bbe76..1c2407bf6c1 100644 --- a/geonode/upload/handlers/base.py +++ b/geonode/upload/handlers/base.py @@ -46,8 +46,8 @@ class BaseHandler(ABC): REGISTRY = [] - ACTIONS = { - exa.IMPORT.value: (), + TASKS = { + exa.UPLOAD.value: (), exa.COPY.value: (), exa.DELETE.value: (), exa.UPDATE.value: (), @@ -70,9 +70,9 @@ def get_registry(cls): @classmethod def get_task_list(cls, action) -> tuple: - if action not in cls.ACTIONS: + if action not in cls.TASKS: raise Exception("The requested action is not implemented yet") - return cls.ACTIONS.get(action) + return cls.TASKS.get(action) @property def default_geometry_column_name(self): @@ -140,7 +140,7 @@ def can_do(action) -> bool: the Handler must be ready to handle them. If is not in the actual flow the already in place flow is followd """ - return action in BaseHandler.ACTIONS + return action in BaseHandler.TASKS @staticmethod def extract_params_from_data(_data): @@ -300,7 +300,7 @@ def overwrite_resourcehandlerinfo( return self.create_resourcehandlerinfo(handler_module_path, resource, execution_id, **kwargs) def rollback(self, exec_id, rollback_from_step, action_to_rollback, *args, **kwargs): - steps = self.ACTIONS.get(action_to_rollback) + steps = self.TASKS.get(action_to_rollback) if rollback_from_step not in steps: logger.info(f"Step not found {rollback_from_step}, skipping") diff --git a/geonode/upload/handlers/common/metadata.py b/geonode/upload/handlers/common/metadata.py index 35374a42a59..06ab95acc1c 100644 --- a/geonode/upload/handlers/common/metadata.py +++ b/geonode/upload/handlers/common/metadata.py @@ -17,12 +17,9 @@ # ######################################################################### import logging -from geonode.resource.enumerator import 
ExecutionRequestAction as exa from geonode.upload.handlers.base import BaseHandler -from geonode.upload.handlers.utils import UploadSourcesEnum from geonode.upload.models import ResourceHandlerInfo from geonode.upload.handlers.xml.serializer import MetadataFileSerializer -from geonode.upload.utils import ImporterRequestAction as ira from geonode.upload.orchestrator import orchestrator from django.shortcuts import get_object_or_404 from geonode.layers.models import Dataset @@ -36,24 +33,6 @@ class MetadataFileHandler(BaseHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { - exa.IMPORT.value: ("start_import", "geonode.upload.import_resource"), - ira.ROLLBACK.value: ( - "start_rollback", - "geonode.upload.rollback", - ), - } - - @staticmethod - def can_handle(_data) -> bool: - """ - This endpoint will return True or False if with the info provided - the handler is able to handle the file or not - """ - if _data.get("source", None) == UploadSourcesEnum.resource_file_upload.value: - return True - return False - @staticmethod def has_serializer(data) -> bool: _base = data.get("base_file") @@ -79,7 +58,7 @@ def extract_params_from_data(_data, action=None): "overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), "resource_pk": _data.pop("resource_pk", None), "store_spatial_file": _data.pop("store_spatial_files", "True"), - "source": _data.pop("source", "resource_file_upload"), + "action": _data.pop("action"), }, _data @staticmethod diff --git a/geonode/upload/handlers/common/raster.py b/geonode/upload/handlers/common/raster.py index e581d739d1f..fd8e59b54ff 100644 --- a/geonode/upload/handlers/common/raster.py +++ b/geonode/upload/handlers/common/raster.py @@ -35,7 +35,7 @@ from geonode.upload.celery_tasks import ErrorBaseTaskClass, import_orchestrator from geonode.upload.handlers.base import BaseHandler from geonode.upload.handlers.geotiff.exceptions import InvalidGeoTiffException -from geonode.upload.handlers.utils 
import UploadSourcesEnum, create_alternate, should_be_imported +from geonode.upload.handlers.utils import create_alternate, should_be_imported from geonode.upload.models import ResourceHandlerInfo from geonode.upload.orchestrator import orchestrator from osgeo import gdal @@ -83,16 +83,6 @@ def is_valid(files, user, **kwargs): raise ImportException(stderr) return True - @staticmethod - def can_handle(_data) -> bool: - """ - This endpoint will return True or False if with the info provided - the handler is able to handle the file or not - """ - if _data.get("source", None) != UploadSourcesEnum.upload.value: - return False - return True - @staticmethod def has_serializer(_data) -> bool: """ @@ -107,7 +97,7 @@ def can_do(action) -> bool: This endpoint will return True or False if with the info provided the handler is able to handle the file or not """ - return action in BaseHandler.ACTIONS + return action in BaseHandler.TASKS @staticmethod def create_error_log(exc, task_name, *args): @@ -132,7 +122,7 @@ def extract_params_from_data(_data, action=None): "overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), "resource_pk": _data.pop("resource_pk", None), "store_spatial_file": _data.pop("store_spatial_files", "True"), - "source": _data.pop("source", "upload"), + "action": _data.pop("action", "upload"), }, _data @staticmethod @@ -285,6 +275,8 @@ def import_resource(self, files: dict, execution_id: str, **kwargs) -> str: dataset = Dataset.objects.filter(pk=_exec.input_params.get("resource_pk")).first() if not dataset: raise ImportException("The dataset selected for the ovewrite does not exists") + if dataset.is_vector(): + raise Exception("cannot override a vector dataset with a raster one") alternate = dataset.alternate.split(":")[-1] orchestrator.update_execution_request_obj(_exec, {"geonode_resource": dataset}) else: @@ -293,6 +285,8 @@ def import_resource(self, files: dict, execution_id: str, **kwargs) -> str: dataset_exists = user_datasets.exists() 
if dataset_exists and should_be_overwritten: + if user_datasets.first().is_vector(): + raise Exception("cannot override a vector dataset with a raster one") + layer_name, alternate = ( layer_name, user_datasets.first().alternate.split(":")[-1], @@ -310,7 +304,7 @@ def import_resource(self, files: dict, execution_id: str, **kwargs) -> str: "geonode.upload.import_resource", layer_name, alternate, - exa.IMPORT.value, + exa.UPLOAD.value, ) ) return layer_name, alternate, execution_id diff --git a/geonode/upload/handlers/common/remote.py b/geonode/upload/handlers/common/remote.py index f72579e2708..44105755902 100755 --- a/geonode/upload/handlers/common/remote.py +++ b/geonode/upload/handlers/common/remote.py @@ -47,8 +47,8 @@ class BaseRemoteResourceHandler(BaseHandler): As first implementation only remote 3dtiles are supported """ - ACTIONS = { - exa.IMPORT.value: ( + TASKS = { + exa.UPLOAD.value: ( "start_import", "geonode.upload.import_resource", "geonode.upload.create_geonode_resource", @@ -105,7 +105,7 @@ def extract_params_from_data(_data, action=None): return {"title": title.pop("title"), "store_spatial_file": True}, _data return { - "source": _data.pop("source", "upload"), + "action": _data.pop("action", "upload"), "title": _data.pop("title", None), "url": _data.pop("url", None), "type": _data.pop("type", None), @@ -163,7 +163,7 @@ def import_resource(self, files: dict, execution_id: str, **kwargs) -> str: "geonode.upload.import_resource", layer_name, alternate, - exa.IMPORT.value, + exa.UPLOAD.value, ) ) return layer_name, alternate, execution_id diff --git a/geonode/upload/handlers/common/serializer.py b/geonode/upload/handlers/common/serializer.py index f1b12b7d8db..c1eab398781 100644 --- a/geonode/upload/handlers/common/serializer.py +++ b/geonode/upload/handlers/common/serializer.py @@ -26,7 +26,7 @@ class Meta: ref_name = "RemoteResourceSerializer" model = ResourceBase view_name = "importer_upload" - fields = ("url", "title", "type", "source", 
"overwrite_existing_layer") + fields = ("url", "title", "type", "action", "overwrite_existing_layer") url = serializers.URLField(required=True, help_text="URL of the remote service / resource") title = serializers.CharField(required=True, help_text="Title of the resource. Can be None or Empty") @@ -34,6 +34,6 @@ class Meta: required=True, help_text="Remote resource type, for example wms or 3dtiles. Is used by the handler to understand if can handle the resource", ) - source = serializers.CharField(required=False, default="upload") + action = serializers.CharField(required=True) overwrite_existing_layer = serializers.BooleanField(required=False, default=False) diff --git a/geonode/upload/handlers/common/test_remote.py b/geonode/upload/handlers/common/test_remote.py index 89430d618b9..c61b02981f1 100644 --- a/geonode/upload/handlers/common/test_remote.py +++ b/geonode/upload/handlers/common/test_remote.py @@ -82,16 +82,16 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.import_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 3) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 3) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_geojson(self): expected = ( "start_copy", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 2) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 2) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_url_is_invalid(self): with self.assertRaises(ImportException) as _exc: @@ -106,7 +106,7 @@ def test_is_valid_should_pass_with_valid_url(self): def test_extract_params_from_data(self): actual, _data = self.handler.extract_params_from_data( _data={"defaults": '{"url": 
"http://abc123defsadsa.org", "title": "Remote Title", "type": "3dtiles"}'}, - action="import", + action="upload", ) self.assertTrue("title" in actual) self.assertTrue("url" in actual) diff --git a/geonode/upload/handlers/common/vector.py b/geonode/upload/handlers/common/vector.py index 3aa5a48040c..2aed508cb82 100644 --- a/geonode/upload/handlers/common/vector.py +++ b/geonode/upload/handlers/common/vector.py @@ -39,7 +39,6 @@ from geonode.upload.handlers.utils import ( GEOM_TYPE_MAPPING, STANDARD_TYPE_MAPPING, - UploadSourcesEnum, drop_dynamic_model_schema, ) from geonode.resource.manager import resource_manager @@ -55,6 +54,7 @@ from django.db.models import Q import pyproj from geonode.geoserver.security import delete_dataset_cache, set_geowebcache_invalidate_cache +from geonode.upload.utils import ImporterRequestAction as ira logger = logging.getLogger("importer") @@ -65,6 +65,32 @@ class BaseVectorFileHandler(BaseHandler): It must provide the task_lists required to comple the upload """ + TASKS = { + exa.UPLOAD.value: ( + "start_import", + "geonode.upload.import_resource", + "geonode.upload.publish_resource", + "geonode.upload.create_geonode_resource", + ), + exa.COPY.value: ( + "start_copy", + "geonode.upload.copy_dynamic_model", + "geonode.upload.copy_geonode_data_table", + "geonode.upload.publish_resource", + "geonode.upload.copy_geonode_resource", + ), + ira.ROLLBACK.value: ( + "start_rollback", + "geonode.upload.rollback", + ), + ira.REPLACE.value: ( + "start_import", + "geonode.upload.import_resource", + "geonode.upload.publish_resource", + "geonode.upload.create_geonode_resource", + ), + } + @property def default_geometry_column_name(self): return "geometry" @@ -99,7 +125,7 @@ def can_handle(_data) -> bool: This endpoint will return True or False if with the info provided the handler is able to handle the file or not """ - if _data.get("source", None) != UploadSourcesEnum.upload.value: + if _data.get("action", None) not in BaseVectorFileHandler.TASKS: 
return False return True @@ -117,7 +143,7 @@ def can_do(action) -> bool: This endpoint will return True or False if with the info provided the handler is able to handle the file or not """ - return action in BaseHandler.ACTIONS + return action in BaseHandler.TASKS @staticmethod def create_error_log(exc, task_name, *args): @@ -142,7 +168,7 @@ def extract_params_from_data(_data, action=None): "overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), "resource_pk": _data.pop("resource_pk", None), "store_spatial_file": _data.pop("store_spatial_files", "True"), - "source": _data.pop("source", "upload"), + "action": _data.pop("action", "upload"), }, _data @staticmethod @@ -431,6 +457,9 @@ def find_alternate_by_dataset(self, _exec_obj, layer_name, should_be_overwritten dataset = Dataset.objects.filter(pk=_exec_obj.input_params.get("resource_pk")).first() if not dataset: raise ImportException("The dataset selected for the ovewrite does not exists") + if should_be_overwritten: + if not dataset.is_vector(): + raise Exception("Cannot override a raster dataset with a vector one") alternate = dataset.alternate.split(":") return alternate[-1] @@ -438,6 +467,9 @@ def find_alternate_by_dataset(self, _exec_obj, layer_name, should_be_overwritten dataset_available = Dataset.objects.filter(alternate__iexact=f"{workspace.name}:{layer_name}") dataset_exists = dataset_available.exists() + if dataset_exists and should_be_overwritten: + if not dataset_available.first().is_vector(): + raise Exception("Cannot override a raster dataset with a vector one") if dataset_exists and should_be_overwritten: alternate = dataset_available.first().alternate.split(":")[-1] @@ -851,7 +883,7 @@ actual_step, layer_name, alternate, - exa.IMPORT.value, + exa.UPLOAD.value, ) import_orchestrator.apply_async(task_params, kwargs) @@ -859,7 +891,7 @@ call_rollback_function( execution_id, handlers_module_path=handlers_module_path, - prev_action=exa.IMPORT.value, + 
prev_action=exa.UPLOAD.value, layer=layer_name, alternate=alternate, error=e, @@ -927,7 +959,7 @@ def import_with_ogr2ogr( call_rollback_function( execution_id, handlers_module_path=handler_module_path, - prev_action=exa.IMPORT.value, + prev_action=exa.UPLOAD.value, layer=original_name, alternate=alternate, error=e, diff --git a/geonode/upload/handlers/csv/handler.py b/geonode/upload/handlers/csv/handler.py index 1e3483a25a8..3d26a4d6e66 100644 --- a/geonode/upload/handlers/csv/handler.py +++ b/geonode/upload/handlers/csv/handler.py @@ -29,7 +29,6 @@ from dynamic_models.models import ModelSchema from geonode.upload.handlers.common.vector import BaseVectorFileHandler from geonode.upload.handlers.utils import GEOM_TYPE_MAPPING -from geonode.upload.utils import ImporterRequestAction as ira logger = logging.getLogger("importer") @@ -40,26 +39,6 @@ class CSVFileHandler(BaseVectorFileHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { - exa.IMPORT.value: ( - "start_import", - "geonode.upload.import_resource", - "geonode.upload.publish_resource", - "geonode.upload.create_geonode_resource", - ), - exa.COPY.value: ( - "start_copy", - "geonode.upload.copy_dynamic_model", - "geonode.upload.copy_geonode_data_table", - "geonode.upload.publish_resource", - "geonode.upload.copy_geonode_resource", - ), - ira.ROLLBACK.value: ( - "start_rollback", - "geonode.upload.rollback", - ), - } - possible_geometry_column_name = ["geom", "geometry", "wkt_geom", "the_geom"] possible_lat_column = ["latitude", "lat", "y"] possible_long_column = ["longitude", "long", "x"] @@ -69,11 +48,15 @@ class CSVFileHandler(BaseVectorFileHandler): def supported_file_extension_config(self): return { "id": "csv", - "label": "CSV", - "format": "vector", - "mimeType": ["text/csv"], - "ext": ["csv"], - "optional": ["sld", "xml"], + "formats": [ + { + "label": "CSV", + "required_ext": ["csv"], + "optional_ext": ["sld", "xml"], + } + ], + "actions": list(self.TASKS.keys()), + 
"type": "vector", } @staticmethod diff --git a/geonode/upload/handlers/csv/tests.py b/geonode/upload/handlers/csv/tests.py index dc75996bef4..6321b880a88 100644 --- a/geonode/upload/handlers/csv/tests.py +++ b/geonode/upload/handlers/csv/tests.py @@ -46,7 +46,7 @@ def setUpClass(cls): cls.missing_geom = f"{project_dir}/tests/fixture/missing_geom.csv" cls.user, _ = get_user_model().objects.get_or_create(username="admin") cls.invalid_files = {"base_file": cls.invalid_csv} - cls.valid_files = {"base_file": cls.valid_csv, "source": "upload"} + cls.valid_files = {"base_file": cls.valid_csv, "action": "upload"} cls.owner = get_user_model().objects.first() cls.layer = create_single_dataset(name="test", owner=cls.owner) @@ -57,8 +57,8 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.publish_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 4) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 4) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_geojson(self): expected = ( @@ -68,8 +68,8 @@ def test_task_list_is_the_expected_one_geojson(self): "geonode.upload.publish_resource", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 5) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 5) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_csv_is_invalid(self): with self.assertRaises(InvalidCSVException) as _exc: diff --git a/geonode/upload/handlers/geojson/handler.py b/geonode/upload/handlers/geojson/handler.py index 6060be709bf..eb4c740463d 100644 --- a/geonode/upload/handlers/geojson/handler.py +++ b/geonode/upload/handlers/geojson/handler.py @@ -19,11 +19,9 @@ import json import logging import os -from 
geonode.resource.enumerator import ExecutionRequestAction as exa from geonode.upload.utils import UploadLimitValidator from geonode.upload.handlers.common.vector import BaseVectorFileHandler from osgeo import ogr -from geonode.upload.utils import ImporterRequestAction as ira from geonode.upload.handlers.geojson.exceptions import InvalidGeoJsonException @@ -36,34 +34,24 @@ class GeoJsonFileHandler(BaseVectorFileHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { - exa.IMPORT.value: ( - "start_import", - "geonode.upload.import_resource", - "geonode.upload.publish_resource", - "geonode.upload.create_geonode_resource", - ), - exa.COPY.value: ( - "start_copy", - "geonode.upload.copy_dynamic_model", - "geonode.upload.copy_geonode_data_table", - "geonode.upload.publish_resource", - "geonode.upload.copy_geonode_resource", - ), - ira.ROLLBACK.value: ( - "start_rollback", - "geonode.upload.rollback", - ), - } - @property def supported_file_extension_config(self): return { "id": "geojson", - "label": "GeoJSON", - "format": "vector", - "ext": ["json", "geojson"], - "optional": ["xml", "sld"], + "formats": [ + { + "label": "GeoJSON", + "required_ext": ["geojson"], + "optional_ext": ["sld", "xml"], + }, + { + "label": "GeoJSON", + "required_ext": ["json"], + "optional_ext": ["sld", "xml"], + }, + ], + "actions": list(self.TASKS.keys()), + "type": "vector", } @staticmethod diff --git a/geonode/upload/handlers/geojson/tests.py b/geonode/upload/handlers/geojson/tests.py index acf5913e74e..ba222c2dec3 100644 --- a/geonode/upload/handlers/geojson/tests.py +++ b/geonode/upload/handlers/geojson/tests.py @@ -43,7 +43,7 @@ def setUpClass(cls): cls.invalid_geojson = f"{project_dir}/tests/fixture/invalid.geojson" cls.user, _ = get_user_model().objects.get_or_create(username="admin") cls.invalid_files = {"base_file": cls.invalid_geojson} - cls.valid_files = {"base_file": cls.valid_geojson, "source": "upload"} + cls.valid_files = {"base_file": 
cls.valid_geojson, "action": "upload"} cls.owner = get_user_model().objects.first() cls.layer = create_single_dataset(name="stazioni_metropolitana", owner=cls.owner) @@ -54,8 +54,8 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.publish_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 4) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 4) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_copy(self): expected = ( @@ -65,8 +65,8 @@ def test_task_list_is_the_expected_one_copy(self): "geonode.upload.publish_resource", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 5) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 5) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_parallelism_is_met(self): parallelism, created = UploadParallelismLimit.objects.get_or_create(slug="default_max_parallel_uploads") diff --git a/geonode/upload/handlers/geotiff/handler.py b/geonode/upload/handlers/geotiff/handler.py index e49584a9639..232440360df 100644 --- a/geonode/upload/handlers/geotiff/handler.py +++ b/geonode/upload/handlers/geotiff/handler.py @@ -34,8 +34,8 @@ class GeoTiffFileHandler(BaseRasterFileHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { - exa.IMPORT.value: ( + TASKS = { + exa.UPLOAD.value: ( "start_import", "geonode.upload.import_resource", "geonode.upload.publish_resource", @@ -51,17 +51,42 @@ class GeoTiffFileHandler(BaseRasterFileHandler): "start_rollback", "geonode.upload.rollback", ), + ira.REPLACE.value: ( + "start_import", + "geonode.upload.import_resource", + "geonode.upload.publish_resource", + "geonode.upload.create_geonode_resource", + ), } @property 
def supported_file_extension_config(self): return { "id": "tiff", - "label": "GeoTIFF", - "format": "raster", - "ext": ["tiff", "tif", "geotiff", "geotif"], - "mimeType": ["image/tiff"], - "optional": ["xml", "sld"], + "formats": [ + { + "label": "TIFF", + "required_ext": ["tiff"], + "optional_ext": ["xml", "sld"], + }, + { + "label": "TIF", + "required_ext": ["tif"], + "optional_ext": ["xml", "sld"], + }, + { + "label": "GeoTIFF", + "required_ext": ["geotiff"], + "optional_ext": ["xml", "sld"], + }, + { + "label": "GeoTIF", + "required_ext": ["geotif"], + "optional_ext": ["xml", "sld"], + }, + ], + "actions": list(self.TASKS.keys()), + "type": "raster", } @staticmethod @@ -74,7 +99,7 @@ def can_handle(_data) -> bool: if not base: return False ext = base.split(".")[-1] if isinstance(base, str) else base.name.split(".")[-1] - return ext in ["tiff", "geotiff", "tif", "geotif"] and BaseRasterFileHandler.can_handle(_data) + return ext in ["tiff", "geotiff", "tif", "geotif"] and _data.get("action", None) in GeoTiffFileHandler.TASKS @staticmethod def is_valid(files, user, **kwargs): diff --git a/geonode/upload/handlers/geotiff/tests.py b/geonode/upload/handlers/geotiff/tests.py index 882236409a9..be18b958896 100644 --- a/geonode/upload/handlers/geotiff/tests.py +++ b/geonode/upload/handlers/geotiff/tests.py @@ -35,7 +35,7 @@ def setUpClass(cls): super().setUpClass() cls.handler = GeoTiffFileHandler() cls.valid_tiff = f"{project_dir}/tests/fixture/test_raster.tif" - cls.valid_files = {"base_file": cls.valid_tiff, "source": "upload"} + cls.valid_files = {"base_file": cls.valid_tiff, "action": "upload"} cls.user, _ = get_user_model().objects.get_or_create(username="admin") cls.invalid_tiff = {"base_file": "invalid.file.foo"} cls.owner = get_user_model().objects.first() @@ -48,8 +48,8 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.publish_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 4) - 
self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 4) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_copy(self): expected = ( @@ -58,8 +58,8 @@ def test_task_list_is_the_expected_one_copy(self): "geonode.upload.publish_resource", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 4) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 4) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_parallelism_is_met(self): parallelism, created = UploadParallelismLimit.objects.get_or_create(slug="default_max_parallel_uploads") diff --git a/geonode/upload/handlers/gpkg/handler.py b/geonode/upload/handlers/gpkg/handler.py index 1742d12c9e1..59553de453a 100644 --- a/geonode/upload/handlers/gpkg/handler.py +++ b/geonode/upload/handlers/gpkg/handler.py @@ -37,8 +37,8 @@ class GPKGFileHandler(BaseVectorFileHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { - exa.IMPORT.value: ( + TASKS = { + exa.UPLOAD.value: ( "start_import", "geonode.upload.import_resource", "geonode.upload.publish_resource", @@ -61,9 +61,14 @@ class GPKGFileHandler(BaseVectorFileHandler): def supported_file_extension_config(self): return { "id": "gpkg", - "label": "GeoPackage", - "format": "vector", - "ext": ["gpkg"], + "formats": [ + { + "label": "GeoPackage", + "required_ext": ["gpkg"], + } + ], + "actions": list(self.TASKS.keys()), + "type": "vector", } @property @@ -84,9 +89,9 @@ def can_handle(_data) -> bool: base = _data.get("base_file") if not base: return False - return ( - base.endswith(".gpkg") if isinstance(base, str) else base.name.endswith(".gpkg") - ) and BaseVectorFileHandler.can_handle(_data) + return (base.endswith(".gpkg") if isinstance(base, str) else 
base.name.endswith(".gpkg")) and _data.get( + "action", None + ) in GPKGFileHandler.TASKS @staticmethod def is_valid(files, user, **kwargs): diff --git a/geonode/upload/handlers/gpkg/tests.py b/geonode/upload/handlers/gpkg/tests.py index 32770d5c393..8ea31059548 100644 --- a/geonode/upload/handlers/gpkg/tests.py +++ b/geonode/upload/handlers/gpkg/tests.py @@ -44,7 +44,7 @@ def setUpClass(cls): cls.invalid_gpkg = f"{project_dir}/tests/fixture/invalid.gpkg" cls.user, _ = get_user_model().objects.get_or_create(username="admin") cls.invalid_files = {"base_file": cls.invalid_gpkg} - cls.valid_files = {"base_file": cls.valid_gpkg, "source": "upload"} + cls.valid_files = {"base_file": cls.valid_gpkg, "action": "upload"} cls.owner = get_user_model().objects.first() cls.layer = create_single_dataset(name="stazioni_metropolitana", owner=cls.owner) @@ -55,8 +55,8 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.publish_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 4) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 4) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_geojson(self): expected = ( @@ -66,8 +66,8 @@ def test_task_list_is_the_expected_one_geojson(self): "geonode.upload.publish_resource", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 5) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 5) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_gpkg_is_invalid(self): with self.assertRaises(InvalidGeopackageException) as _exc: diff --git a/geonode/upload/handlers/kml/handler.py b/geonode/upload/handlers/kml/handler.py index 8ab7f9d00a4..aaaa07cb4c5 100644 --- a/geonode/upload/handlers/kml/handler.py 
+++ b/geonode/upload/handlers/kml/handler.py @@ -37,8 +37,8 @@ class KMLFileHandler(BaseVectorFileHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { - exa.IMPORT.value: ( + TASKS = { + exa.UPLOAD.value: ( "start_import", "geonode.upload.import_resource", "geonode.upload.publish_resource", @@ -61,9 +61,15 @@ class KMLFileHandler(BaseVectorFileHandler): def supported_file_extension_config(self): return { "id": "kml", - "label": "KML/KMZ", - "format": "vector", - "ext": ["kml", "kmz"], + "formats": [ + {"label": "KML", "required_ext": ["kml"]}, + { + "label": "KMZ", + "required_ext": ["kmz"], + }, + ], + "actions": list(self.TASKS.keys()), + "type": "vector", } @property diff --git a/geonode/upload/handlers/kml/tests.py b/geonode/upload/handlers/kml/tests.py index 9fe0a873c47..2b03a0a0a06 100644 --- a/geonode/upload/handlers/kml/tests.py +++ b/geonode/upload/handlers/kml/tests.py @@ -38,7 +38,7 @@ def setUpClass(cls): cls.invalid_kml = f"{project_dir}/tests/fixture/inva.lid.kml" cls.user, _ = get_user_model().objects.get_or_create(username="admin") cls.invalid_files = {"base_file": cls.invalid_kml} - cls.valid_files = {"base_file": cls.valid_kml, "source": "upload"} + cls.valid_files = {"base_file": cls.valid_kml, "action": "upload"} cls.owner = get_user_model().objects.first() cls.layer = create_single_dataset(name="extruded_polygon", owner=cls.owner) @@ -49,8 +49,8 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.publish_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 4) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 4) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_geojson(self): expected = ( @@ -60,8 +60,8 @@ def test_task_list_is_the_expected_one_geojson(self): "geonode.upload.publish_resource", 
"geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 5) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 5) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_kml_is_invalid(self): with self.assertRaises(InvalidKmlException) as _exc: diff --git a/geonode/upload/handlers/remote/tests/test_3dtiles.py b/geonode/upload/handlers/remote/tests/test_3dtiles.py index 675b65e92f4..f5216ec9414 100644 --- a/geonode/upload/handlers/remote/tests/test_3dtiles.py +++ b/geonode/upload/handlers/remote/tests/test_3dtiles.py @@ -83,16 +83,16 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.import_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 3) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 3) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_geojson(self): expected = ( "start_copy", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 2) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 2) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_url_is_invalid(self): with self.assertRaises(ImportException) as _exc: @@ -107,7 +107,7 @@ def test_is_valid_should_pass_with_valid_url(self): def test_extract_params_from_data(self): actual, _data = self.handler.extract_params_from_data( _data={"defaults": '{"url": "http://abc123defsadsa.org", "title": "Remote Title", "type": "3dtiles"}'}, - action="import", + action="upload", ) self.assertTrue("title" in actual) self.assertTrue("url" in actual) diff --git a/geonode/upload/handlers/remote/tests/test_wms.py 
b/geonode/upload/handlers/remote/tests/test_wms.py index 5330343b405..1bb00c1a71a 100644 --- a/geonode/upload/handlers/remote/tests/test_wms.py +++ b/geonode/upload/handlers/remote/tests/test_wms.py @@ -35,9 +35,7 @@ class TestRemoteWMSResourceHandler(TestCase): def setUpClass(cls): super().setUpClass() cls.handler = RemoteWMSResourceHandler() - cls.valid_url = ( - "https://development.demo.geonode.org/geoserver/ows?service=WMS&version=1.3.0&request=GetCapabilities" - ) + cls.valid_url = "http://geoserver:8080/geoserver/ows?service=WMS&version=1.3.0&request=GetCapabilities" cls.user, _ = get_user_model().objects.get_or_create(username="admin") cls.invalid_payload = { "url": "http://invalid.com", @@ -96,16 +94,16 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.import_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 3) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 3) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_geojson(self): expected = ( "start_copy", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 2) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 2) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_url_is_invalid(self): with self.assertRaises(ImportException) as _exc: @@ -120,7 +118,7 @@ def test_is_valid_should_pass_with_valid_url(self): def test_extract_params_from_data(self): actual, _data = self.handler.extract_params_from_data( _data={"defaults": f"{self.valid_payload_with_parse_true}"}, - action="import", + action="upload", ) self.assertTrue("title" in actual) self.assertTrue("url" in actual) diff --git a/geonode/upload/handlers/remote/tiles3d.py 
b/geonode/upload/handlers/remote/tiles3d.py index 94f04199abc..c47df255e2a 100644 --- a/geonode/upload/handlers/remote/tiles3d.py +++ b/geonode/upload/handlers/remote/tiles3d.py @@ -33,6 +33,10 @@ class RemoteTiles3DResourceHandler(BaseRemoteResourceHandler, Tiles3DFileHandler): + @property + def supported_file_extension_config(self): + return {} + @staticmethod def has_serializer(data) -> bool: if "url" in data and "3dtiles" in data.get("type", "").lower(): diff --git a/geonode/upload/handlers/shapefile/handler.py b/geonode/upload/handlers/shapefile/handler.py index 1f5951c32b8..ff7033c9840 100644 --- a/geonode/upload/handlers/shapefile/handler.py +++ b/geonode/upload/handlers/shapefile/handler.py @@ -29,7 +29,6 @@ from geonode.upload.handlers.shapefile.exceptions import InvalidShapeFileException from geonode.upload.handlers.shapefile.serializer import OverwriteShapeFileSerializer, ShapeFileSerializer -from geonode.upload.utils import ImporterRequestAction as ira logger = logging.getLogger("importer") @@ -40,35 +39,19 @@ class ShapeFileHandler(BaseVectorFileHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { - exa.IMPORT.value: ( - "start_import", - "geonode.upload.import_resource", - "geonode.upload.publish_resource", - "geonode.upload.create_geonode_resource", - ), - exa.COPY.value: ( - "start_copy", - "geonode.upload.copy_dynamic_model", - "geonode.upload.copy_geonode_data_table", - "geonode.upload.publish_resource", - "geonode.upload.copy_geonode_resource", - ), - ira.ROLLBACK.value: ( - "start_rollback", - "geonode.upload.rollback", - ), - } - @property def supported_file_extension_config(self): return { "id": "shp", - "label": "ESRI Shapefile", - "format": "vector", - "ext": ["shp"], - "requires": ["shp", "prj", "dbf", "shx"], - "optional": ["xml", "sld", "cpg", "cst"], + "formats": [ + { + "label": "ESRI Shapefile", + "required_ext": ["shp", "prj", "dbf", "shx"], + "optional_ext": ["xml", "sld", "cpg", "cst"], + } + ], + 
"actions": list(self.TASKS.keys()), + "type": "vector", } @staticmethod @@ -110,7 +93,7 @@ def extract_params_from_data(_data, action=None): "overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), "resource_pk": _data.pop("resource_pk", None), "store_spatial_file": _data.pop("store_spatial_files", "True"), - "source": _data.pop("source", "upload"), + "action": _data.pop("action", "upload"), } return additional_params, _data @@ -130,7 +113,7 @@ def is_valid(files, user, **kwargs): _filename = Path(_file).stem - _shp_ext_needed = [x["requires"] for x in get_supported_datasets_file_types() if x["id"] == "shp"][0] + _shp_ext_needed = ShapeFileHandler._get_ext_needed() """ Check if the ext required for the shape file are available in the files uploaded @@ -156,6 +139,13 @@ def is_valid(files, user, **kwargs): return True + @staticmethod + def _get_ext_needed(): + for x in get_supported_datasets_file_types(): + if x["id"] == "shp": + for item in x["formats"][0]["required_ext"]: + yield item + def get_ogr2ogr_driver(self): return ogr.GetDriverByName("ESRI Shapefile") diff --git a/geonode/upload/handlers/shapefile/serializer.py b/geonode/upload/handlers/shapefile/serializer.py index cf7aa407436..4091cddb5ac 100644 --- a/geonode/upload/handlers/shapefile/serializer.py +++ b/geonode/upload/handlers/shapefile/serializer.py @@ -36,7 +36,7 @@ class Meta: "store_spatial_files", "overwrite_existing_layer", "skip_existing_layers", - "source", + "action", ) base_file = serializers.FileField() @@ -48,7 +48,7 @@ class Meta: store_spatial_files = serializers.BooleanField(required=False, default=True) overwrite_existing_layer = serializers.BooleanField(required=False, default=False) skip_existing_layers = serializers.BooleanField(required=False, default=False) - source = serializers.CharField(required=False, default="upload") + action = serializers.CharField(required=True) class OverwriteShapeFileSerializer(ShapeFileSerializer): diff --git 
a/geonode/upload/handlers/shapefile/tests.py b/geonode/upload/handlers/shapefile/tests.py index f643306bf7e..d90c0fc06e8 100644 --- a/geonode/upload/handlers/shapefile/tests.py +++ b/geonode/upload/handlers/shapefile/tests.py @@ -48,7 +48,7 @@ def setUpClass(cls): "dbf_file": f"{file_path}/san_andres_y_providencia_highway.dbf", "prj_file": f"{file_path}/san_andres_y_providencia_highway.prj", "shx_file": f"{file_path}/san_andres_y_providencia_highway.shx", - "source": "upload", + "action": "upload", } cls.invalid_shp = f"{project_dir}/tests/fixture/invalid.geojson" cls.user, _ = get_user_model().objects.get_or_create(username="admin") @@ -62,8 +62,8 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.publish_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 4) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 4) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_copy_task_list_is_the_expected_one(self): expected = ( @@ -73,8 +73,8 @@ def test_copy_task_list_is_the_expected_one(self): "geonode.upload.publish_resource", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 5) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 5) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_parallelism_is_met(self): parallelism, created = UploadParallelismLimit.objects.get_or_create(slug="default_max_parallel_uploads") diff --git a/geonode/upload/handlers/sld/handler.py b/geonode/upload/handlers/sld/handler.py index c6cd25863a4..14c5efb736f 100644 --- a/geonode/upload/handlers/sld/handler.py +++ b/geonode/upload/handlers/sld/handler.py @@ -22,6 +22,7 @@ from geonode.upload.handlers.common.metadata import MetadataFileHandler from 
geonode.upload.handlers.sld.exceptions import InvalidSldException from owslib.etree import etree as dlxml +from geonode.upload.utils import ImporterRequestAction as ira logger = logging.getLogger("importer") @@ -32,25 +33,26 @@ class SLDFileHandler(MetadataFileHandler): It must provide the task_lists required to comple the upload """ + TASKS = { + ira.RESOURCE_STYLE_UPLOAD.value: ("start_import", "geonode.upload.import_resource"), + ira.ROLLBACK.value: ( + "start_rollback", + "geonode.upload.rollback", + ), + } + @property def supported_file_extension_config(self): return { "id": "sld", - "label": "Styled Layer Descriptor (SLD)", - "format": "metadata", - "ext": ["sld"], - "mimeType": ["application/json"], - "needsFiles": [ - "shp", - "prj", - "dbf", - "shx", - "csv", - "tiff", - "zip", - "xml", - "geojson", + "formats": [ + { + "label": "Styled Layer Descriptor 1.0, 1.1 (SLD)", + "required_ext": ["sld"], + } ], + "actions": list(self.TASKS.keys()), + "type": "metadata", } @staticmethod @@ -62,9 +64,9 @@ def can_handle(_data) -> bool: base = _data.get("base_file") if not base: return False - return ( - base.endswith(".sld") if isinstance(base, str) else base.name.endswith(".sld") - ) and MetadataFileHandler.can_handle(_data) + return (base.endswith(".sld") if isinstance(base, str) else base.name.endswith(".sld")) and _data.get( + "action", None + ) == ira.RESOURCE_STYLE_UPLOAD.value @staticmethod def is_valid(files, user, **kwargs): diff --git a/geonode/upload/handlers/sld/tests.py b/geonode/upload/handlers/sld/tests.py index 1864ba2dee1..2cbc7931382 100644 --- a/geonode/upload/handlers/sld/tests.py +++ b/geonode/upload/handlers/sld/tests.py @@ -45,7 +45,7 @@ def setUpClass(cls): cls.valid_files = { "base_file": "/tmp/test_sld.sld", "sld_file": "/tmp/test_sld.sld", - "source": "resource_file_upload", + "action": "resource_style_upload", } cls.owner = get_user_model().objects.first() cls.layer = create_single_dataset(name="sld_dataset", owner=cls.owner) @@ -59,8 
+59,8 @@ def test_task_list_is_the_expected_one(self): "start_import", "geonode.upload.import_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 2) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["resource_style_upload"]), 2) + self.assertTupleEqual(expected, self.handler.TASKS["resource_style_upload"]) def test_is_valid_should_raise_exception_if_the_sld_is_invalid(self): with self.assertRaises(InvalidSldException) as _exc: diff --git a/geonode/upload/handlers/tiles3d/handler.py b/geonode/upload/handlers/tiles3d/handler.py index 43cbe1ba498..08a25443652 100755 --- a/geonode/upload/handlers/tiles3d/handler.py +++ b/geonode/upload/handlers/tiles3d/handler.py @@ -42,8 +42,8 @@ class Tiles3DFileHandler(BaseVectorFileHandler): It must provide the task_lists required to comple the upload """ - ACTIONS = { - exa.IMPORT.value: ( + TASKS = { + exa.UPLOAD.value: ( "start_import", "geonode.upload.import_resource", "geonode.upload.create_geonode_resource", @@ -62,10 +62,14 @@ class Tiles3DFileHandler(BaseVectorFileHandler): def supported_file_extension_config(self): return { "id": "3dtiles", - "label": "3D Tiles", - "format": "vector", - "ext": ["json"], - "optional": ["xml", "sld"], + "formats": [ + { + "label": "3D Tiles", + "required_ext": ["zip"], + } + ], + "actions": list(self.TASKS.keys()), + "type": "vector", } @staticmethod @@ -151,7 +155,7 @@ def extract_params_from_data(_data, action=None): return { "skip_existing_layers": _data.pop("skip_existing_layers", "False"), "store_spatial_file": _data.pop("store_spatial_files", "True"), - "source": _data.pop("source", "upload"), + "action": _data.pop("action", "upload"), "original_zip_name": _data.pop("original_zip_name", None), "overwrite_existing_layer": _data.pop("overwrite_existing_layer", False), }, _data @@ -198,7 +202,7 @@ def import_resource(self, files: dict, execution_id: str, **kwargs) -> str: "geonode.upload.import_resource", 
layer_name, alternate, - exa.IMPORT.value, + exa.UPLOAD.value, ) ) return layer_name, alternate, execution_id diff --git a/geonode/upload/handlers/tiles3d/tests.py b/geonode/upload/handlers/tiles3d/tests.py index 8bbfc66cb2e..6aa3982d3e2 100755 --- a/geonode/upload/handlers/tiles3d/tests.py +++ b/geonode/upload/handlers/tiles3d/tests.py @@ -58,16 +58,16 @@ def test_task_list_is_the_expected_one(self): "geonode.upload.import_resource", "geonode.upload.create_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 3) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + self.assertEqual(len(self.handler.TASKS["upload"]), 3) + self.assertTupleEqual(expected, self.handler.TASKS["upload"]) def test_task_list_is_the_expected_one_copy(self): expected = ( "start_copy", "geonode.upload.copy_geonode_resource", ) - self.assertEqual(len(self.handler.ACTIONS["copy"]), 2) - self.assertTupleEqual(expected, self.handler.ACTIONS["copy"]) + self.assertEqual(len(self.handler.TASKS["copy"]), 2) + self.assertTupleEqual(expected, self.handler.TASKS["copy"]) def test_is_valid_should_raise_exception_if_the_parallelism_is_met(self): parallelism, created = UploadParallelismLimit.objects.get_or_create(slug="default_max_parallel_uploads") @@ -190,10 +190,14 @@ def test_supported_file_extension_config(self): """ expected = { "id": "3dtiles", - "label": "3D Tiles", - "format": "vector", - "ext": ["json"], - "optional": ["xml", "sld"], + "formats": [ + { + "label": "3D Tiles", + "required_ext": ["zip"], + } + ], + "actions": list(Tiles3DFileHandler.TASKS.keys()), + "type": "vector", } actual = self.handler.supported_file_extension_config self.assertDictEqual(actual, expected) diff --git a/geonode/upload/handlers/utils.py b/geonode/upload/handlers/utils.py index e6ea039d3f2..f4908d43b5a 100644 --- a/geonode/upload/handlers/utils.py +++ b/geonode/upload/handlers/utils.py @@ -16,7 +16,6 @@ # along with this program. If not, see . 
# ######################################################################### -import enum import hashlib from django.contrib.auth import get_user_model @@ -32,12 +31,6 @@ logger = logging.getLogger("importer") -# TODO this part should be improved when we will drop the legacy upload templates -class UploadSourcesEnum(enum.Enum): - upload = "upload" # used in the default upload flow - resource_file_upload = "resource_file_upload" # source used for the single resource metadata upload - - STANDARD_TYPE_MAPPING = { "Integer64": "django.db.models.IntegerField", "Integer": "django.db.models.IntegerField", diff --git a/geonode/upload/handlers/xml/handler.py b/geonode/upload/handlers/xml/handler.py index a6f6340689f..58c64ac3a8a 100644 --- a/geonode/upload/handlers/xml/handler.py +++ b/geonode/upload/handlers/xml/handler.py @@ -22,6 +22,7 @@ from geonode.upload.handlers.common.metadata import MetadataFileHandler from geonode.upload.handlers.xml.exceptions import InvalidXmlException from owslib.etree import etree as dlxml +from geonode.upload.utils import ImporterRequestAction as ira logger = logging.getLogger("importer") @@ -32,25 +33,26 @@ class XMLFileHandler(MetadataFileHandler): It must provide the task_lists required to comple the upload """ + TASKS = { + ira.RESOURCE_METADATA_UPLOAD.value: ("start_import", "geonode.upload.import_resource"), + ira.ROLLBACK.value: ( + "start_rollback", + "geonode.upload.rollback", + ), + } + @property def supported_file_extension_config(self): return { "id": "xml", - "label": "XML Metadata File", - "format": "metadata", - "ext": ["xml"], - "mimeType": ["application/json"], - "needsFiles": [ - "shp", - "prj", - "dbf", - "shx", - "csv", - "tiff", - "zip", - "sld", - "geojson", + "formats": [ + { + "label": "XML Metadata File (XML - ISO, FGDC, ebRIM, Dublin Core)", + "required_ext": ["xml"], + } ], + "actions": list(self.TASKS.keys()), + "type": "metadata", } @staticmethod @@ -62,9 +64,9 @@ def can_handle(_data) -> bool: base = 
_data.get("base_file") if not base: return False - return ( - base.endswith(".xml") if isinstance(base, str) else base.name.endswith(".xml") - ) and MetadataFileHandler.can_handle(_data) + return (base.endswith(".xml") if isinstance(base, str) else base.name.endswith(".xml")) and _data.get( + "action", None + ) == ira.RESOURCE_METADATA_UPLOAD.value @staticmethod def is_valid(files, user=None, **kwargs): diff --git a/geonode/upload/handlers/xml/serializer.py b/geonode/upload/handlers/xml/serializer.py index a28ddd2118a..19bb53fb61c 100644 --- a/geonode/upload/handlers/xml/serializer.py +++ b/geonode/upload/handlers/xml/serializer.py @@ -27,9 +27,9 @@ class Meta: ref_name = "MetadataFileSerializer" model = ResourceBase view_name = "importer_upload" - fields = ("overwrite_existing_layer", "resource_pk", "base_file", "source") + fields = ("overwrite_existing_layer", "resource_pk", "base_file", "action") base_file = serializers.FileField() overwrite_existing_layer = serializers.BooleanField(required=False, default=True) resource_pk = serializers.CharField(required=True) - source = serializers.CharField(required=False, default="resource_file_upload") + action = serializers.CharField(required=True) diff --git a/geonode/upload/handlers/xml/tests.py b/geonode/upload/handlers/xml/tests.py index af9916a1fc8..b96147f4d94 100644 --- a/geonode/upload/handlers/xml/tests.py +++ b/geonode/upload/handlers/xml/tests.py @@ -44,7 +44,7 @@ def setUpClass(cls): cls.valid_files = { "base_file": "/tmp/test_xml.xml", "xml_file": "/tmp/test_xml.xml", - "source": "resource_file_upload", + "action": "resource_metadata_upload", } cls.owner = get_user_model().objects.first() cls.layer = create_single_dataset(name="extruded_polygon", owner=cls.owner) @@ -58,8 +58,8 @@ def test_task_list_is_the_expected_one(self): "start_import", "geonode.upload.import_resource", ) - self.assertEqual(len(self.handler.ACTIONS["import"]), 2) - self.assertTupleEqual(expected, self.handler.ACTIONS["import"]) + 
self.assertEqual(len(self.handler.TASKS["resource_metadata_upload"]), 2) + self.assertTupleEqual(expected, self.handler.TASKS["resource_metadata_upload"]) def test_is_valid_should_raise_exception_if_the_xml_is_invalid(self): with self.assertRaises(InvalidXmlException) as _exc: diff --git a/geonode/upload/orchestrator.py b/geonode/upload/orchestrator.py index 1611e4f6d91..a3d9ecab88a 100644 --- a/geonode/upload/orchestrator.py +++ b/geonode/upload/orchestrator.py @@ -48,19 +48,29 @@ class ImportOrchestrator: """ + def get_handler_registry(self): + return BaseHandler.get_registry() + def get_handler(self, _data) -> Optional[BaseHandler]: """ If is part of the supported format, return the handler which can handle the import otherwise return None """ - for handler in BaseHandler.get_registry(): - if handler.can_handle(_data): - return handler() - logger.error("Handler not found") + for handler in self.get_handler_registry(): + can_handle = handler.can_handle(_data) + match can_handle: + case True: + return handler() + case False: + logger.info( + f"The handler {str(handler)} cannot manage the requested action: {_data.get('action', None)}" + ) + + logger.error("No handlers found for this dataset type/action") return None def get_serializer(self, _data) -> serializers.Serializer: - for handler in BaseHandler.get_registry(): + for handler in self.get_handler_registry(): _serializer = handler.has_serializer(_data) if _serializer: return _serializer @@ -77,7 +87,7 @@ def load_handler(self, module_path): raise ImportException(detail=f"The handler is not available: {module_path}") def load_handler_by_id(self, handler_id): - for handler in BaseHandler.get_registry(): + for handler in self.get_handler_registry(): if handler().id == handler_id: return handler logger.error("Handler not found") @@ -300,7 +310,6 @@ def create_execution_request( input_params=input_params, action=action, name=name, - source=source, ) return execution.exec_id diff --git 
a/geonode/upload/tests/end2end/test_end2end.py b/geonode/upload/tests/end2end/test_end2end.py index 3d93c65f075..f0a493b144e 100644 --- a/geonode/upload/tests/end2end/test_end2end.py +++ b/geonode/upload/tests/end2end/test_end2end.py @@ -176,9 +176,7 @@ class ImporterGeoPackageImportTest(BaseImporterEndToEndTest): def test_import_geopackage(self): self._cleanup_layers(name="stazioni_metropolitana") - payload = { - "base_file": open(self.valid_gkpg, "rb"), - } + payload = {"base_file": open(self.valid_gkpg, "rb"), "action": "upload"} initial_name = "stazioni_metropolitana" self._assertimport(payload, initial_name) self._cleanup_layers(name="stazioni_metropolitana") @@ -188,14 +186,10 @@ def test_import_geopackage(self): def test_import_gpkg_overwrite(self): self._cleanup_layers(name="stazioni_metropolitana") initial_name = "stazioni_metropolitana" - payload = { - "base_file": open(self.valid_gkpg, "rb"), - } + payload = {"base_file": open(self.valid_gkpg, "rb"), "action": "upload"} prev_dataset = self._assertimport(payload, initial_name, keep_resource=True) - payload = { - "base_file": open(self.valid_gkpg, "rb"), - } + payload = {"base_file": open(self.valid_gkpg, "rb"), "action": "upload"} payload["overwrite_existing_layer"] = True payload["resource_pk"] = prev_dataset.pk self._assertimport(payload, initial_name, overwrite=True, last_update=prev_dataset.last_updated) @@ -210,9 +204,7 @@ class ImporterNoCRSImportTest(BaseImporterEndToEndTest): def test_import_geopackage_with_no_crs_table(self): self._cleanup_layers(name="mattia_test") - payload = { - "base_file": open(self.no_crs_gpkg, "rb"), - } + payload = {"base_file": open(self.no_crs_gpkg, "rb"), "action": "upload"} initial_name = "mattia_test" with self.assertLogs(level="ERROR") as _log: self._assertimport(payload, initial_name) @@ -235,10 +227,7 @@ def test_import_geopackage_with_no_crs_table_should_raise_error_if_all_layer_are _select_valid_layers.return_value = [] self._cleanup_layers(name="mattia_test") - 
payload = { - "base_file": open(self.no_crs_gpkg, "rb"), - "store_spatial_file": True, - } + payload = {"base_file": open(self.no_crs_gpkg, "rb"), "store_spatial_file": True, "action": "upload"} with self.assertLogs(level="ERROR") as _log: self.client.force_login(self.admin) @@ -261,9 +250,7 @@ def test_import_geojson(self): self._cleanup_layers(name="valid") - payload = { - "base_file": open(self.valid_geojson, "rb"), - } + payload = {"base_file": open(self.valid_geojson, "rb"), "action": "upload"} initial_name = "valid" self._assertimport(payload, initial_name) @@ -273,14 +260,10 @@ def test_import_geojson(self): @override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data") def test_import_geojson_overwrite(self): self._cleanup_layers(name="valid") - payload = { - "base_file": open(self.valid_geojson, "rb"), - } + payload = {"base_file": open(self.valid_geojson, "rb"), "action": "upload"} initial_name = "valid" prev_dataset = self._assertimport(payload, initial_name, keep_resource=True) - payload = { - "base_file": open(self.valid_geojson, "rb"), - } + payload = {"base_file": open(self.valid_geojson, "rb"), "action": "upload"} payload["overwrite_existing_layer"] = True payload["resource_pk"] = prev_dataset.pk self._assertimport(payload, initial_name, overwrite=True, last_update=prev_dataset.last_updated) @@ -294,9 +277,7 @@ class ImporterGCSVImportTest(BaseImporterEndToEndTest): def test_import_geojson(self): self._cleanup_layers(name="valid") - payload = { - "base_file": open(self.valid_csv, "rb"), - } + payload = {"base_file": open(self.valid_csv, "rb"), "action": "upload"} initial_name = "valid" self._assertimport(payload, initial_name) self._cleanup_layers(name="valid") @@ -305,15 +286,11 @@ def test_import_geojson(self): @override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data") def test_import_csv_overwrite(self): self._cleanup_layers(name="valid") - payload = { - "base_file": 
open(self.valid_csv, "rb"), - } + payload = {"base_file": open(self.valid_csv, "rb"), "action": "upload"} initial_name = "valid" prev_dataset = self._assertimport(payload, initial_name, keep_resource=True) - payload = { - "base_file": open(self.valid_csv, "rb"), - } + payload = {"base_file": open(self.valid_csv, "rb"), "action": "upload"} initial_name = "valid" payload["overwrite_existing_layer"] = True payload["resource_pk"] = prev_dataset.pk @@ -326,9 +303,7 @@ class ImporterKMLImportTest(BaseImporterEndToEndTest): @override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data") def test_import_kml(self): self._cleanup_layers(name="sample_point_dataset") - payload = { - "base_file": open(self.valid_kml, "rb"), - } + payload = {"base_file": open(self.valid_kml, "rb"), "action": "upload"} initial_name = "sample_point_dataset" self._assertimport(payload, initial_name) self._cleanup_layers(name="sample_point_dataset") @@ -339,14 +314,10 @@ def test_import_kml_overwrite(self): initial_name = "sample_point_dataset" self._cleanup_layers(name="sample_point_dataset") - payload = { - "base_file": open(self.valid_kml, "rb"), - } + payload = {"base_file": open(self.valid_kml, "rb"), "action": "upload"} prev_dataset = self._assertimport(payload, initial_name, keep_resource=True) - payload = { - "base_file": open(self.valid_kml, "rb"), - } + payload = {"base_file": open(self.valid_kml, "rb"), "action": "upload"} payload["overwrite_existing_layer"] = True payload["resource_pk"] = prev_dataset.pk self._assertimport(payload, initial_name, overwrite=True, last_update=prev_dataset.last_updated) @@ -359,6 +330,7 @@ class ImporterShapefileImportTest(BaseImporterEndToEndTest): def test_import_shapefile(self): self._cleanup_layers(name="air_Runways") payload = {_filename: open(_file, "rb") for _filename, _file in self.valid_shp.items()} + payload["action"] = "upload" initial_name = "air_Runways" self._assertimport(payload, initial_name) 
self._cleanup_layers(name="air_Runways") @@ -369,11 +341,13 @@ def test_import_shapefile_overwrite(self): self._cleanup_layers(name="air_Runways") payload = {_filename: open(_file, "rb") for _filename, _file in self.valid_shp.items()} + payload["action"] = "upload" initial_name = "air_Runways" prev_dataset = self._assertimport(payload, initial_name, keep_resource=True) payload = {_filename: open(_file, "rb") for _filename, _file in self.valid_shp.items()} payload["overwrite_existing_layer"] = True payload["resource_pk"] = prev_dataset.pk + payload["action"] = "upload" self._assertimport( payload, initial_name, overwrite=True, last_update=prev_dataset.last_updated, keep_resource=True ) @@ -386,9 +360,7 @@ class ImporterRasterImportTest(BaseImporterEndToEndTest): def test_import_raster(self): self._cleanup_layers(name="test_raster") - payload = { - "base_file": open(self.valid_tif, "rb"), - } + payload = {"base_file": open(self.valid_tif, "rb"), "action": "upload"} initial_name = "test_raster" self._assertimport(payload, initial_name) self._cleanup_layers(name="test_raster") @@ -399,14 +371,10 @@ def test_import_raster_overwrite(self): initial_name = "test_raster" self._cleanup_layers(name="test_raster") - payload = { - "base_file": open(self.valid_tif, "rb"), - } + payload = {"base_file": open(self.valid_tif, "rb"), "action": "upload"} prev_dataset = self._assertimport(payload, initial_name, keep_resource=True) - payload = { - "base_file": open(self.valid_tif, "rb"), - } + payload = {"base_file": open(self.valid_tif, "rb"), "action": "upload"} initial_name = "test_raster" payload["overwrite_existing_layer"] = True payload["resource_pk"] = prev_dataset.pk @@ -422,6 +390,7 @@ def test_import_3dtiles(self): "url": "https://raw.githubusercontent.com/CesiumGS/3d-tiles-samples/main/1.1/TilesetWithFullMetadata/tileset.json", "title": "Remote Title", "type": "3dtiles", + "action": "upload", } initial_name = "remote_title" assert_payload = { @@ -438,6 +407,7 @@ def 
test_import_3dtiles_overwrite(self): "url": "https://raw.githubusercontent.com/CesiumGS/3d-tiles-samples/main/1.1/TilesetWithFullMetadata/tileset.json", "title": "Remote Title", "type": "3dtiles", + "action": "upload", } initial_name = "remote_title" assert_payload = { @@ -485,6 +455,7 @@ def test_import_wms(self): "type": "wms", "lookup": resource_to_take, "parse_remote_metadata": True, + "action": "upload", } initial_name = res.title assert_payload = { diff --git a/geonode/upload/tests/end2end/test_end2end_copy.py b/geonode/upload/tests/end2end/test_end2end_copy.py index 630e16962b4..a66eacc09ca 100644 --- a/geonode/upload/tests/end2end/test_end2end_copy.py +++ b/geonode/upload/tests/end2end/test_end2end_copy.py @@ -85,6 +85,7 @@ def _assertCloning(self, initial_name): # defining the payload payload = QueryDict("", mutable=True) payload.update({"defaults": '{"title":"title_of_the_cloned_resource"}'}) + payload["action"] = "copy" # calling the endpoint response = self.client.put(_url, data=payload, content_type="application/json") @@ -113,6 +114,7 @@ def _assertCloning(self, initial_name): self.assertTrue(schema_entity.name in [y.name for y in resources]) def _import_resource(self, payload, initial_name): + payload["action"] = "upload" _url = reverse("importer_upload") self.client.force_login(get_user_model().objects.get(username="admin")) @@ -147,9 +149,7 @@ class ImporterCopyEnd2EndGpkgTest(BaseClassEnd2End): ) @override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data") def test_copy_dataset_from_geopackage(self): - payload = { - "base_file": open(self.valid_gkpg, "rb"), - } + payload = {"base_file": open(self.valid_gkpg, "rb"), "action": "copy"} initial_name = "stazioni_metropolitana" # first we need to import a resource with transaction.atomic(): @@ -168,9 +168,7 @@ class ImporterCopyEnd2EndGeoJsonTest(BaseClassEnd2End): ) @override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data") def 
test_copy_dataset_from_geojson(self): - payload = { - "base_file": open(self.valid_geojson, "rb"), - } + payload = {"base_file": open(self.valid_geojson, "rb"), "action": "copy"} initial_name = "valid" # first we need to import a resource with transaction.atomic(): @@ -189,6 +187,7 @@ class ImporterCopyEnd2EndShapeFileTest(BaseClassEnd2End): @override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data") def test_copy_dataset_from_shapefile(self): payload = {_filename: open(_file, "rb") for _filename, _file in self.valid_shp.items()} + payload["action"] = "copy" initial_name = "air_runways" # first we need to import a resource with transaction.atomic(): @@ -206,9 +205,7 @@ class ImporterCopyEnd2EndKMLTest(BaseClassEnd2End): ) @override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data") def test_copy_dataset_from_kml(self): - payload = { - "base_file": open(self.valid_kml, "rb"), - } + payload = {"base_file": open(self.valid_kml, "rb"), "action": "copy"} initial_name = "sample_point_dataset" # first we need to import a resource with transaction.atomic(): diff --git a/geonode/upload/tests/unit/test_dastore.py b/geonode/upload/tests/unit/test_dastore.py index 361b67f028a..fd9b504c3cf 100644 --- a/geonode/upload/tests/unit/test_dastore.py +++ b/geonode/upload/tests/unit/test_dastore.py @@ -37,11 +37,10 @@ def setUp(self): user=self.user, func_name="create", step="create", - action="import", + action="upload", input_params={ **{"handler_module_path": "geonode.upload.handlers.gpkg.handler.GPKGFileHandler"}, }, - source="importer_copy", ) self.datastore = DataStoreManager( self.files, "geonode.upload.handlers.gpkg.handler.GPKGFileHandler", self.user, execution_id @@ -51,9 +50,8 @@ def setUp(self): user=self.user, func_name="create", step="create", - action="import", + action="upload", input_params={"url": "https://geosolutionsgroup.com"}, - source="importer_copy", ) self.datastore_url = DataStoreManager( 
self.files, "geonode.upload.handlers.common.remote.BaseRemoteResourceHandler", self.user, execution_id_url diff --git a/geonode/upload/tests/unit/test_orchestrator.py b/geonode/upload/tests/unit/test_orchestrator.py index b4d3871c042..a1f11c1f6ae 100644 --- a/geonode/upload/tests/unit/test_orchestrator.py +++ b/geonode/upload/tests/unit/test_orchestrator.py @@ -44,7 +44,7 @@ def setUpClass(cls): cls.orchestrator = ImportOrchestrator() def test_get_handler(self): - _data = {"base_file": "file.gpkg", "source": "upload"} + _data = {"base_file": "file.gpkg", "action": "upload"} actual = self.orchestrator.get_handler(_data) self.assertIsNotNone(actual) self.assertEqual("geonode.upload.handlers.gpkg.handler.GPKGFileHandler", str(actual)) @@ -102,12 +102,13 @@ def test_create_execution_request(self): } exec_id = self.orchestrator.create_execution_request( user=get_user_model().objects.first(), - func_name=next(iter(handler.get_task_list(action="import"))), - step=next(iter(handler.get_task_list(action="import"))), + func_name=next(iter(handler.get_task_list(action="upload"))), + step=next(iter(handler.get_task_list(action="upload"))), input_params={ "files": {"base_file": "/tmp/file.txt"}, "store_spatial_files": True, }, + action="upload", ) exec_obj = ExecutionRequest.objects.filter(exec_id=exec_id).first() self.assertEqual(count + 1, ExecutionRequest.objects.count()) @@ -120,7 +121,7 @@ def test_perform_next_step(self, mock_celery): handler = self.orchestrator.load_handler("geonode.upload.handlers.gpkg.handler.GPKGFileHandler") _id = self.orchestrator.create_execution_request( user=get_user_model().objects.first(), - func_name=next(iter(handler.get_task_list(action="import"))), + func_name=next(iter(handler.get_task_list(action="upload"))), step="start_import", # adding the first step for the GPKG file input_params={ "files": {"base_file": "/tmp/file.txt"}, @@ -130,7 +131,7 @@ def test_perform_next_step(self, mock_celery): # test under tests 
self.orchestrator.perform_next_step( _id, - "import", + "upload", step="start_import", handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", ) @@ -144,17 +145,18 @@ def test_perform_last_import_step(self, mock_celery): handler = self.orchestrator.load_handler("geonode.upload.handlers.gpkg.handler.GPKGFileHandler") _id = self.orchestrator.create_execution_request( user=get_user_model().objects.first(), - func_name=next(iter(handler.get_task_list(action="import"))), + func_name=next(iter(handler.get_task_list(action="upload"))), step="geonode.upload.create_geonode_resource", # adding the first step for the GPKG file input_params={ "files": {"base_file": "/tmp/file.txt"}, "store_spatial_files": True, }, + action="upload", ) # test under tests self.orchestrator.perform_next_step( _id, - "import", + "upload", step="geonode.upload.create_geonode_resource", handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", ) @@ -167,7 +169,7 @@ def test_perform_with_error_set_invalid_status(self, mock_celery): handler = self.orchestrator.load_handler("geonode.upload.handlers.gpkg.handler.GPKGFileHandler") _id = self.orchestrator.create_execution_request( user=get_user_model().objects.first(), - func_name=next(iter(handler.get_task_list(action="import"))), + func_name=next(iter(handler.get_task_list(action="upload"))), step="start_import", # adding the first step for the GPKG file input_params={ "files": {"base_file": "/tmp/file.txt"}, @@ -178,7 +180,7 @@ def test_perform_with_error_set_invalid_status(self, mock_celery): with self.assertRaises(Exception): self.orchestrator.perform_next_step( _id, - "import", + "upload", step="start_import", handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", ) diff --git a/geonode/upload/tests/unit/test_publisher.py b/geonode/upload/tests/unit/test_publisher.py index 0117d0a5760..9d7f7e06e10 100644 --- a/geonode/upload/tests/unit/test_publisher.py +++ 
b/geonode/upload/tests/unit/test_publisher.py @@ -60,7 +60,7 @@ def test_extract_resource_name_and_crs(self): """ values_found = self.publisher.extract_resource_to_publish( files={"base_file": self.gpkg_path}, - action="import", + action="upload", layer_name="stazioni_metropolitana", ) expected = {"crs": "EPSG:32632", "name": "stazioni_metropolitana"} @@ -75,7 +75,7 @@ def test_extract_resource_name_and_crs_return_empty_if_the_file_does_not_exists( """ values_found = self.publisher.extract_resource_to_publish( files={"base_file": "/wrong/path/file.gpkg"}, - action="import", + action="upload", layer_name="stazioni_metropolitana", ) self.assertListEqual([], values_found) diff --git a/geonode/upload/tests/unit/test_task.py b/geonode/upload/tests/unit/test_task.py index 1f374da4112..6e67b186091 100644 --- a/geonode/upload/tests/unit/test_task.py +++ b/geonode/upload/tests/unit/test_task.py @@ -120,7 +120,7 @@ def test_import_resource_should_rase_exp_if_is_invalid( with self.assertRaises(InvalidInputFileException) as _exc: import_resource( str(exec_id), - action=ExecutionRequestAction.IMPORT.value, + action=ExecutionRequestAction.UPLOAD.value, handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", ) expected_msg = f"Invalid format type. 
Request: {str(exec_id)}" @@ -151,7 +151,7 @@ def test_import_resource_should_work( import_resource( str(exec_id), resource_type="gpkg", - action=ExecutionRequestAction.IMPORT.value, + action=ExecutionRequestAction.UPLOAD.value, handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", ) @@ -178,7 +178,7 @@ def test_publish_resource_should_work( step_name="publish_resource", layer_name="dataset3", alternate="alternate_dataset3", - action=ExecutionRequestAction.IMPORT.value, + action=ExecutionRequestAction.UPLOAD.value, handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", ) @@ -224,7 +224,7 @@ def test_publish_resource_if_overwrite_should_call_the_publishing( step_name="publish_resource", layer_name="dataset3", alternate="alternate_dataset3", - action=ExecutionRequestAction.IMPORT.value, + action=ExecutionRequestAction.UPLOAD.value, handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", ) @@ -276,7 +276,7 @@ def test_publish_resource_if_overwrite_should_not_call_the_publishing( step_name="publish_resource", layer_name="dataset3", alternate="alternate_dataset3", - action=ExecutionRequestAction.IMPORT.value, + action=ExecutionRequestAction.UPLOAD.value, handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", ) @@ -304,7 +304,7 @@ def test_create_geonode_resource(self, import_orchestrator): layer_name="foo_dataset", alternate="alternate_foo_dataset", handler_module_path="geonode.upload.handlers.gpkg.handler.GPKGFileHandler", - action="import", + action="upload", ) # Evaluation @@ -401,7 +401,7 @@ def test_rollback_works_as_expected_vector_step( user=get_user_model().objects.get(username=self.user), func_name="dummy_func", step=conf[0], # step name - action="import", + action="upload", input_params={ "files": {"base_file": self.existing_file}, "store_spatial_files": True, @@ -456,7 +456,7 @@ def test_rollback_works_as_expected_raster( user=get_user_model().objects.get(username=self.user), 
func_name="dummy_func", step=conf[0], # step name - action="import", + action="upload", input_params={ "files": {"base_file": "/tmp/filepath"}, "store_spatial_files": True, diff --git a/geonode/upload/utils.py b/geonode/upload/utils.py index a82147b6253..0e3750e2ece 100644 --- a/geonode/upload/utils.py +++ b/geonode/upload/utils.py @@ -50,6 +50,9 @@ def get_max_upload_parallelism_limit(slug): class ImporterRequestAction(enum.Enum): ROLLBACK = _("rollback") + RESOURCE_METADATA_UPLOAD = _("resource_metadata_upload") + RESOURCE_STYLE_UPLOAD = _("resource_style_upload") + REPLACE = _("replace") def error_handler(exc, exec_id=None): diff --git a/geonode/utils.py b/geonode/utils.py index 514061ca7f0..60261075fa6 100755 --- a/geonode/utils.py +++ b/geonode/utils.py @@ -29,7 +29,6 @@ import requests import tempfile import ipaddress -import itertools import traceback from lxml import etree @@ -1706,13 +1705,35 @@ def get_geonode_app_types(): def get_supported_datasets_file_types(): from django.conf import settings as gn_settings + from geonode.upload.orchestrator import orchestrator """ Return a list of all supported file type in geonode If one of the type provided in the custom type exists in the default is going to override it """ - default_types = settings.SUPPORTED_DATASET_FILE_TYPES + _available_settings = [ + module().supported_file_extension_config + for module in orchestrator.get_handler_registry() + if module().supported_file_extension_config + ] + # injecting the new config required for FE + default_types = [ + { + "id": "zip", + "formats": [ + { + "label": "Zip Archive", + "required_ext": ["zip"], + "optional_ext": ["xml", "sld"], + } + ], + "actions": ["upload", "replace"], + "type": "archive", + } + ] + default_types.extend(_available_settings) + types_module = ( gn_settings.ADDITIONAL_DATASET_FILE_TYPES if hasattr(gn_settings, "ADDITIONAL_DATASET_FILE_TYPES") else [] ) @@ -1730,7 +1751,7 @@ def get_supported_datasets_file_types(): (weight[1], resource_type) 
for resource_type in supported_types for weight in formats_order - if resource_type.get("format") in weight[0] + if resource_type.get("type") in weight[0] ) # Flatten the list @@ -1738,10 +1759,18 @@ def get_supported_datasets_file_types(): other_resource_types = [ resource_type for resource_type in supported_types - if resource_type.get("format") is None or resource_type.get("format") not in [f[0] for f in formats_order] + if resource_type.get("type") is None or resource_type.get("type") not in [f[0] for f in formats_order] ] return ordered_resource_types + other_resource_types def get_allowed_extensions(): - return list(itertools.chain.from_iterable([_type["ext"] for _type in get_supported_datasets_file_types()])) + """ + The main extension is rappresented by the position 0 of the configuration + that the handlers returns + """ + allowed_extention = [] + for _type in get_supported_datasets_file_types(): + for val in _type["formats"]: + allowed_extention.append(val["required_ext"][0]) + return list(set(allowed_extention))