diff --git a/backend/geonature/core/gn_synthese/imports/actions.py b/backend/geonature/core/gn_synthese/imports/actions.py index fc501b1d53..6b8b7d356f 100644 --- a/backend/geonature/core/gn_synthese/imports/actions.py +++ b/backend/geonature/core/gn_synthese/imports/actions.py @@ -88,7 +88,7 @@ def check_transient_data(task, logger, imprt: TImports): selected_fields = { field_name: fields[field_name] for field_name, source_field in imprt.fieldmapping.items() - if source_field in imprt.columns + if source_field.get("column_src", None) in imprt.columns } init_rows_validity(imprt) task.update_state(state="PROGRESS", meta={"progress": 0.05}) @@ -218,7 +218,15 @@ def update_batch_progress(batch, step): do_nomenclatures_mapping( imprt, entity, - selected_fields, + { + field_name: fields[field_name] + for field_name, mapping in imprt.fieldmapping.items() + if field_name in fields + and ( + mapping.get("column_src", None) in imprt.columns + or mapping.get("default_value") is not None + ) + }, fill_with_defaults=current_app.config["IMPORT"][ "FILL_MISSING_NOMENCLATURE_WITH_DEFAULT_VALUE" ], @@ -339,11 +347,15 @@ def import_data_to_destination(imprt: TImports) -> None: if field_name not in fields: # not a destination field continue field = fields[field_name] + column_src = source_field.get("column_src", None) if field.multi: - if not set(source_field).isdisjoint(imprt.columns): + if not set(column_src).isdisjoint(imprt.columns): insert_fields |= {field} else: - if source_field in imprt.columns: + if ( + column_src in imprt.columns + or source_field.get("default_value", None) is not None + ): insert_fields |= {field} insert_fields -= {fields["unique_dataset_id"]} # Column only used for filling `id_dataset` diff --git a/backend/geonature/core/imports/checks/dataframe/utils.py b/backend/geonature/core/imports/checks/dataframe/utils.py index 4257442559..6dff9ff6d1 100644 --- a/backend/geonature/core/imports/checks/dataframe/utils.py +++ b/backend/geonature/core/imports/checks/dataframe/utils.py @@ -7,7 +7,7 @@ from geonature.utils.env import db -from geonature.core.imports.models import ImportUserError, ImportUserErrorType +from geonature.core.imports.models import ImportUserError, ImportUserErrorType, TImports from geonature.core.imports.utils import generated_fields @@ -101,7 +101,7 @@ def __error_replace(*args, **kwargs): return _error_replace -def report_error(imprt, entity, df, error): +def report_error(imprt: TImports, entity, df, error): """ Reports an error found in the dataframe, updates the validity column and insert the error in the `t_user_errors` table. @@ -147,7 +147,7 @@ def report_error(imprt, entity, df, error): # f'{error_type.name}' # FIXME comment ordered_invalid_rows = sorted(invalid_rows["line_no"]) column = generated_fields.get(error["column"], error["column"]) - column = imprt.fieldmapping.get(column, column) + column = imprt.fieldmapping.get(column, {}).get("column_src", column) # If an error for same import, same column and of the same type already exists, # we concat existing erroneous rows with current rows. 
stmt = pg_insert(ImportUserError).values( diff --git a/backend/geonature/core/imports/checks/sql/core.py b/backend/geonature/core/imports/checks/sql/core.py index 7fa9077346..c2c62200c3 100644 --- a/backend/geonature/core/imports/checks/sql/core.py +++ b/backend/geonature/core/imports/checks/sql/core.py @@ -36,9 +36,10 @@ def init_rows_validity(imprt: TImports, dataset_name_field: str = "id_dataset"): # as rows with multi-entity field only will raise an ORPHAN_ROW error selected_fields_names = [] for field_name, source_field in imprt.fieldmapping.items(): - if type(source_field) == list: - selected_fields_names.extend(set(source_field) & set(imprt.columns)) - elif source_field in imprt.columns: + column_src = source_field.get("column_src", None) + if type(column_src) == list: + selected_fields_names.extend(set(column_src) & set(imprt.columns)) + elif column_src in imprt.columns: selected_fields_names.append(field_name) for entity in entities: # Select fields associated to this entity *and only to this entity* @@ -64,15 +65,16 @@ def init_rows_validity(imprt: TImports, dataset_name_field: str = "id_dataset"): ) -def check_orphan_rows(imprt): +def check_orphan_rows(imprt: TImports): transient_table = imprt.destination.get_transient_table() # TODO: handle multi-source fields # This is actually not a big issue as multi-source fields are unlikely to also be multi-entity fields. selected_fields_names = [] for field_name, source_field in imprt.fieldmapping.items(): - if type(source_field) == list: - selected_fields_names.extend(set(source_field) & set(imprt.columns)) - elif source_field in imprt.columns: + column_src = source_field.get("column_src", None) + if type(column_src) == list: + selected_fields_names.extend(set(column_src) & set(imprt.columns)) + elif column_src in imprt.columns: selected_fields_names.append(field_name) # Select fields associated to multiple entities AllEntityField = sa.orm.aliased(EntityField) diff --git a/backend/geonature/core/imports/checks/sql/utils.py b/backend/geonature/core/imports/checks/sql/utils.py index e07168b001..403cbca4f1 100644 --- a/backend/geonature/core/imports/checks/sql/utils.py +++ b/backend/geonature/core/imports/checks/sql/utils.py @@ -64,7 +64,7 @@ def report_erroneous_rows( transient_table = imprt.destination.get_transient_table() error_type = ImportUserErrorType.query.filter_by(name=error_type).one() error_column = generated_fields.get(error_column, error_column) - error_column = imprt.fieldmapping.get(error_column, error_column) + error_column = imprt.fieldmapping.get(error_column, {}).get("column_src", error_column) if error_type.level in level_validity_mapping: assert entity is not None cte = ( diff --git a/backend/geonature/core/imports/models.py b/backend/geonature/core/imports/models.py index 958b359a9d..e95ff8ac6e 100644 --- a/backend/geonature/core/imports/models.py +++ b/backend/geonature/core/imports/models.py @@ -479,6 +479,7 @@ class BibFields(db.Model): fr_label = db.Column(db.Unicode, nullable=False) eng_label = db.Column(db.Unicode, nullable=True) type_field = db.Column(db.Unicode, nullable=True) + type_field_params = db.Column(MutableDict.as_mutable(JSON)) mandatory = db.Column(db.Boolean, nullable=False) autogenerated = db.Column(db.Boolean, nullable=False) mnemonique = db.Column(db.Unicode, db.ForeignKey(BibNomenclaturesTypes.mnemonique)) @@ -608,7 +609,7 @@ def optional_conditions_to_jsonschema(name_field: str, optional_conditions: Iter "if": { "not": { "properties": { - field_opt: {"type": "string"} for field_opt in 
optional_conditions + field_opt: {"type": "object"} for field_opt in optional_conditions } } }, @@ -726,9 +727,27 @@ def validate_values(field_mapping_json): "type": "object", "properties": { field.name_field: { - "type": ( - "boolean" if field.autogenerated else ("array" if field.multi else "string") - ), + "type": "object", + "properties": { + "column_src": { + "type": ( + "boolean" + if field.autogenerated + else ("array" if field.multi else "string") + ), + }, + "default_value": { + "oneOf": [ + {"type": "boolean"}, + {"type": "number"}, + {"type": "string"}, + {"type": "array"}, + ] + }, + }, + "required": [], + "additionalProperties": False, + "anyOf": [{"required": ["column_src"]}, {"required": ["default_value"]}], } for field in fields }, diff --git a/backend/geonature/core/imports/routes/fields.py b/backend/geonature/core/imports/routes/fields.py index c1ad4b52fe..beaf003800 100644 --- a/backend/geonature/core/imports/routes/fields.py +++ b/backend/geonature/core/imports/routes/fields.py @@ -70,8 +70,11 @@ def get_fields(scope, destination): fields=[ "id_field", "name_field", + "type_field", + "type_field_params", "fr_label", "eng_label", + "mnemonique", "mandatory", "autogenerated", "multi", diff --git a/backend/geonature/core/imports/routes/imports.py b/backend/geonature/core/imports/routes/imports.py index 12284a1e3f..af3e6d1e63 100644 --- a/backend/geonature/core/imports/routes/imports.py +++ b/backend/geonature/core/imports/routes/imports.py @@ -177,6 +177,17 @@ def upload_file(scope, imprt, destination=None): # destination is set when impr assert destination author = g.current_user f = request.files["file"] + field_to_map_str = request.form.get("fieldsToMap") + if field_to_map_str: + fields_to_map = json.loads(field_to_map_str) + # NOTE: not possible to use validate_values here + # try: + # FieldMapping.validate_values(fields_to_map) + # except ValueError as e: + # raise BadRequest(*e.args) + else: + fields_to_map = {} + size = get_file_size(f) # value in config file is in Mo max_file_size = current_app.config["IMPORT"]["MAX_FILE_SIZE"] * 1024 * 1024 @@ -203,6 +214,8 @@ def upload_file(scope, imprt, destination=None): # destination is set when impr if not dataset.active: raise Forbidden("Le jeu de données est fermé.") imprt = TImports(destination=destination, dataset=dataset) + if fields_to_map: + imprt.fieldmapping = fields_to_map imprt.authors.append(author) db.session.add(imprt) else: @@ -368,8 +381,11 @@ def get_import_values(scope, imprt): # this nomenclated field is not mapped continue source = imprt.fieldmapping[field.name_field] - if source not in imprt.columns: - # the file do not contain this field expected by the mapping + if ( + source.get("column_src", None) not in imprt.columns + and source.get("default_value", None) is None + ): + # the file does not contain this field expected by the mapping and there is no default value continue # TODO: vérifier que l'on a pas trop de valeurs différentes ?
column = field.source_column diff --git a/backend/geonature/core/imports/utils.py b/backend/geonature/core/imports/utils.py index 780b110a48..63454d88cd 100644 --- a/backend/geonature/core/imports/utils.py +++ b/backend/geonature/core/imports/utils.py @@ -4,7 +4,7 @@ import json from enum import IntEnum from datetime import datetime, timedelta -from typing import IO, Any, Dict, Iterable, List, Optional, Set, Tuple +from typing import IO, Any, Dict, Iterable, List, Optional, Set, Tuple, Union from flask import current_app, render_template import sqlalchemy as sa @@ -163,7 +163,9 @@ def detect_separator(file_: IO, encoding: str) -> Optional[str]: return dialect.delimiter -def preprocess_value(dataframe: pd.DataFrame, field: BibFields, source_col: str) -> pd.Series: +def preprocess_value( + dataframe: pd.DataFrame, field: BibFields, source_col: Union[str, List[str]], default_value: Any +) -> pd.Series: """ Preprocesses values in a DataFrame depending if the field contains multiple values (e.g. additional_data) or not. @@ -184,8 +186,14 @@ def preprocess_value(dataframe: pd.DataFrame, field: BibFields, source_col: str) """ def build_additional_data(columns: dict): + try: + default_values = json.loads(default_value) + except Exception: + default_values = {} result = {} for key, value in columns.items(): + if value is None or value == "": + value = default_values.get(key, None) if value is None: continue try: @@ -198,9 +206,17 @@ def build_additional_data(columns: dict): if field.multi: assert type(source_col) is list + for col in source_col: + if col not in dataframe.columns: + dataframe[col] = None col = dataframe[source_col].apply(build_additional_data, axis=1) else: + if source_col not in dataframe.columns: + dataframe[source_col] = None col = dataframe[source_col] + if default_value is not None: + col = col.replace({"": default_value, None: default_value}) + return col @@ -244,8 +260,10 @@ def insert_import_data_in_transient_table(imprt: TImports) -> int: } data.update( { - dest_field: preprocess_value(chunk, source_field["field"], source_field["value"]) - for dest_field, source_field in fieldmapping.items() + dest_field: preprocess_value( + chunk, mapping["field"], mapping["column_src"], mapping["default_value"] + ) + for dest_field, mapping in fieldmapping.items() } ) # XXX keep extra_fields in t_imports_synthese? or add config argument? 
@@ -293,21 +311,25 @@ def build_fieldmapping( for field in fields: if field.name_field in imprt.fieldmapping: + mapping = imprt.fieldmapping[field.name_field] + column_src = mapping.get("column_src", None) + default_value = mapping.get("default_value", None) if field.multi: - correct = list(set(columns) & set(imprt.fieldmapping[field.name_field])) + correct = list(set(columns) & set(column_src)) if len(correct) > 0: fieldmapping[field.source_column] = { - "value": correct, "field": field, + "column_src": correct, + "default_value": default_value, } used_columns.extend(correct) else: - if imprt.fieldmapping[field.name_field] in columns: - fieldmapping[field.source_column] = { - "value": imprt.fieldmapping[field.name_field], - "field": field, - } - used_columns.append(imprt.fieldmapping[field.name_field]) + fieldmapping[field.source_column] = { + "field": field, + "column_src": column_src, + "default_value": default_value, + } + used_columns.append(column_src) return fieldmapping, used_columns @@ -442,8 +464,11 @@ def get_mapping_data(import_: TImports, entity: Entity): fields = {ef.field.name_field: ef.field for ef in entity.fields} selected_fields = { field_name: fields[field_name] - for field_name, source_field in import_.fieldmapping.items() - if source_field in import_.columns and field_name in fields + for field_name, mapping in import_.fieldmapping.items() + if ( + mapping.get("column_src") in import_.columns or mapping.get("default_value") is not None + ) + and field_name in fields } source_cols = set() for field in selected_fields.values(): diff --git a/backend/geonature/migrations/versions/imports/a94bea44ab56_type_field_conforms_to_type_widget.py b/backend/geonature/migrations/versions/imports/a94bea44ab56_type_field_conforms_to_type_widget.py new file mode 100644 index 0000000000..3e36f384c2 --- /dev/null +++ b/backend/geonature/migrations/versions/imports/a94bea44ab56_type_field_conforms_to_type_widget.py @@ -0,0 +1,59 @@ +"""bib_field.type_field conforms to dynamic_form.type_widget + +Revision ID: a94bea44ab56 +Revises: e43b01a18850 +Create Date: 2024-12-11 15:44:52.912515 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "a94bea44ab56" +down_revision = "e43b01a18850" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """ + ALTER TABLE gn_imports.bib_fields ADD type_field_params jsonb NULL; + """ + ) + op.execute( + """ + UPDATE gn_imports.bib_fields + SET type_field = + case + -- mnemonique is handled on the front end + WHEN mnemonique IS NOT NULL AND mnemonique != '' THEN NULL + + -- multi is handled on the front end + WHEN multi = true THEN null + + WHEN type_field IN ('integer', 'real') THEN 'number' + + WHEN type_field IN ('geometry', 'jsonb', 'json', 'wkt') THEN 'textarea' + + WHEN type_field LIKE 'timestamp%' THEN 'date' + + WHEN type_field ~ '^character varying\((\d+)\)$' + AND COALESCE(substring(type_field FROM '\d+')::int, 0) > 68 THEN 'textarea' + + -- Default: keep the current value.
+ ELSE NULL + END; + """ + ) + + +def downgrade(): + op.execute( + """ + ALTER TABLE gn_imports.bib_fields DROP COLUMN type_field_params; + + """ + ) diff --git a/backend/geonature/migrations/versions/imports/e43b01a18850_fieldmapping_default_values.py b/backend/geonature/migrations/versions/imports/e43b01a18850_fieldmapping_default_values.py new file mode 100644 index 0000000000..066ccd52de --- /dev/null +++ b/backend/geonature/migrations/versions/imports/e43b01a18850_fieldmapping_default_values.py @@ -0,0 +1,55 @@ +"""fieldmapping default values + +Revision ID: e43b01a18850 +Revises: 2b0b3bd0248c +Create Date: 2024-11-28 17:33:06.243150 + +""" + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "e43b01a18850" +down_revision = "2b0b3bd0248c" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute( + """UPDATE gn_imports.t_fieldmappings + SET "values" = ( + SELECT json_object_agg(key, json_build_object('column_src', value)) + FROM json_each("values") + ) + WHERE "values" IS NOT NULL;""" + ) + op.execute( + """UPDATE gn_imports.t_imports + SET fieldmapping = ( + SELECT json_object_agg(key, json_build_object('column_src', value)) + FROM json_each(fieldmapping) + ) + WHERE fieldmapping IS NOT NULL;""" + ) + + +def downgrade(): + op.execute( + """UPDATE gn_imports.t_fieldmappings + SET "values" = ( + SELECT json_object_agg(key, value->'column_src') + FROM json_each("values") + ) + WHERE "values" IS NOT NULL;""" + ) + op.execute( + """UPDATE gn_imports.t_imports + SET fieldmapping = ( + SELECT json_object_agg(key, value->'column_src') + FROM json_each(fieldmapping) + ) + WHERE fieldmapping IS NOT NULL;""" + ) diff --git a/backend/geonature/tests/imports/jsonschema_definitions.py b/backend/geonature/tests/imports/jsonschema_definitions.py index 42a94a4520..a968b52f32 100644 --- a/backend/geonature/tests/imports/jsonschema_definitions.py +++ b/backend/geonature/tests/imports/jsonschema_definitions.py @@ -36,7 +36,16 @@ ], }, "type_field": { - "type": "string", + "type": [ + "string", + "null", + ], + }, + "type_field_params": { + "type": [ + "object", + "null", + ], }, "synthese_field": { "type": "boolean", diff --git a/backend/geonature/tests/imports/test_imports_occhab.py b/backend/geonature/tests/imports/test_imports_occhab.py index b0fe44064f..4865cde719 100644 --- a/backend/geonature/tests/imports/test_imports_occhab.py +++ b/backend/geonature/tests/imports/test_imports_occhab.py @@ -60,7 +60,7 @@ def fieldmapping(occhab_destination): .unique() .all() ) - return {field.name_field: field.name_field for field in fields} + return {field.name_field: {"column_src": field.name_field} for field in fields} @pytest.fixture() diff --git a/backend/geonature/tests/imports/test_imports_synthese.py b/backend/geonature/tests/imports/test_imports_synthese.py index 21d6ea956b..b7308bd313 100644 --- a/backend/geonature/tests/imports/test_imports_synthese.py +++ b/backend/geonature/tests/imports/test_imports_synthese.py @@ -208,11 +208,15 @@ def fieldmapping(import_file_name, autogenerate): else: bib_fields = db.session.scalars(sa.select(BibFields).filter_by(display=True)).unique().all() return { - field.name_field: ( - autogenerate - if field.autogenerated - else ([field.name_field, "additional_data2"] if field.multi else field.name_field) - ) + field.name_field: { + "column_src": ( + autogenerate + if field.autogenerated + else ( + [field.name_field, "additional_data2"] if field.multi else field.name_field + ) + ) + } for field in 
bib_fields } @@ -927,16 +931,20 @@ def test_import_valid_file(self, users, datasets): .unique() .scalar_one() ) + fieldmapping_values = fieldmapping.values.copy() + fieldmapping_values.update( + {"count_max": fieldmapping_values.get("count_max", {}) | {"default_value": 5}} + ) r = self.client.post( url_for("import.set_import_field_mapping", import_id=imprt.id_import), - data=fieldmapping.values, + data=fieldmapping_values, ) assert r.status_code == 200, r.data validate_json( r.json, {"definitions": jsonschema_definitions, "$ref": "#/definitions/import"}, ) - assert r.json["fieldmapping"] == fieldmapping.values + assert r.json["fieldmapping"] == fieldmapping_values # Loading step r = self.client.post(url_for("import.load_import", import_id=imprt.id_import)) diff --git a/backend/geonature/tests/imports/test_mappings.py b/backend/geonature/tests/imports/test_mappings.py index 4988287198..38dbab98d7 100644 --- a/backend/geonature/tests/imports/test_mappings.py +++ b/backend/geonature/tests/imports/test_mappings.py @@ -37,11 +37,13 @@ def mappings(synthese_destination, users): .all() ) fieldmapping_values = { - field.name_field: ( - True - if field.autogenerated - else ([field.name_field] if field.multi else field.name_field) - ) + field.name_field: { + "column_src": ( + True + if field.autogenerated + else ([field.name_field] if field.multi else field.name_field) + ) + } for field in bib_fields } @@ -266,11 +268,11 @@ def get_mapping(mapping): def test_add_field_mapping(self, users, mappings): fieldmapping = { - "WKT": "geometrie", - "nom_cite": "nomcite", - "cd_nom": "cdnom", - "cd_hab": "cdhab", - "observers": "observateurs", + "WKT": {"column_src": "geometrie"}, + "nom_cite": {"column_src": "nomcite"}, + "cd_nom": {"column_src": "cdnom"}, + "cd_hab": {"column_src": "cdhab"}, + "observers": {"column_src": "observateurs"}, } url = url_for("import.add_mapping", mappingtype="field") @@ -296,7 +298,7 @@ def test_add_field_mapping(self, users, mappings): label=mappings["content_public"].label, ) - r = self.client.post(url, data={"unexisting": "source column"}) + r = self.client.post(url, data={"unexisting": {"column_src": "source column"}}) assert r.status_code == BadRequest.code r = self.client.post(url, data=fieldmapping) @@ -304,7 +306,7 @@ def test_add_field_mapping(self, users, mappings): fieldmapping.update( { - "date_min": "date_debut", + "date_min": {"column_src": "date_debut"}, } ) r = self.client.post(url, data=fieldmapping) @@ -406,7 +408,7 @@ def test_update_field_mapping_values(self, users, mappings): fm = mappings["field_public"] fieldvalues_update = deepcopy(fm.values) - fieldvalues_update["WKT"] = "WKT2" + fieldvalues_update["WKT"] = {"column_src": "WKT2"} fieldvalues_should = deepcopy(fieldvalues_update) del fieldvalues_update["validator"] # should not removed from mapping! 
r = self.client.post( diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/actions.py b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/actions.py index 1ecddc529c..a0fd94a8a0 100644 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/imports/actions.py +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/imports/actions.py @@ -474,16 +474,20 @@ def import_data_to_destination(imprt: TImports) -> None: ef.field.name_field: ef.field for ef in entity.fields if ef.field.dest_field != None } insert_fields = {fields["id_station"]} - for field_name, source_field in imprt.fieldmapping.items(): + for field_name, mapping in imprt.fieldmapping.items(): if field_name not in fields: # not a destination field continue field = fields[field_name] + column_src = mapping.get("column_src", None) if field.multi: # TODO@TestImportsOcchab.test_import_valid_file: add testcase - if not set(source_field).isdisjoint(imprt.columns): + if not set(column_src).isdisjoint(imprt.columns): insert_fields |= {field} else: - if source_field in imprt.columns: + if ( + column_src in imprt.columns + or mapping.get("default_value", None) is not None + ): insert_fields |= {field} if entity.code == "station": # unique_dataset_id is replaced with id_dataset diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/650f1d749b3b_add_default_mapping.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/650f1d749b3b_add_default_mapping.py new file mode 100644 index 0000000000..d4b6da7b1c --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/650f1d749b3b_add_default_mapping.py @@ -0,0 +1,175 @@ +"""add_default_mapping + +Revision ID: 650f1d749b3b +Revises: c1a6b0793360 +Create Date: 2024-12-12 13:21:49.612529 + +""" + +from alembic import op +import sqlalchemy as sa +from sqlalchemy.orm import Session +from sqlalchemy.sql import table, column +from sqlalchemy.dialects.postgresql import JSON +from sqlalchemy.ext.mutable import MutableDict + + +# revision identifiers, used by Alembic. 
+revision = "650f1d749b3b" +down_revision = "c1a6b0793360" +branch_labels = None +depends_on = "e43b01a18850" + + +def get_models(conn): + metadata = sa.MetaData(bind=conn) + MappingTemplate = sa.Table("t_mappings", metadata, schema="gn_imports", autoload_with=conn) + FieldMapping = sa.Table("t_fieldmappings", metadata, schema="gn_imports", autoload_with=conn) + return MappingTemplate, FieldMapping + + +def upgrade(): + conn = op.get_bind() + MappingTemplate, FieldMapping = get_models(conn) + + session = Session(bind=op.get_bind()) + id_destination_occhab = session.scalar( + sa.text("SELECT id_destination FROM gn_imports.bib_destinations WHERE code = 'occhab'") + ) + + id_occhab_mapping = session.execute( + sa.select(MappingTemplate.c.id).where(MappingTemplate.c.label == "Occhab") + ).scalar() + + if not id_occhab_mapping: + id_occhab_mapping = session.execute( + sa.insert(MappingTemplate) + .values( + label="Occhab", + type="FIELD", + active=True, + public=True, + id_destination=id_destination_occhab, + ) + .returning(MappingTemplate.c.id) + ).first()[0] + + existing_occhab_fieldmapping = session.execute( + sa.select(FieldMapping.c.id).where(FieldMapping.c.id == id_occhab_mapping) + ).first() + + if not existing_occhab_fieldmapping: + session.execute( + sa.insert(FieldMapping).values( + id=id_occhab_mapping, + values={ + "WKT": {"column_src": "geometry"}, + "altitude_max": {"column_src": "altitude_max"}, + "altitude_min": {"column_src": "altitude_min"}, + "area": {"column_src": "area"}, + "cd_hab": {"column_src": "cd_hab"}, + "comment": {"column_src": "comment"}, + "date_max": {"column_src": "date_fin"}, + "date_min": {"column_src": "date_debut"}, + "depth_max": {"column_src": "depth_max"}, + "depth_min": {"column_src": "depth_min"}, + "id_nomenclature_area_surface_calculation": { + "column_src": "methode_calcul_surface" + }, + "id_nomenclature_exposure": {"column_src": "exposition"}, + "id_nomenclature_geographic_object": {"column_src": "nature_objet_geo"}, + "id_station_source": {"column_src": "id_station"}, + "nom_cite": {"column_src": "nom_cite"}, + "observers_txt": {"column_src": "observateurs"}, + "technical_precision": {"column_src": "precision_technique"}, + "unique_dataset_id": {"column_src": "uuid_jdd"}, + "unique_id_sinp_habitat": {"column_src": "uuid_habitat"}, + "unique_id_sinp_station": {"column_src": "uuid_station"}, + }, + ) + ) + + id_mapping_sinp = session.execute( + sa.select(MappingTemplate.c.id).where( + MappingTemplate.c.label == "Occurrences d'habitats SINP" + ) + ).scalar() + + if not id_mapping_sinp: + id_mapping_sinp = session.execute( + sa.insert(MappingTemplate) + .values( + label="Occurrences d'habitats SINP", + type="FIELD", + active=True, + public=True, + id_destination=id_destination_occhab, + ) + .returning(MappingTemplate.c.id) + ).first()[0] + + existing_sinp_fieldmapping = session.execute( + sa.select(FieldMapping.c.id).where(FieldMapping.c.id == id_mapping_sinp) + ).first() + + if not existing_sinp_fieldmapping: + session.execute( + sa.insert(FieldMapping).values( + id=id_mapping_sinp, + values={ + "WKT": {"column_src": "WKT"}, + "altitude_max": {"column_src": "altMax"}, + "altitude_min": {"column_src": "altMin"}, + "area": {"column_src": "surf"}, + "cd_hab": {"column_src": "cdHab"}, + "comment": {"column_src": "comment"}, + "date_max": {"column_src": "dateFin"}, + "date_min": {"column_src": "dateDebut"}, + "depth_max": {"column_src": "profMax"}, + "depth_min": {"column_src": "profMin"}, + "determiner": {"column_src": "persDeterm"}, + "id_habitat": 
{"column_src": "idOrigine"}, + "id_nomenclature_abundance": {"column_src": "abondHab"}, + "id_nomenclature_area_surface_calculation": { + "column_src": "methodeCalculSurface" + }, + "id_nomenclature_collection_technique": {"column_src": "techCollec"}, + "id_nomenclature_community_interest": { + "column_src": "habitatInteretCommunautaire " + }, + "id_nomenclature_determination_type": {"column_src": "typeDeterm"}, + "id_nomenclature_exposure": {"column_src": "exposition"}, + "id_nomenclature_geographic_object": {"column_src": "natObjGeo"}, + "id_nomenclature_sensitivity": {"column_src": "sensibiliteHab"}, + "id_station_source": {"column_src": "idOrigEvt"}, + "is_habitat_complex": {"column_src": "mosaique"}, + "nom_cite": {"column_src": "nomCite"}, + "numerization_scale": {"column_src": "echelleNumerisation"}, + "observers_txt": {"column_src": "observer"}, + "precision": {"column_src": "precisGeo"}, + "recovery_percentage": {"column_src": "recouv"}, + "station_name": {"column_src": "nomStation"}, + "technical_precision": {"column_src": "precisionTechnique"}, + "unique_dataset_id": {"column_src": "jddMetaId"}, + "unique_id_sinp_grp_phyto": {"column_src": "relevePhyto"}, + "unique_id_sinp_habitat": {"column_src": "idSinpHab"}, + "unique_id_sinp_station": {"column_src": "permId"}, + }, + ) + ) + + session.commit() + session.close() + + +def downgrade(): + conn = op.get_bind() + MappingTemplate, FieldMapping = get_models(conn) + + cte = ( + sa.select(MappingTemplate.c.id) + .where(MappingTemplate.c.label.in_(["Occhab", "Occurrences d'habitats SINP"])) + .cte("mapping_cte") + ) + op.execute(sa.delete(FieldMapping).where(FieldMapping.c.id == cte.c.id)) + op.execute(sa.delete(MappingTemplate).where(MappingTemplate.c.id == cte.c.id)) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_add_default_mapping.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_add_default_mapping.py deleted file mode 100644 index 0914c68b40..0000000000 --- a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_add_default_mapping.py +++ /dev/null @@ -1,145 +0,0 @@ -"""add_default_mapping - -Revision ID: aed662bbd88a -Revises: fcf1e091b636 -Create Date: 2024-07-19 11:04:52.224271 - -""" - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.orm import Session -from sqlalchemy.sql import table, column -from sqlalchemy.dialects.postgresql import JSON -from sqlalchemy.ext.mutable import MutableDict - -# revision identifiers, used by Alembic. 
-revision = "aed662bbd88a" -down_revision = "69494f900cab" -branch_labels = None -depends_on = None - - -def get_models(conn): - metadata = sa.MetaData(bind=conn) - MappingTemplate = sa.Table("t_mappings", metadata, schema="gn_imports", autoload_with=conn) - FieldMapping = sa.Table("t_fieldmappings", metadata, schema="gn_imports", autoload_with=conn) - return MappingTemplate, FieldMapping - - -def upgrade(): - conn = op.get_bind() - MappingTemplate, FieldMapping = get_models(conn) - - session = Session(bind=op.get_bind()) - id_destination_occhab = session.scalar( - sa.text("SELECT id_destination FROM gn_imports.bib_destinations WHERE code = 'occhab'") - ) - - id_occhab_mapping = session.execute( - sa.insert(MappingTemplate) - .values( - label="Occhab", - type="FIELD", - active=True, - public=True, - id_destination=id_destination_occhab, - ) - .returning(MappingTemplate.c.id) - ).first()[0] - - session.execute( - sa.insert(FieldMapping).values( - id=id_occhab_mapping, - values={ - "WKT": "geometry", - "altitude_max": "altitude_max", - "altitude_min": "altitude_min", - "area": "area", - "cd_hab": "cd_hab", - "comment": "comment", - "date_max": "date_fin", - "date_min": "date_debut", - "depth_max": "depth_max", - "depth_min": "depth_min", - "id_nomenclature_area_surface_calculation": "methode_calcul_surface", - "id_nomenclature_exposure": "exposition", - "id_nomenclature_geographic_object": "nature_objet_geo", - "id_station_source": "id_station", - "nom_cite": "nom_cite", - "observers_txt": "observateurs", - "technical_precision": "precision_technique", - "unique_dataset_id": "uuid_jdd", - "unique_id_sinp_habitat": "uuid_habitat", - "unique_id_sinp_station": "uuid_station", - }, - ) - ) - - id_mapping_sinp = session.execute( - sa.insert(MappingTemplate) - .values( - label="Occurrences d'habitats SINP", - type="FIELD", - active=True, - public=True, - id_destination=id_destination_occhab, - ) - .returning(MappingTemplate.c.id) - ).first()[0] - - session.execute( - sa.insert(FieldMapping).values( - id=id_mapping_sinp, - values={ - "WKT": "WKT", - "altitude_max": "altMax", - "altitude_min": "altMin", - "area": "surf", - "cd_hab": "cdHab", - "comment": "comment", - "date_max": "dateFin", - "date_min": "dateDebut", - "depth_max": "profMax", - "depth_min": "profMin", - "determiner": "persDeterm", - "id_habitat": "idOrigine", - "id_nomenclature_abundance": "abondHab", - "id_nomenclature_area_surface_calculation": "methodeCalculSurface", - "id_nomenclature_collection_technique": "techCollec", - "id_nomenclature_community_interest": "habitatInteretCommunautaire ", - "id_nomenclature_determination_type": "typeDeterm", - "id_nomenclature_exposure": "exposition", - "id_nomenclature_geographic_object": "natObjGeo", - "id_nomenclature_sensitivity": "sensibiliteHab", - "id_station_source": "idOrigEvt", - "is_habitat_complex": "mosaique", - "nom_cite": "nomCite", - "numerization_scale": "echelleNumerisation", - "observers_txt": "observer", - "precision": "precisGeo", - "recovery_percentage": "recouv", - "station_name": "nomStation", - "technical_precision": "precisionTechnique", - "unique_dataset_id": "jddMetaId", - "unique_id_sinp_grp_phyto": "relevePhyto", - "unique_id_sinp_habitat": "idSinpHab", - "unique_id_sinp_station": "permId", - }, - ) - ) - session.commit() - session.close() - - -def downgrade(): - conn = op.get_bind() - MappingTemplate, FieldMapping = get_models(conn) - - cte = ( - sa.select(MappingTemplate.c.id) - .where(MappingTemplate.c.label.in_(["OccHab", "Occurrences d'habitats SINP"])) - 
.cte("mapping_cte") - ) - op.execute(sa.delete(FieldMapping).where(FieldMapping.c.id == cte.c.id)) - op.execute(sa.delete(MappingTemplate).where(MappingTemplate.c.id == cte.c.id)) diff --git a/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_do_nothing.py b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_do_nothing.py new file mode 100644 index 0000000000..b6a10e8676 --- /dev/null +++ b/contrib/gn_module_occhab/backend/gn_module_occhab/migrations/aed662bbd88a_do_nothing.py @@ -0,0 +1,21 @@ +"""do_nothing + +Revision ID: aed662bbd88a +Revises: fcf1e091b636 +Create Date: 2024-07-19 11:04:52.224271 + +""" + +# revision identifiers, used by Alembic. +revision = "aed662bbd88a" +down_revision = "69494f900cab" +branch_labels = None +depends_on = None + + +def upgrade(): + pass + + +def downgrade(): + pass diff --git a/frontend/cypress/e2e/import/fieldmapping-context-from-destination-spec.js b/frontend/cypress/e2e/import/fieldmapping-context-from-destination-spec.js new file mode 100644 index 0000000000..75a51a337b --- /dev/null +++ b/frontend/cypress/e2e/import/fieldmapping-context-from-destination-spec.js @@ -0,0 +1,106 @@ +import { USERS } from './constants/users'; +import { TIMEOUT_WAIT, VIEWPORTS } from './constants/common'; +import { FILES } from './constants/files'; + +const USER = USERS[0]; +const VIEWPORT = VIEWPORTS[0]; + +function testQueryParamField(dataQa, paramName, expectedValue, fieldType) { + if (fieldType === 'textarea' || fieldType === 'text' || fieldType === 'number') { + cy.get(dataQa) + .find(`[data-qa^="field-${fieldType}-${paramName}_default_value_"]`) + .should('have.value', expectedValue); + } +} + +const paramsByDestination = [ + { + destination: 'synthese', + queryParams: [ + { + paramsName: 'nom_cite', + paramsValue: 'test_nomcite', + fieldType: 'textarea', + isTypeComp: false, + expectedValue: 'test_nomcite', + }, + { + paramsName: 'altitude_max', + paramsValue: 10, + fieldType: 'number', + isTypeComp: false, + expectedValue: 10, + }, + { + paramsName: 'date_min', + paramsValue: '2024-12-12', + fieldParentType: 'field-date', + fieldType: 'date', + isTypeComp: true, + expectedValue: '12/12/2024', + }, + { + paramsName: 'id_nomenclature_geo_object_nature', + paramsValue: 'Inventoriel', + fieldParentType: 'field-nomenclature', + fieldType: 'nomenclature', + isTypeComp: true, + expectedValue: 'Inventoriel', + }, + ], + }, + // { + // destination: 'occhab', + // queryParams: [ + // { paramsName: 'nom_cite', paramsValue: 'test_nomcite' }, + // { paramsName: 'date_min', paramsValue: '2024-12-12' }, + // ], + // }, +]; + +describe('Import - Upload step', () => { + context(`viewport: ${VIEWPORT.width}x${VIEWPORT.height}`, () => { + beforeEach(() => { + cy.viewport(VIEWPORT.width, VIEWPORT.height); + cy.geonatureLogin(USER.login.username, USER.login.password); + cy.wait(TIMEOUT_WAIT); + }); + + paramsByDestination.forEach(({ destination, queryParams }) => { + it(`Should handle query params for destination: ${destination}`, () => { + const urlParams = queryParams + .map((param) => `${param.paramsName}=${param.paramsValue}`) + .join('&'); + cy.visit(`/#/import/${destination}/process/upload?${urlParams}`); + + cy.pickDataset(USER.dataset); + cy.loadImportFile(FILES.synthese.valid.fixture); + cy.configureImportFile(); + + queryParams.forEach(({ paramsName, paramsValue, fieldType, isTypeComp, expectedValue }) => { + let dataQa = `[data-qa="import-fieldmapping-theme-${paramsName}"]`; + + // Retrieve and check the value according to the field type
+ if (!isTypeComp) { + testQueryParamField(dataQa, paramsName, expectedValue, fieldType); + } else if (fieldType === 'date' || fieldType === 'nomenclature') { + // If the field is of type 'date' or 'nomenclature' + dataQa = `[data-qa="field-${fieldType}-${paramsName}_default_value"]`; + cy.get(dataQa) + .find(`input`) + .each(($el) => cy.wrap($el).scrollIntoView().should('be.visible')); + + if (fieldType === 'date') { + cy.get(dataQa).find('[data-qa="input-date"]').should('have.value', expectedValue); + } else if (fieldType === 'nomenclature') { + cy.get(`${dataQa} .ng-value-container .ng-value-label`).should( + 'have.text', + expectedValue + ); + } + } + }); + }); + }); + }); +}); diff --git a/frontend/src/app/GN2CommonModule/form/dynamic-form/dynamic-form.component.html b/frontend/src/app/GN2CommonModule/form/dynamic-form/dynamic-form.component.html index 9d96cd8045..6063bbb6f3 100644 --- a/frontend/src/app/GN2CommonModule/form/dynamic-form/dynamic-form.component.html +++ b/frontend/src/app/GN2CommonModule/form/dynamic-form/dynamic-form.component.html @@ -50,6 +50,7 @@ [formControl]="form.get(formDefComp['attribut_name'])" id="{{ formDefComp['attribut_name'] }}_{{ rand }}" type="text" + data-qa="field-text-{{ formDefComp['attribut_name'] }}_{{ rand }}" />
@@ -93,6 +97,7 @@ class="form-control form-control-sm" id="{{ formDefComp['attribut_name'] }}_{{ rand }}" [formControl]="form.get(formDefComp['attribut_name'])" + [attr.data-qa]="'field-select-' + '-' + formDefComp['attribut_name'] + '_' + rand" >
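Note on the new mapping format: each fieldmapping entry is now an object carrying "column_src" and/or "default_value" instead of a bare source-column name. The following is a minimal, illustrative sketch of the constraint that FieldMapping.validate_values builds per field (trimmed schema, single hypothetical field), not the module's actual code:

# Sketch only: shows how the per-field anyOf constraint from the patched
# validate_values() behaves on the new mapping objects.
from jsonschema import ValidationError, validate

schema = {
    "type": "object",
    "properties": {
        "date_min": {
            "type": "object",
            "properties": {
                "column_src": {"type": "string"},
                "default_value": {
                    "oneOf": [
                        {"type": "boolean"},
                        {"type": "number"},
                        {"type": "string"},
                        {"type": "array"},
                    ]
                },
            },
            "additionalProperties": False,
            "anyOf": [{"required": ["column_src"]}, {"required": ["default_value"]}],
        },
    },
    "additionalProperties": False,
}

validate({"date_min": {"column_src": "date_debut"}}, schema)     # accepted: column only
validate({"date_min": {"default_value": "2024-01-01"}}, schema)  # accepted: default only
try:
    validate({"date_min": {}}, schema)                           # rejected: neither key present
except ValidationError as err:
    print(err.message)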
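The e43b01a18850 migration only rewraps existing mapping values; a sketch of the equivalent round-trip in Python, using an invented sample mapping:

# Sketch only: upgrade wraps each source column in {"column_src": ...}
# (json_object_agg + json_build_object in the migration); downgrade unwraps it.
old_values = {"WKT": "geometrie", "observers": ["obs1", "obs2"], "altitude_min": "alt_min"}

upgraded = {key: {"column_src": value} for key, value in old_values.items()}
downgraded = {key: value["column_src"] for key, value in upgraded.items()}

assert downgraded == old_values
print(upgraded["WKT"])  # {'column_src': 'geometrie'}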
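For single-column fields, preprocess_value now falls back to the mapped default_value when a cell is empty or missing; a sketch with made-up data mirroring the replace() call added in the patch:

# Sketch only: empty strings and missing values take the mapped default,
# as in the patched single-column branch of preprocess_value().
import pandas as pd

df = pd.DataFrame({"count_max": ["3", "", None]})
default_value = 5
col = df["count_max"].replace({"": default_value, None: default_value})
print(col.tolist())  # expected: ['3', 5, 5]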