diff --git a/antarest/core/tasks/service.py b/antarest/core/tasks/service.py index 92ddc203de..0ac0e9d875 100644 --- a/antarest/core/tasks/service.py +++ b/antarest/core/tasks/service.py @@ -240,7 +240,7 @@ def _launch_task( message=custom_event_messages.start if custom_event_messages is not None else f"Task {task.id} added", - ).dict(), + ).model_dump(), permissions=PermissionInfo(owner=request_params.user.impersonator), ) ) @@ -349,7 +349,7 @@ def _run_task( message=custom_event_messages.running if custom_event_messages is not None else f"Task {task_id} is running", - ).dict(), + ).model_dump(), permissions=PermissionInfo(public_mode=PublicMode.READ), channel=EventChannelDirectory.TASK + task_id, ) @@ -395,7 +395,7 @@ def _run_task( if custom_event_messages is not None else f"Task {task_id} {event_msg}" ), - ).dict(), + ).model_dump(), permissions=PermissionInfo(public_mode=PublicMode.READ), channel=EventChannelDirectory.TASK + task_id, ) @@ -420,7 +420,7 @@ def _run_task( self.event_bus.push( Event( type=EventType.TASK_FAILED, - payload=TaskEventPayload(id=task_id, message=message).dict(), + payload=TaskEventPayload(id=task_id, message=message).model_dump(), permissions=PermissionInfo(public_mode=PublicMode.READ), channel=EventChannelDirectory.TASK + task_id, ) diff --git a/antarest/eventbus/web.py b/antarest/eventbus/web.py index cae0ffb99f..fb98dab6c5 100644 --- a/antarest/eventbus/web.py +++ b/antarest/eventbus/web.py @@ -83,7 +83,7 @@ def configure_websockets(application: FastAPI, config: Config, event_bus: IEvent manager = ConnectionManager() async def send_event_to_ws(event: Event) -> None: - event_data = event.dict() + event_data = event.model_dump() del event_data["permissions"] del event_data["channel"] await manager.broadcast(json.dumps(event_data), event.permissions, event.channel) diff --git a/antarest/launcher/adapters/abstractlauncher.py b/antarest/launcher/adapters/abstractlauncher.py index 84c4c6316e..78e4f0dc7b 100644 --- 
a/antarest/launcher/adapters/abstractlauncher.py +++ b/antarest/launcher/adapters/abstractlauncher.py @@ -102,6 +102,6 @@ def update_log(log_line: str) -> None: channel=EventChannelDirectory.JOB_STATUS + job_id, ) ) - self.cache.put(f"Launch_Progress_{job_id}", launch_progress_dto.dict()) + self.cache.put(f"Launch_Progress_{job_id}", launch_progress_dto.model_dump()) return update_log diff --git a/antarest/launcher/service.py b/antarest/launcher/service.py index 75e88a01f6..b16c2476f3 100644 --- a/antarest/launcher/service.py +++ b/antarest/launcher/service.py @@ -175,7 +175,7 @@ def update( self.event_bus.push( Event( type=EventType.STUDY_JOB_COMPLETED if final_status else EventType.STUDY_JOB_STATUS_UPDATE, - payload=job_result.to_dto().dict(), + payload=job_result.to_dto().model_dump(), permissions=PermissionInfo(public_mode=PublicMode.READ), channel=EventChannelDirectory.JOB_STATUS + job_result.id, ) @@ -252,7 +252,7 @@ def run_study( self.event_bus.push( Event( type=EventType.STUDY_JOB_STARTED, - payload=job_status.to_dto().dict(), + payload=job_status.to_dto().model_dump(), permissions=PermissionInfo.from_study(study_info), ) ) @@ -293,7 +293,7 @@ def kill_job(self, job_id: str, params: RequestParameters) -> JobResult: self.event_bus.push( Event( type=EventType.STUDY_JOB_CANCELLED, - payload=job_status.to_dto().dict(), + payload=job_status.to_dto().model_dump(), permissions=PermissionInfo.from_study(study), channel=EventChannelDirectory.JOB_STATUS + job_result.id, ) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index d962039b07..4f328366c6 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -95,7 +95,7 @@ class UpdateAreaUi(BaseModel, extra="forbid", allow_population_by_field_name=Tru ... 
} >>> model = UpdateAreaUi(**obj) - >>> pprint(model.dict(by_alias=True), width=80) + >>> pprint(model.model_dump(by_alias=True), width=80) {'colorRgb': [230, 108, 44], 'layerColor': {0: '230, 108, 44', 4: '230, 108, 44', @@ -215,10 +215,14 @@ def from_model( obj = { "average_unsupplied_energy_cost": average_unsupplied_energy_cost, "average_spilled_energy_cost": average_spilled_energy_cost, - **area_folder.optimization.filtering.dict(by_alias=False), - **area_folder.optimization.nodal_optimization.dict(by_alias=False), + **area_folder.optimization.filtering.model_dump(by_alias=False), + **area_folder.optimization.nodal_optimization.model_dump(by_alias=False), # adequacy_patch is only available if study version >= 830. - **(area_folder.adequacy_patch.adequacy_patch.dict(by_alias=False) if area_folder.adequacy_patch else {}), + **( + area_folder.adequacy_patch.adequacy_patch.model_dump(by_alias=False) + if area_folder.adequacy_patch + else {} + ), } return cls(**obj) @@ -347,7 +351,7 @@ def update_areas_props( for area_id, update_area in update_areas_by_ids.items(): # Update the area properties. old_area = old_areas_by_ids[area_id] - new_area = old_area.copy(update=update_area.dict(by_alias=False, exclude_none=True)) + new_area = old_area.copy(update=update_area.model_dump(by_alias=False, exclude_none=True)) new_areas_by_ids[area_id] = new_area # Convert the DTO to a configuration object and update the configuration file. 
@@ -728,7 +732,7 @@ def update_thermal_cluster_metadata( id=area_id, name=file_study.config.areas[area_id].name, type=AreaType.AREA, - metadata=patch.areas.get(area_id, PatchArea()).dict(), + metadata=patch.areas.get(area_id, PatchArea()).model_dump(), thermals=self._get_clusters(file_study, area_id, patch), set=None, ) diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py index 78d0c188f6..7b9526f059 100644 --- a/antarest/study/business/areas/renewable_management.py +++ b/antarest/study/business/areas/renewable_management.py @@ -69,7 +69,7 @@ def validate_name(cls, name: t.Optional[str]) -> str: return name def to_config(self, study_version: t.Union[str, int]) -> RenewableConfigType: - values = self.dict(by_alias=False, exclude_none=True) + values = self.model_dump(by_alias=False, exclude_none=True) return create_renewable_config(study_version=study_version, **values) @@ -99,7 +99,7 @@ def create_renewable_output( config: t.Mapping[str, t.Any], ) -> "RenewableClusterOutput": obj = create_renewable_config(study_version=study_version, **config, id=cluster_id) - kwargs = obj.dict(by_alias=False) + kwargs = obj.model_dump(by_alias=False) return RenewableClusterOutput(**kwargs) @@ -206,7 +206,7 @@ def _make_create_cluster_cmd(self, area_id: str, cluster: RenewableConfigType) - command = CreateRenewablesCluster( area_id=area_id, cluster_name=cluster.id, - parameters=cluster.dict(by_alias=True, exclude={"id"}), + parameters=cluster.model_dump(by_alias=True, exclude={"id"}), command_context=self.storage_service.variant_study_service.command_factory.command_context, ) return command @@ -269,7 +269,7 @@ def update_cluster( old_config = create_renewable_config(study_version, **values) # use Python values to synchronize Config and Form values - new_values = cluster_data.dict(by_alias=False, exclude_none=True) + new_values = cluster_data.model_dump(by_alias=False, exclude_none=True) new_config = 
old_config.copy(exclude={"id"}, update=new_values) new_data = json.loads(new_config.model_dump_json(by_alias=True, exclude={"id"})) @@ -288,7 +288,7 @@ def update_cluster( ] execute_or_add_commands(study, file_study, commands, self.storage_service) - values = new_config.dict(by_alias=False) + values = new_config.model_dump(by_alias=False) return RenewableClusterOutput(**values, id=cluster_id) def delete_clusters(self, study: Study, area_id: str, cluster_ids: t.Sequence[str]) -> None: @@ -340,7 +340,7 @@ def duplicate_cluster( # Cluster duplication current_cluster = self.get_cluster(study, area_id, source_id) current_cluster.name = new_cluster_name - creation_form = RenewableClusterCreation(**current_cluster.dict(by_alias=False, exclude={"id"})) + creation_form = RenewableClusterCreation(**current_cluster.model_dump(by_alias=False, exclude={"id"})) new_config = creation_form.to_config(study.version) create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config) @@ -358,7 +358,7 @@ def duplicate_cluster( execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service) - return RenewableClusterOutput(**new_config.dict(by_alias=False)) + return RenewableClusterOutput(**new_config.model_dump(by_alias=False)) def update_renewables_props( self, @@ -375,12 +375,12 @@ def update_renewables_props( for renewable_id, update_cluster in update_renewables_by_ids.items(): # Update the renewable cluster properties. old_cluster = old_renewables_by_ids[renewable_id] - new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True)) + new_cluster = old_cluster.copy(update=update_cluster.model_dump(by_alias=False, exclude_none=True)) new_renewables_by_areas[area_id][renewable_id] = new_cluster # Convert the DTO to a configuration object and update the configuration file. 
properties = create_renewable_config( - study.version, **new_cluster.dict(by_alias=False, exclude_none=True) + study.version, **new_cluster.model_dump(by_alias=False, exclude_none=True) ) path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=renewable_id) cmd = UpdateConfig( diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index 26922f2155..b7655c867e 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -75,7 +75,7 @@ def validate_name(cls, name: t.Optional[str]) -> str: # noinspection PyUnusedLocal def to_config(self, study_version: t.Union[str, int]) -> STStorageConfigType: - values = self.dict(by_alias=False, exclude_none=True) + values = self.model_dump(by_alias=False, exclude_none=True) return create_st_storage_config(study_version=study_version, **values) @@ -237,7 +237,7 @@ def create_storage_output( config: t.Mapping[str, t.Any], ) -> "STStorageOutput": obj = create_st_storage_config(study_version=study_version, **config, id=cluster_id) - kwargs = obj.dict(by_alias=False) + kwargs = obj.model_dump(by_alias=False) return STStorageOutput(**kwargs) @@ -381,12 +381,12 @@ def update_storages_props( for storage_id, update_cluster in update_storages_by_ids.items(): # Update the storage cluster properties. old_cluster = old_storages_by_ids[storage_id] - new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True)) + new_cluster = old_cluster.copy(update=update_cluster.model_dump(by_alias=False, exclude_none=True)) new_storages_by_areas[area_id][storage_id] = new_cluster # Convert the DTO to a configuration object and update the configuration file. 
properties = create_st_storage_config( - study.version, **new_cluster.dict(by_alias=False, exclude_none=True) + study.version, **new_cluster.model_dump(by_alias=False, exclude_none=True) ) path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id) cmd = UpdateConfig( @@ -460,7 +460,7 @@ def update_storage( old_config = create_st_storage_config(study_version, **values) # use Python values to synchronize Config and Form values - new_values = form.dict(by_alias=False, exclude_none=True) + new_values = form.model_dump(by_alias=False, exclude_none=True) new_config = old_config.copy(exclude={"id"}, update=new_values) new_data = json.loads(new_config.model_dump_json(by_alias=True, exclude={"id"})) @@ -480,7 +480,7 @@ def update_storage( ] execute_or_add_commands(study, file_study, commands, self.storage_service) - values = new_config.dict(by_alias=False) + values = new_config.model_dump(by_alias=False) return STStorageOutput(**values, id=storage_id) def delete_storages( @@ -542,7 +542,7 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus # We should remove the field 'enabled' for studies before v8.8 as it didn't exist if int(study.version) < 880: fields_to_exclude.add("enabled") - creation_form = STStorageCreation(**current_cluster.dict(by_alias=False, exclude=fields_to_exclude)) + creation_form = STStorageCreation(**current_cluster.model_dump(by_alias=False, exclude=fields_to_exclude)) new_config = creation_form.to_config(study.version) create_cluster_cmd = self._make_create_cluster_cmd(area_id, new_config) @@ -571,7 +571,7 @@ def duplicate_cluster(self, study: Study, area_id: str, source_id: str, new_clus execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service) - return STStorageOutput(**new_config.dict(by_alias=False)) + return STStorageOutput(**new_config.model_dump(by_alias=False)) def get_matrix( self, diff --git a/antarest/study/business/areas/thermal_management.py 
b/antarest/study/business/areas/thermal_management.py index 0940117195..3544fd13e9 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -78,7 +78,7 @@ def validate_name(cls, name: t.Optional[str]) -> str: return name def to_config(self, study_version: t.Union[str, int]) -> ThermalConfigType: - values = self.dict(by_alias=False, exclude_none=True) + values = self.model_dump(by_alias=False, exclude_none=True) return create_thermal_config(study_version=study_version, **values) @@ -109,7 +109,7 @@ def create_thermal_output( config: t.Mapping[str, t.Any], ) -> "ThermalClusterOutput": obj = create_thermal_config(study_version=study_version, **config, id=cluster_id) - kwargs = obj.dict(by_alias=False) + kwargs = obj.model_dump(by_alias=False) return ThermalClusterOutput(**kwargs) @@ -240,11 +240,13 @@ def update_thermals_props( for thermal_id, update_cluster in update_thermals_by_ids.items(): # Update the thermal cluster properties. old_cluster = old_thermals_by_ids[thermal_id] - new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True)) + new_cluster = old_cluster.copy(update=update_cluster.model_dump(by_alias=False, exclude_none=True)) new_thermals_by_areas[area_id][thermal_id] = new_cluster # Convert the DTO to a configuration object and update the configuration file. 
- properties = create_thermal_config(study.version, **new_cluster.dict(by_alias=False, exclude_none=True)) + properties = create_thermal_config( + study.version, **new_cluster.model_dump(by_alias=False, exclude_none=True) + ) path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=thermal_id) cmd = UpdateConfig( target=path, @@ -293,7 +295,7 @@ def _make_create_cluster_cmd(self, area_id: str, cluster: ThermalConfigType) -> command = CreateCluster( area_id=area_id, cluster_name=cluster.id, - parameters=cluster.dict(by_alias=True, exclude={"id"}), + parameters=cluster.model_dump(by_alias=True, exclude={"id"}), command_context=self.storage_service.variant_study_service.command_factory.command_context, ) return command @@ -334,7 +336,7 @@ def update_cluster( old_config = create_thermal_config(study_version, **values) # Use Python values to synchronize Config and Form values - new_values = cluster_data.dict(by_alias=False, exclude_none=True) + new_values = cluster_data.model_dump(by_alias=False, exclude_none=True) new_config = old_config.copy(exclude={"id"}, update=new_values) new_data = json.loads(new_config.model_dump_json(by_alias=True, exclude={"id"})) @@ -353,7 +355,7 @@ def update_cluster( ] execute_or_add_commands(study, file_study, commands, self.storage_service) - values = new_config.dict(by_alias=False) + values = new_config.model_dump(by_alias=False) return ThermalClusterOutput(**values, id=cluster_id) def delete_clusters(self, study: Study, area_id: str, cluster_ids: t.Sequence[str]) -> None: @@ -406,7 +408,7 @@ def duplicate_cluster( # Cluster duplication source_cluster = self.get_cluster(study, area_id, source_id) source_cluster.name = new_cluster_name - creation_form = ThermalClusterCreation(**source_cluster.dict(by_alias=False, exclude={"id"})) + creation_form = ThermalClusterCreation(**source_cluster.model_dump(by_alias=False, exclude={"id"})) new_config = creation_form.to_config(study.version) create_cluster_cmd = self._make_create_cluster_cmd(area_id, 
new_config) @@ -439,7 +441,7 @@ def duplicate_cluster( execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service) - return ThermalClusterOutput(**new_config.dict(by_alias=False)) + return ThermalClusterOutput(**new_config.model_dump(by_alias=False)) def validate_series(self, study: Study, area_id: str, cluster_id: str) -> bool: lower_cluster_id = cluster_id.lower() diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py index 744401772a..5b5f933712 100644 --- a/antarest/study/business/link_management.py +++ b/antarest/study/business/link_management.py @@ -109,7 +109,7 @@ def get_all_links_props(self, study: RawStudy) -> t.Mapping[t.Tuple[str, str], L for area2_id, properties_cfg in property_map.items(): area1_id, area2_id = sorted([area1_id, area2_id]) properties = LinkProperties(**properties_cfg) - links_by_ids[(area1_id, area2_id)] = LinkOutput(**properties.dict(by_alias=False)) + links_by_ids[(area1_id, area2_id)] = LinkOutput(**properties.model_dump(by_alias=False)) return links_by_ids @@ -125,11 +125,11 @@ def update_links_props( for (area1, area2), update_link_dto in update_links_by_ids.items(): # Update the link properties. old_link_dto = old_links_by_ids[(area1, area2)] - new_link_dto = old_link_dto.copy(update=update_link_dto.dict(by_alias=False, exclude_none=True)) + new_link_dto = old_link_dto.copy(update=update_link_dto.model_dump(by_alias=False, exclude_none=True)) new_links_by_ids[(area1, area2)] = new_link_dto # Convert the DTO to a configuration object and update the configuration file. 
- properties = LinkProperties(**new_link_dto.dict(by_alias=False)) + properties = LinkProperties(**new_link_dto.model_dump(by_alias=False)) path = f"{_ALL_LINKS_PATH}/{area1}/properties/{area2}" cmd = UpdateConfig( target=path, diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index bc31683139..d15fa660b0 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -83,36 +83,37 @@ def __init__( def _get_table_data_unsafe(self, study: RawStudy, table_type: TableModeType) -> TableDataDTO: if table_type == TableModeType.AREA: areas_map = self._area_manager.get_all_area_props(study) - data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} + data = {area_id: area.model_dump(by_alias=True) for area_id, area in areas_map.items()} elif table_type == TableModeType.LINK: links_map = self._link_manager.get_all_links_props(study) data = { - f"{area1_id} / {area2_id}": link.dict(by_alias=True) for (area1_id, area2_id), link in links_map.items() + f"{area1_id} / {area2_id}": link.model_dump(by_alias=True) + for (area1_id, area2_id), link in links_map.items() } elif table_type == TableModeType.THERMAL: thermals_by_areas = self._thermal_manager.get_all_thermals_props(study) data = { - f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + f"{area_id} / {cluster_id}": cluster.model_dump(by_alias=True, exclude={"id", "name"}) for area_id, thermals_by_ids in thermals_by_areas.items() for cluster_id, cluster in thermals_by_ids.items() } elif table_type == TableModeType.RENEWABLE: renewables_by_areas = self._renewable_manager.get_all_renewables_props(study) data = { - f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + f"{area_id} / {cluster_id}": cluster.model_dump(by_alias=True, exclude={"id", "name"}) for area_id, renewables_by_ids in renewables_by_areas.items() for cluster_id, 
cluster in renewables_by_ids.items() } elif table_type == TableModeType.ST_STORAGE: storages_by_areas = self._st_storage_manager.get_all_storages_props(study) data = { - f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + f"{area_id} / {cluster_id}": cluster.model_dump(by_alias=True, exclude={"id", "name"}) for area_id, storages_by_ids in storages_by_areas.items() for cluster_id, cluster in storages_by_ids.items() } elif table_type == TableModeType.BINDING_CONSTRAINT: bc_seq = self._binding_constraint_manager.get_binding_constraints(study) - data = {bc.id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc in bc_seq} + data = {bc.id: bc.model_dump(by_alias=True, exclude={"id", "name", "terms"}) for bc in bc_seq} else: # pragma: no cover raise NotImplementedError(f"Table type {table_type} not implemented") return data @@ -177,13 +178,13 @@ def update_table_data( # Use AreaOutput to update properties of areas, which may include `None` values area_props_by_ids = {key: AreaOutput(**values) for key, values in data.items()} areas_map = self._area_manager.update_areas_props(study, area_props_by_ids) - data = {area_id: area.dict(by_alias=True, exclude_none=True) for area_id, area in areas_map.items()} + data = {area_id: area.model_dump(by_alias=True, exclude_none=True) for area_id, area in areas_map.items()} return data elif table_type == TableModeType.LINK: links_map = {tuple(key.split(" / ")): LinkOutput(**values) for key, values in data.items()} updated_map = self._link_manager.update_links_props(study, links_map) # type: ignore data = { - f"{area1_id} / {area2_id}": link.dict(by_alias=True) + f"{area1_id} / {area2_id}": link.model_dump(by_alias=True) for (area1_id, area2_id), link in updated_map.items() } return data @@ -195,7 +196,7 @@ def update_table_data( thermals_by_areas[area_id][cluster_id] = ThermalClusterInput(**values) thermals_map = self._thermal_manager.update_thermals_props(study, thermals_by_areas) data = { - 
f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + f"{area_id} / {cluster_id}": cluster.model_dump(by_alias=True, exclude={"id", "name"}) for area_id, thermals_by_ids in thermals_map.items() for cluster_id, cluster in thermals_by_ids.items() } @@ -208,7 +209,7 @@ def update_table_data( renewables_by_areas[area_id][cluster_id] = RenewableClusterInput(**values) renewables_map = self._renewable_manager.update_renewables_props(study, renewables_by_areas) data = { - f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + f"{area_id} / {cluster_id}": cluster.model_dump(by_alias=True, exclude={"id", "name"}) for area_id, renewables_by_ids in renewables_map.items() for cluster_id, cluster in renewables_by_ids.items() } @@ -221,7 +222,7 @@ def update_table_data( storages_by_areas[area_id][cluster_id] = STStorageInput(**values) storages_map = self._st_storage_manager.update_storages_props(study, storages_by_areas) data = { - f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + f"{area_id} / {cluster_id}": cluster.model_dump(by_alias=True, exclude={"id", "name"}) for area_id, storages_by_ids in storages_map.items() for cluster_id, cluster in storages_by_ids.items() } @@ -229,7 +230,9 @@ def update_table_data( elif table_type == TableModeType.BINDING_CONSTRAINT: bcs_by_ids = {key: ConstraintInput(**values) for key, values in data.items()} bcs_map = self._binding_constraint_manager.update_binding_constraints(study, bcs_by_ids) - return {bc_id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc_id, bc in bcs_map.items()} + return { + bc_id: bc.model_dump(by_alias=True, exclude={"id", "name", "terms"}) for bc_id, bc in bcs_map.items() + } else: # pragma: no cover raise NotImplementedError(f"Table type {table_type} not implemented") diff --git a/antarest/study/business/thematic_trimming_management.py b/antarest/study/business/thematic_trimming_management.py index 
d4af9f960e..85811935d9 100644 --- a/antarest/study/business/thematic_trimming_management.py +++ b/antarest/study/business/thematic_trimming_management.py @@ -37,7 +37,7 @@ def set_field_values(self, study: Study, field_values: ThematicTrimmingFormField Set Thematic Trimming config from the webapp form """ file_study = self.storage_service.get_storage(study).get_raw(study) - field_values_dict = field_values.dict() + field_values_dict = field_values.model_dump() keys_by_bool: t.Dict[bool, t.List[t.Any]] = {True: [], False: []} fields_info = get_fields_info(int(study.version)) diff --git a/antarest/study/business/timeseries_config_management.py b/antarest/study/business/timeseries_config_management.py index 71f0d02438..0b3d15d95f 100644 --- a/antarest/study/business/timeseries_config_management.py +++ b/antarest/study/business/timeseries_config_management.py @@ -118,7 +118,7 @@ def __set_field_values_for_type( field_values: TSFormFieldsForType, ) -> None: commands: t.List[UpdateConfig] = [] - values = field_values.dict() + values = field_values.model_dump() for field, path in PATH_BY_TS_STR_FIELD.items(): field_val = values[field] diff --git a/antarest/study/business/xpansion_management.py b/antarest/study/business/xpansion_management.py index 68eeaca0e8..61991802eb 100644 --- a/antarest/study/business/xpansion_management.py +++ b/antarest/study/business/xpansion_management.py @@ -335,9 +335,11 @@ def create_xpansion_configuration(self, study: Study, zipped_config: t.Optional[ raise BadZipBinary("Only zip file are allowed.") xpansion_settings = XpansionSettings() - settings_obj = xpansion_settings.dict(by_alias=True, exclude_none=True, exclude={"sensitivity_config"}) + settings_obj = xpansion_settings.model_dump( + by_alias=True, exclude_none=True, exclude={"sensitivity_config"} + ) if xpansion_settings.sensitivity_config: - sensitivity_obj = xpansion_settings.sensitivity_config.dict(by_alias=True, exclude_none=True) + sensitivity_obj = 
xpansion_settings.sensitivity_config.model_dump(by_alias=True, exclude_none=True) else: sensitivity_obj = {} @@ -377,7 +379,9 @@ def update_xpansion_settings( logger.info(f"Updating xpansion settings for study '{study.id}'") actual_settings = self.get_xpansion_settings(study) - settings_fields = new_xpansion_settings.dict(by_alias=False, exclude_none=True, exclude={"sensitivity_config"}) + settings_fields = new_xpansion_settings.model_dump( + by_alias=False, exclude_none=True, exclude={"sensitivity_config"} + ) updated_settings = actual_settings.copy(deep=True, update=settings_fields) file_study = self.study_storage_service.get_storage(study).get_raw(study) @@ -397,11 +401,11 @@ def update_xpansion_settings( msg = f"Additional constraints file '{constraints_file}' does not exist" raise XpansionFileNotFoundError(msg) from None - config_obj = updated_settings.dict(by_alias=True, exclude={"sensitivity_config"}) + config_obj = updated_settings.model_dump(by_alias=True, exclude={"sensitivity_config"}) file_study.tree.save(config_obj, ["user", "expansion", "settings"]) if new_xpansion_settings.sensitivity_config: - sensitivity_obj = new_xpansion_settings.sensitivity_config.dict(by_alias=True) + sensitivity_obj = new_xpansion_settings.sensitivity_config.model_dump(by_alias=True) file_study.tree.save(sensitivity_obj, ["user", "expansion", "sensitivity", "sensitivity_in"]) return self.get_xpansion_settings(study) @@ -541,7 +545,7 @@ def add_candidate(self, study: Study, xpansion_candidate: XpansionCandidateDTO) ) # The primary key is actually the name, the id does not matter and is never checked. 
logger.info(f"Adding candidate '{xpansion_candidate.name}' to study '{study.id}'") - candidates_obj[next_id] = xpansion_candidate.dict(by_alias=True, exclude_none=True) + candidates_obj[next_id] = xpansion_candidate.model_dump(by_alias=True, exclude_none=True) candidates_data = {"user": {"expansion": {"candidates": candidates_obj}}} file_study.tree.save(candidates_data) # Should we add a field in the study config containing the xpansion candidates like the links or the areas ? @@ -582,7 +586,7 @@ def update_candidate( for candidate_id, candidate in candidates.items(): if candidate["name"] == candidate_name: logger.info(f"Updating candidate '{candidate_name}' of study '{study.id}'") - candidates[candidate_id] = xpansion_candidate_dto.dict(by_alias=True, exclude_none=True) + candidates[candidate_id] = xpansion_candidate_dto.model_dump(by_alias=True, exclude_none=True) file_study.tree.save(candidates, ["user", "expansion", "candidates"]) return raise CandidateNotFoundError(f"The candidate '{xpansion_candidate_dto.name}' does not exist") diff --git a/antarest/study/service.py b/antarest/study/service.py index d270e8daa3..53552a1973 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -1272,7 +1272,7 @@ def export_task(_notifier: TaskUpdateNotifier) -> TaskResult: else: json_response = json.dumps( - matrix.dict(), + matrix.model_dump(), ensure_ascii=False, allow_nan=True, indent=None, @@ -2367,7 +2367,7 @@ def unarchive_output_task( src=str(src), dest=str(dest), remove_src=not keep_src_zip, - ).dict(), + ).model_dump(), name=task_name, ref_id=study.id, request_params=params, @@ -2395,7 +2395,7 @@ def generate_timeseries(self, study: Study, params: RequestParameters) -> None: managed=is_managed(study), study_path=str(study.path), study_version=str(study.version), - ).dict(), + ).model_dump(), name=f"Generate timeseries for study {study.id}", ref_id=study.id, request_params=params, diff --git 
a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py index 9ba7e9b022..1682b189b4 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py @@ -42,7 +42,7 @@ class OptimizationProperties(IniProperties): >>> opt = OptimizationProperties(**obj) - >>> pprint(opt.dict(by_alias=True), width=80) + >>> pprint(opt.model_dump(by_alias=True), width=80) {'filtering': {'filter-synthesis': 'hourly, daily, weekly, monthly, annual', 'filter-year-by-year': 'hourly, annual'}, 'nodal optimization': {'dispatchable-hydro-power': False, @@ -63,7 +63,7 @@ class OptimizationProperties(IniProperties): Convert the object to a dictionary for writing to a configuration file: - >>> pprint(opt.dict(by_alias=True, exclude_defaults=True), width=80) + >>> pprint(opt.model_dump(by_alias=True, exclude_defaults=True), width=80) {'filtering': {'filter-synthesis': 'hourly, weekly, monthly, annual', 'filter-year-by-year': 'hourly, monthly, annual'}, 'nodal optimization': {'dispatchable-hydro-power': False, @@ -147,13 +147,13 @@ class AreaUI(IniProperties): ... "color_b": 255, ... } >>> ui = AreaUI(**obj) - >>> pprint(ui.dict(by_alias=True), width=80) + >>> pprint(ui.model_dump(by_alias=True), width=80) {'colorRgb': '#0080FF', 'x': 1148, 'y': 144} Update the color: >>> ui.color_rgb = (192, 168, 127) - >>> pprint(ui.dict(by_alias=True), width=80) + >>> pprint(ui.model_dump(by_alias=True), width=80) {'colorRgb': '#C0A87F', 'x': 1148, 'y': 144} """ @@ -204,7 +204,7 @@ class UIProperties(IniProperties): UIProperties has default values for `style` and `layers`: >>> ui = UIProperties() - >>> pprint(ui.dict(), width=80) + >>> pprint(ui.model_dump(), width=80) {'layer_styles': {0: {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}}, 'layers': {0}, 'style': {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}} @@ -232,7 +232,7 @@ class UIProperties(IniProperties): ... 
} >>> ui = UIProperties(**obj) - >>> pprint(ui.dict(), width=80) + >>> pprint(ui.model_dump(), width=80) {'layer_styles': {0: {'color_rgb': '#0080FF', 'x': 1148, 'y': 144}, 4: {'color_rgb': '#0080FF', 'x': 1148, 'y': 144}, 6: {'color_rgb': '#C0A863', 'x': 1148, 'y': 144}, @@ -266,7 +266,7 @@ def _set_default_style(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[s elif isinstance(style, dict): values["style"] = AreaUI(**style) else: - values["style"] = AreaUI(**style.dict()) + values["style"] = AreaUI(**style.model_dump()) return values @model_validator(mode="before") @@ -290,7 +290,7 @@ def _set_default_layer_styles(cls, values: t.MutableMapping[str, t.Any]) -> t.Ma if isinstance(style, dict): values["layer_styles"][key] = AreaUI(**style) else: - values["layer_styles"][key] = AreaUI(**style.dict()) + values["layer_styles"][key] = AreaUI(**style.model_dump()) else: raise TypeError(f"Invalid type for layer_styles: {type(layer_styles)}") return values @@ -393,7 +393,7 @@ class AreaFolder(IniProperties): Create and validate a new AreaProperties object from a dictionary read from a configuration file. >>> obj = AreaFolder() - >>> pprint(obj.dict(), width=80) + >>> pprint(obj.model_dump(), width=80) {'adequacy_patch': None, 'optimization': {'filtering': {'filter_synthesis': '', 'filter_year_by_year': ''}, @@ -438,7 +438,7 @@ class AreaFolder(IniProperties): ... } >>> obj = AreaFolder.construct(**data) - >>> pprint(obj.dict(), width=80) + >>> pprint(obj.model_dump(), width=80) {'adequacy_patch': None, 'optimization': {'filtering': {'filter-synthesis': 'annual, centennial'}, 'nodal optimization': {'spread-spilled-energy-cost': '15.5', @@ -500,7 +500,7 @@ class ThermalAreasProperties(IniProperties): ... }, ... 
} >>> area = ThermalAreasProperties(**obj) - >>> pprint(area.dict(), width=80) + >>> pprint(area.model_dump(), width=80) {'spilled_energy_cost': {'cz': 100.0}, 'unserverd_energy_cost': {'at': 4000.8, 'be': 3500.0, @@ -511,7 +511,7 @@ class ThermalAreasProperties(IniProperties): >>> area.unserverd_energy_cost["at"] = 6500.0 >>> area.unserverd_energy_cost["fr"] = 0.0 - >>> pprint(area.dict(), width=80) + >>> pprint(area.model_dump(), width=80) {'spilled_energy_cost': {'cz': 100.0}, 'unserverd_energy_cost': {'at': 6500.0, 'be': 3500.0, 'de': 1250.0, 'fr': 0.0}} diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py index 4eef1b932c..527138625e 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/links.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py @@ -84,7 +84,7 @@ class LinkProperties(IniProperties): >>> opt = LinkProperties(**obj) - >>> pprint(opt.dict(by_alias=True), width=80) + >>> pprint(opt.model_dump(by_alias=True), width=80) {'asset-type': , 'colorRgb': '#50C0FF', 'comments': 'This is a link', diff --git a/antarest/study/storage/rawstudy/model/filesystem/factory.py b/antarest/study/storage/rawstudy/model/filesystem/factory.py index 303c807310..7d70756c71 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/factory.py +++ b/antarest/study/storage/rawstudy/model/filesystem/factory.py @@ -106,7 +106,7 @@ def _create_from_fs_unsafe( logger.info(f"Cache new entry from StudyFactory (studyID: {study_id})") self.cache.put( cache_id, - FileStudyTreeConfigDTO.from_build_config(config).dict(), + FileStudyTreeConfigDTO.from_build_config(config).model_dump(), ) return result diff --git a/antarest/study/storage/study_download_utils.py b/antarest/study/storage/study_download_utils.py index 0c922fed03..55d91cf35e 100644 --- a/antarest/study/storage/study_download_utils.py +++ b/antarest/study/storage/study_download_utils.py @@ -333,7 
+333,7 @@ def export( if filetype == ExportFormat.JSON: with open(target_file, "w") as fh: json.dump( - matrix.dict(), + matrix.model_dump(), fh, ensure_ascii=False, allow_nan=True, diff --git a/antarest/study/storage/variantstudy/business/command_extractor.py b/antarest/study/storage/variantstudy/business/command_extractor.py index 4ac5070a69..4703a90dde 100644 --- a/antarest/study/storage/variantstudy/business/command_extractor.py +++ b/antarest/study/storage/variantstudy/business/command_extractor.py @@ -211,7 +211,7 @@ def _extract_cluster(self, study: FileStudy, area_id: str, cluster_id: str, rene create_cluster_command( area_id=area_id, cluster_name=cluster.id, - parameters=cluster.dict(by_alias=True, exclude_defaults=True, exclude={"id"}), + parameters=cluster.model_dump(by_alias=True, exclude_defaults=True, exclude={"id"}), command_context=self.command_context, ), self.generate_replace_matrix( diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index c72f8323ab..f004c50d1b 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -408,7 +408,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: bd_id = transform_name_to_id(self.name) study_version = study_data.config.version - props = create_binding_constraint_config(study_version, **self.dict()) + props = create_binding_constraint_config(study_version, **self.model_dump()) obj = json.loads(props.model_dump_json(by_alias=True)) new_binding = {"id": bd_id, "name": self.name, **obj} diff --git a/antarest/study/storage/variantstudy/model/command/icommand.py b/antarest/study/storage/variantstudy/model/command/icommand.py index b214f8c7bb..2fe8674b32 100644 --- a/antarest/study/storage/variantstudy/model/command/icommand.py +++ 
b/antarest/study/storage/variantstudy/model/command/icommand.py @@ -127,8 +127,8 @@ def match(self, other: "ICommand", equal: bool = False) -> bool: if not isinstance(other, self.__class__): return False excluded_fields = set(ICommand.model_fields) - this_values = self.dict(exclude=excluded_fields) - that_values = other.dict(exclude=excluded_fields) + this_values = self.model_dump(exclude=excluded_fields) + that_values = other.model_dump(exclude=excluded_fields) return this_values == that_values @abstractmethod diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py index c79317dd7c..a9f258768e 100644 --- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py @@ -167,14 +167,14 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: ) study_version = study_data.config.version - props = create_binding_constraint_config(study_version, **self.dict()) + props = create_binding_constraint_config(study_version, **self.model_dump()) obj = json.loads(props.model_dump_json(by_alias=True, exclude_unset=True)) updated_cfg = binding_constraints[index] updated_cfg.update(obj) excluded_fields = set(ICommand.model_fields) | {"id"} - updated_properties = self.dict(exclude=excluded_fields, exclude_none=True) + updated_properties = self.model_dump(exclude=excluded_fields, exclude_none=True) # This 2nd check is here to remove the last term. if self.coeffs or updated_properties == {"coeffs": {}}: # Remove terms which IDs contain a "%" or a "." 
in their name diff --git a/antarest/study/storage/variantstudy/snapshot_generator.py b/antarest/study/storage/variantstudy/snapshot_generator.py index 5174c6fc2c..e969a9c1f7 100644 --- a/antarest/study/storage/variantstudy/snapshot_generator.py +++ b/antarest/study/storage/variantstudy/snapshot_generator.py @@ -199,7 +199,7 @@ def _update_cache(self, file_study: FileStudy) -> None: self.cache.invalidate(f"{CacheConstants.RAW_STUDY}/{file_study.config.study_id}") self.cache.put( f"{CacheConstants.STUDY_FACTORY}/{file_study.config.study_id}", - FileStudyTreeConfigDTO.from_build_config(file_study.config).dict(), + FileStudyTreeConfigDTO.from_build_config(file_study.config).model_dump(), ) diff --git a/antarest/study/web/xpansion_studies_blueprint.py b/antarest/study/web/xpansion_studies_blueprint.py index 1b46af1a84..3bce9bb47b 100644 --- a/antarest/study/web/xpansion_studies_blueprint.py +++ b/antarest/study/web/xpansion_studies_blueprint.py @@ -127,7 +127,7 @@ def add_candidate( current_user: JWTUser = Depends(auth.get_current_user), ) -> XpansionCandidateDTO: logger.info( - f"Adding new candidate {xpansion_candidate_dto.dict(by_alias=True)} to study {uuid}", + f"Adding new candidate {xpansion_candidate_dto.model_dump(by_alias=True)} to study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) diff --git a/antarest/tools/lib.py b/antarest/tools/lib.py index 11fc1ba96f..a2e2dff1eb 100644 --- a/antarest/tools/lib.py +++ b/antarest/tools/lib.py @@ -87,7 +87,7 @@ def apply_commands( res = self.session.post( self.build_url(f"/v1/studies/{self.study_id}/commands"), - json=[command.dict() for command in commands], + json=[command.model_dump() for command in commands], ) res.raise_for_status() stopwatch.log_elapsed(lambda x: logger.info(f"Command upload done in {x}s")) @@ -197,7 +197,7 @@ def extract_commands(study_path: Path, commands_output_dir: Path) -> None: (commands_output_dir / COMMAND_FILE).write_text( json.dumps( - 
[command.dict(exclude={"id"}) for command in command_list], + [command.model_dump(exclude={"id"}) for command in command_list], indent=2, ) ) @@ -289,7 +289,7 @@ def generate_diff( (output_dir / COMMAND_FILE).write_text( json.dumps( - [command.to_dto().dict(exclude={"id"}) for command in diff_commands], + [command.to_dto().model_dump(exclude={"id"}) for command in diff_commands], indent=2, ) ) diff --git a/tests/cache/test_local_cache.py b/tests/cache/test_local_cache.py index b9fae75ee9..79deddfea4 100644 --- a/tests/cache/test_local_cache.py +++ b/tests/cache/test_local_cache.py @@ -29,11 +29,11 @@ def test_lifecycle(): id = "some_id" duration = 3600 timeout = int(time.time()) + duration - cache_element = LocalCacheElement(duration=duration, data=config.dict(), timeout=timeout) + cache_element = LocalCacheElement(duration=duration, data=config.model_dump(), timeout=timeout) # PUT - cache.put(id=id, data=config.dict(), duration=duration) + cache.put(id=id, data=config.model_dump(), duration=duration) assert cache.cache[id] == cache_element # GET - assert cache.get(id=id) == config.dict() + assert cache.get(id=id) == config.model_dump() diff --git a/tests/cache/test_redis_cache.py b/tests/cache/test_redis_cache.py index 362c58644d..fe47050edf 100644 --- a/tests/cache/test_redis_cache.py +++ b/tests/cache/test_redis_cache.py @@ -28,7 +28,7 @@ def test_lifecycle(): id = "some_id" redis_key = f"cache:{id}" duration = 3600 - cache_element = RedisCacheElement(duration=duration, data=config.dict()).model_dump_json() + cache_element = RedisCacheElement(duration=duration, data=config.model_dump()).model_dump_json() # GET redis_client.get.return_value = cache_element @@ -39,7 +39,7 @@ def test_lifecycle(): # PUT duration = 7200 - cache_element = RedisCacheElement(duration=duration, data=config.dict()).model_dump_json() - cache.put(id=id, data=config.dict(), duration=duration) + cache_element = RedisCacheElement(duration=duration, data=config.model_dump()).model_dump_json() 
+ cache.put(id=id, data=config.model_dump(), duration=duration) redis_client.set.assert_called_once_with(redis_key, cache_element) redis_client.expire.assert_called_with(redis_key, duration) diff --git a/tests/core/test_tasks.py b/tests/core/test_tasks.py index 7edc25ec6f..c8d3bda5aa 100644 --- a/tests/core/test_tasks.py +++ b/tests/core/test_tasks.py @@ -108,7 +108,7 @@ def test_service(core_config: Config, event_bus: IEventBus) -> None: "status": TaskStatus.FAILED, "type": None, } - assert res.dict() == expected + assert res.model_dump() == expected # Test Case: add a task that fails and wait for it # ================================================ diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 08aa099dd1..ee1adfe74a 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -613,15 +613,15 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # -- `layers` integration tests res = client.get(f"/v1/studies/{study_id}/layers") - assert res.json() == [LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict()] + assert res.json() == [LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump()] res = client.post(f"/v1/studies/{study_id}/layers?name=test") assert res.json() == "1" res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ - LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict(), - LayerInfoDTO(id="1", name="test", areas=[]).dict(), + LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(), + LayerInfoDTO(id="1", name="test", areas=[]).model_dump(), ] res = client.put(f"/v1/studies/{study_id}/layers/1?name=test2") @@ -632,8 +632,8 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: assert res.status_code in {200, 201}, res.json() res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ - LayerInfoDTO(id="0", name="All", 
areas=["area 1", "area 2"]).dict(), - LayerInfoDTO(id="1", name="test2", areas=["area 2"]).dict(), + LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(), + LayerInfoDTO(id="1", name="test2", areas=["area 2"]).model_dump(), ] # Delete the layer '1' that has 1 area @@ -643,7 +643,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # Ensure the layer is deleted res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ - LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict(), + LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(), ] # Create the layer again without areas @@ -657,7 +657,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: # Ensure the layer is deleted res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ - LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict(), + LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).model_dump(), ] # Try to delete a non-existing layer diff --git a/tests/integration/variant_blueprint/test_thermal_cluster.py b/tests/integration/variant_blueprint/test_thermal_cluster.py index 245fd8a02a..914dbc3070 100644 --- a/tests/integration/variant_blueprint/test_thermal_cluster.py +++ b/tests/integration/variant_blueprint/test_thermal_cluster.py @@ -128,7 +128,7 @@ def test_cascade_update( ) assert res.status_code == http.HTTPStatus.OK, res.json() task = TaskDTO(**res.json()) - assert task.dict() == { + assert task.model_dump() == { "completion_date_utc": mock.ANY, "creation_date_utc": mock.ANY, "id": task_id, diff --git a/tests/integration/xpansion_studies_blueprint/test_integration_xpansion.py b/tests/integration/xpansion_studies_blueprint/test_integration_xpansion.py index fc7f92d81c..1bbcef8f99 100644 --- a/tests/integration/xpansion_studies_blueprint/test_integration_xpansion.py +++ b/tests/integration/xpansion_studies_blueprint/test_integration_xpansion.py 
@@ -319,13 +319,13 @@ def test_integration_xpansion(client: TestClient, tmp_path: Path, admin_access_t res = xp_client.get(f"candidates/{candidate1['name']}", headers=headers) assert res.status_code == 200 - assert res.json() == XpansionCandidateDTO.model_validate(candidate1).dict(by_alias=True) + assert res.json() == XpansionCandidateDTO.model_validate(candidate1).model_dump(by_alias=True) res = xp_client.get("candidates", headers=headers) assert res.status_code == 200 assert res.json() == [ - XpansionCandidateDTO.model_validate(candidate1).dict(by_alias=True), - XpansionCandidateDTO.model_validate(candidate4).dict(by_alias=True), + XpansionCandidateDTO.model_validate(candidate1).model_dump(by_alias=True), + XpansionCandidateDTO.model_validate(candidate4).model_dump(by_alias=True), ] res = xp_client.delete(f"resources/capacities/{filename_capa1}", headers=headers) diff --git a/tests/launcher/test_service.py b/tests/launcher/test_service.py index 0cea997adf..f196130a64 100644 --- a/tests/launcher/test_service.py +++ b/tests/launcher/test_service.py @@ -130,12 +130,12 @@ def test_service_run_study(self, get_current_user_mock) -> None: # so we need to compare them manually. 
mock_call = repository.save.mock_calls[0] actual_obj: JobResult = mock_call.args[0] - assert actual_obj.to_dto().dict() == pending.to_dto().dict() + assert actual_obj.to_dto().model_dump() == pending.to_dto().model_dump() event_bus.push.assert_called_once_with( Event( type=EventType.STUDY_JOB_STARTED, - payload=pending.to_dto().dict(), + payload=pending.to_dto().model_dump(), permissions=PermissionInfo(owner=0), ) ) @@ -890,7 +890,7 @@ def test_save_solver_stats(self, tmp_path: Path) -> None: solver_stats=expected_saved_stats, owner_id=1, ) - assert actual_obj.to_dto().dict() == expected_obj.to_dto().dict() + assert actual_obj.to_dto().model_dump() == expected_obj.to_dto().model_dump() zip_file = tmp_path / "test.zip" with ZipFile(zip_file, "w", ZIP_DEFLATED) as output_data: @@ -907,7 +907,7 @@ def test_save_solver_stats(self, tmp_path: Path) -> None: solver_stats="0\n1", owner_id=1, ) - assert actual_obj.to_dto().dict() == expected_obj.to_dto().dict() + assert actual_obj.to_dto().model_dump() == expected_obj.to_dto().model_dump() @pytest.mark.parametrize( ["running_jobs", "expected_result", "default_launcher"], diff --git a/tests/login/test_login_service.py b/tests/login/test_login_service.py index e48a54a918..609b47a0e7 100644 --- a/tests/login/test_login_service.py +++ b/tests/login/test_login_service.py @@ -370,7 +370,7 @@ def test_get_group_info(self, login_service: LoginService) -> None: actual = login_service.get_group_info("superman", _param) assert actual is not None assert actual.name == "Superman" - assert [obj.dict() for obj in actual.users] == [ + assert [obj.model_dump() for obj in actual.users] == [ {"id": 2, "name": "Clark Kent", "role": RoleType.ADMIN}, {"id": 3, "name": "Lois Lane", "role": RoleType.READER}, ] @@ -450,7 +450,7 @@ def test_get_user_info(self, login_service: LoginService) -> None: clark_id = 2 actual = login_service.get_user_info(clark_id, _param) assert actual is not None - assert actual.dict() == { + assert actual.model_dump() == 
{ "id": clark_id, "name": "Clark Kent", "roles": [ @@ -468,7 +468,7 @@ def test_get_user_info(self, login_service: LoginService) -> None: lois_id = 3 actual = login_service.get_user_info(lois_id, _param) assert actual is not None - assert actual.dict() == { + assert actual.model_dump() == { "id": lois_id, "name": "Lois Lane", "roles": [ @@ -491,7 +491,7 @@ def test_get_user_info(self, login_service: LoginService) -> None: _param = get_user_param(login_service, user_id=lois_id, group_id="superman") actual = login_service.get_user_info(lois_id, _param) assert actual is not None - assert actual.dict() == { + assert actual.model_dump() == { "id": lois_id, "name": "Lois Lane", "roles": [ @@ -512,7 +512,7 @@ def test_get_user_info(self, login_service: LoginService) -> None: _param = get_bot_param(login_service, bot_id=bot.id) actual = login_service.get_user_info(lois_id, _param) assert actual is not None - assert actual.dict() == { + assert actual.model_dump() == { "id": lois_id, "name": "Lois Lane", "roles": [ @@ -566,13 +566,13 @@ def test_get_bot_info(self, login_service: LoginService) -> None: _param = get_user_param(login_service, user_id=ADMIN_ID, group_id="admin") actual = login_service.get_bot_info(joh_bot.id, _param) assert actual is not None - assert actual.dict() == {"id": 6, "isAuthor": True, "name": "Maria", "roles": []} + assert actual.model_dump() == {"id": 6, "isAuthor": True, "name": "Maria", "roles": []} # Joh Fredersen can get its own bot _param = get_user_param(login_service, user_id=joh_id, group_id="superman") actual = login_service.get_bot_info(joh_bot.id, _param) assert actual is not None - assert actual.dict() == {"id": 6, "isAuthor": True, "name": "Maria", "roles": []} + assert actual.model_dump() == {"id": 6, "isAuthor": True, "name": "Maria", "roles": []} # The bot cannot get itself _param = get_bot_param(login_service, bot_id=joh_bot.id) @@ -601,13 +601,13 @@ def test_get_all_bots_by_owner(self, login_service: LoginService) -> None: _param = 
get_user_param(login_service, user_id=ADMIN_ID, group_id="admin") actual = login_service.get_all_bots_by_owner(joh_id, _param) expected = [{"id": joh_bot.id, "is_author": True, "name": "Maria", "owner": joh_id}] - assert [obj.to_dto().dict() for obj in actual] == expected + assert [obj.to_dto().model_dump() for obj in actual] == expected # Freder Fredersen can get its own bot _param = get_user_param(login_service, user_id=joh_id, group_id="superman") actual = login_service.get_all_bots_by_owner(joh_id, _param) expected = [{"id": joh_bot.id, "is_author": True, "name": "Maria", "owner": joh_id}] - assert [obj.to_dto().dict() for obj in actual] == expected + assert [obj.to_dto().model_dump() for obj in actual] == expected # The bot cannot get itself _param = get_bot_param(login_service, bot_id=joh_bot.id) @@ -718,7 +718,7 @@ def test_get_all_groups(self, login_service: LoginService) -> None: # The site admin can get all groups _param = get_user_param(login_service, user_id=ADMIN_ID, group_id="admin") actual = login_service.get_all_groups(_param) - assert [g.dict() for g in actual] == [ + assert [g.model_dump() for g in actual] == [ {"id": "admin", "name": "X-Men"}, {"id": "superman", "name": "Superman"}, {"id": "metropolis", "name": "Metropolis"}, @@ -727,19 +727,19 @@ def test_get_all_groups(self, login_service: LoginService) -> None: # The group admin can its own groups _param = get_user_param(login_service, user_id=2, group_id="superman") actual = login_service.get_all_groups(_param) - assert [g.dict() for g in actual] == [{"id": "superman", "name": "Superman"}] + assert [g.model_dump() for g in actual] == [{"id": "superman", "name": "Superman"}] # The user can get its own groups _param = get_user_param(login_service, user_id=3, group_id="superman") actual = login_service.get_all_groups(_param) - assert [g.dict() for g in actual] == [{"id": "superman", "name": "Superman"}] + assert [g.model_dump() for g in actual] == [{"id": "superman", "name": "Superman"}] 
@with_db_context def test_get_all_users(self, login_service: LoginService) -> None: # The site admin can get all users _param = get_user_param(login_service, user_id=ADMIN_ID, group_id="admin") actual = login_service.get_all_users(_param) - assert [u.dict() for u in actual] == [ + assert [u.model_dump() for u in actual] == [ {"id": 1, "name": "Professor Xavier"}, {"id": 2, "name": "Clark Kent"}, {"id": 3, "name": "Lois Lane"}, @@ -751,7 +751,7 @@ def test_get_all_users(self, login_service: LoginService) -> None: # note: I don't know why the group admin can get all users -- Laurent _param = get_user_param(login_service, user_id=2, group_id="superman") actual = login_service.get_all_users(_param) - assert [u.dict() for u in actual] == [ + assert [u.model_dump() for u in actual] == [ {"id": 1, "name": "Professor Xavier"}, {"id": 2, "name": "Clark Kent"}, {"id": 3, "name": "Lois Lane"}, @@ -762,7 +762,7 @@ def test_get_all_users(self, login_service: LoginService) -> None: # The user can get its own users _param = get_user_param(login_service, user_id=3, group_id="superman") actual = login_service.get_all_users(_param) - assert [u.dict() for u in actual] == [ + assert [u.model_dump() for u in actual] == [ {"id": 2, "name": "Clark Kent"}, {"id": 3, "name": "Lois Lane"}, ] @@ -777,7 +777,7 @@ def test_get_all_bots(self, login_service: LoginService) -> None: # The site admin can get all bots _param = get_user_param(login_service, user_id=ADMIN_ID, group_id="admin") actual = login_service.get_all_bots(_param) - assert [b.to_dto().dict() for b in actual] == [ + assert [b.to_dto().model_dump() for b in actual] == [ {"id": joh_bot.id, "is_author": True, "name": "Maria", "owner": joh_id}, ] @@ -796,7 +796,7 @@ def test_get_all_roles_in_group(self, login_service: LoginService) -> None: # The site admin can get all roles in a given group _param = get_user_param(login_service, user_id=ADMIN_ID, group_id="admin") actual = login_service.get_all_roles_in_group("superman", _param) - 
assert [b.to_dto().dict() for b in actual] == [ + assert [b.to_dto().model_dump() for b in actual] == [ { "group": {"id": "superman", "name": "Superman"}, "identity": {"id": 2, "name": "Clark Kent"}, @@ -812,7 +812,7 @@ def test_get_all_roles_in_group(self, login_service: LoginService) -> None: # The group admin can get all roles his own group _param = get_user_param(login_service, user_id=2, group_id="superman") actual = login_service.get_all_roles_in_group("superman", _param) - assert [b.to_dto().dict() for b in actual] == [ + assert [b.to_dto().model_dump() for b in actual] == [ { "group": {"id": "superman", "name": "Superman"}, "identity": {"id": 2, "name": "Clark Kent"}, diff --git a/tests/login/test_web.py b/tests/login/test_web.py index 41722ee07b..7efddd1f7e 100644 --- a/tests/login/test_web.py +++ b/tests/login/test_web.py @@ -177,7 +177,7 @@ def test_user() -> None: client = TestClient(app) res = client.get("/v1/users", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == [User(id=1, name="user").to_dto().dict()] + assert res.json() == [User(id=1, name="user").to_dto().model_dump()] @pytest.mark.unit_test @@ -189,7 +189,7 @@ def test_user_id() -> None: client = TestClient(app) res = client.get("/v1/users/1", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == User(id=1, name="user").to_dto().dict() + assert res.json() == User(id=1, name="user").to_dto().model_dump() @pytest.mark.unit_test @@ -201,7 +201,7 @@ def test_user_id_with_details() -> None: client = TestClient(app) res = client.get("/v1/users/1?details=true", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == IdentityDTO(id=1, name="user", roles=[]).dict() + assert res.json() == IdentityDTO(id=1, name="user", roles=[]).model_dump() @pytest.mark.unit_test @@ -216,12 +216,12 @@ def test_user_create() -> None: res = client.post( "/v1/users", headers=create_auth_token(app), - json=user.dict(), + 
json=user.model_dump(), ) assert res.status_code == 200 service.create_user.assert_called_once_with(user, PARAMS) - assert res.json() == user_id.to_dto().dict() + assert res.json() == user_id.to_dto().model_dump() @pytest.mark.unit_test @@ -232,7 +232,7 @@ def test_user_save() -> None: app = create_app(service) client = TestClient(app) - user_obj = user.to_dto().dict() + user_obj = user.to_dto().model_dump() res = client.put( "/v1/users/0", headers=create_auth_token(app), @@ -244,7 +244,7 @@ def test_user_save() -> None: assert service.save_user.call_count == 1 call = service.save_user.call_args_list[0] - assert call[0][0].to_dto().dict() == user_obj + assert call[0][0].to_dto().model_dump() == user_obj assert call[0][1] == PARAMS @@ -269,7 +269,7 @@ def test_group() -> None: client = TestClient(app) res = client.get("/v1/groups", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == [Group(id="my-group", name="group").to_dto().dict()] + assert res.json() == [Group(id="my-group", name="group").to_dto().model_dump()] @pytest.mark.unit_test @@ -281,7 +281,7 @@ def test_group_id() -> None: client = TestClient(app) res = client.get("/v1/groups/1", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == Group(id="my-group", name="group").to_dto().dict() + assert res.json() == Group(id="my-group", name="group").to_dto().model_dump() @pytest.mark.unit_test @@ -299,7 +299,7 @@ def test_group_create() -> None: ) assert res.status_code == 200 - assert res.json() == group.to_dto().dict() + assert res.json() == group.to_dto().model_dump() @pytest.mark.unit_test @@ -351,7 +351,7 @@ def test_role_create() -> None: ) assert res.status_code == 200 - assert RoleDetailDTO.model_validate(res.json()) == role.to_dto().dict() + assert RoleDetailDTO.model_validate(res.json()) == role.to_dto().model_dump() @pytest.mark.unit_test @@ -394,7 +394,7 @@ def test_bot_create() -> None: print(create.json()) app = create_app(service) 
client = TestClient(app) - res = client.post("/v1/bots", headers=create_auth_token(app), json=create.dict()) + res = client.post("/v1/bots", headers=create_auth_token(app), json=create.model_dump()) assert res.status_code == 200 assert len(res.json().split(".")) == 3 @@ -410,7 +410,7 @@ def test_bot() -> None: client = TestClient(app) res = client.get("/v1/bots/0", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == bot.to_dto().dict() + assert res.json() == bot.to_dto().model_dump() @pytest.mark.unit_test @@ -424,11 +424,11 @@ def test_all_bots() -> None: client = TestClient(app) res = client.get("/v1/bots", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == [b.to_dto().dict() for b in bots] + assert res.json() == [b.to_dto().model_dump() for b in bots] res = client.get("/v1/bots?owner=4", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == [b.to_dto().dict() for b in bots] + assert res.json() == [b.to_dto().model_dump() for b in bots] service.get_all_bots.assert_called_once() service.get_all_bots_by_owner.assert_called_once() diff --git a/tests/matrixstore/test_matrix_editor.py b/tests/matrixstore/test_matrix_editor.py index 2907cfb793..e12a8b4dd7 100644 --- a/tests/matrixstore/test_matrix_editor.py +++ b/tests/matrixstore/test_matrix_editor.py @@ -70,7 +70,7 @@ class TestMatrixSlice: ) def test_init(self, kwargs: Dict[str, Any], expected: Dict[str, Any]) -> None: obj = MatrixSlice(**kwargs) - assert obj.dict(by_alias=False) == expected + assert obj.model_dump(by_alias=False) == expected class TestOperation: @@ -97,12 +97,12 @@ class TestOperation: ) def test_init(self, kwargs: Dict[str, Any], expected: Dict[str, Any]) -> None: obj = Operation(**kwargs) - assert obj.dict(by_alias=False) == expected + assert obj.model_dump(by_alias=False) == expected @pytest.mark.parametrize("operation", list(OPERATIONS)) def test_init__valid_operation(self, operation: str) -> None: 
obj = Operation(operation=operation, value=123) - assert obj.dict(by_alias=False) == { + assert obj.model_dump(by_alias=False) == { "operation": operation, "value": 123.0, } @@ -192,4 +192,4 @@ class TestMatrixEditInstruction: ) def test_init(self, kwargs: Dict[str, Any], expected: Dict[str, Any]) -> None: obj = MatrixEditInstruction(**kwargs) - assert obj.dict(by_alias=False) == expected + assert obj.model_dump(by_alias=False) == expected diff --git a/tests/matrixstore/test_web.py b/tests/matrixstore/test_web.py index 36b7fc366d..b09bbde1cd 100644 --- a/tests/matrixstore/test_web.py +++ b/tests/matrixstore/test_web.py @@ -62,7 +62,7 @@ def test_create() -> None: json=matrix_data, ) assert res.status_code == 200 - assert res.json() == matrix.dict() + assert res.json() == matrix.model_dump() @pytest.mark.unit_test @@ -84,7 +84,7 @@ def test_get() -> None: client = TestClient(app) res = client.get("/v1/matrix/123", headers=create_auth_token(app)) assert res.status_code == 200 - assert res.json() == matrix.dict() + assert res.json() == matrix.model_dump() service.get.assert_called_once_with("123") diff --git a/tests/storage/business/test_arealink_manager.py b/tests/storage/business/test_arealink_manager.py index 77287d52ca..4628f49c3f 100644 --- a/tests/storage/business/test_arealink_manager.py +++ b/tests/storage/business/test_arealink_manager.py @@ -350,7 +350,7 @@ def test_get_all_area(): }, ] areas = area_manager.get_all_areas(study, AreaType.AREA) - assert expected_areas == [area.dict() for area in areas] + assert expected_areas == [area.model_dump() for area in areas] expected_clusters = [ { @@ -363,7 +363,7 @@ def test_get_all_area(): } ] clusters = area_manager.get_all_areas(study, AreaType.DISTRICT) - assert expected_clusters == [area.dict() for area in clusters] + assert expected_clusters == [area.model_dump() for area in clusters] file_tree_mock.get.side_effect = [{}, {}, {}] expected_all = [ @@ -401,14 +401,14 @@ def test_get_all_area(): }, ] all_areas = 
area_manager.get_all_areas(study) - assert expected_all == [area.dict() for area in all_areas] + assert expected_all == [area.model_dump() for area in all_areas] links = link_manager.get_all_links(study) assert [ {"area1": "a1", "area2": "a2", "ui": None}, {"area1": "a1", "area2": "a3", "ui": None}, {"area1": "a2", "area2": "a3", "ui": None}, - ] == [link.dict() for link in links] + ] == [link.model_dump() for link in links] def test_update_area(): diff --git a/tests/storage/business/test_xpansion_manager.py b/tests/storage/business/test_xpansion_manager.py index 717c71bd4a..14d67a048f 100644 --- a/tests/storage/business/test_xpansion_manager.py +++ b/tests/storage/business/test_xpansion_manager.py @@ -197,7 +197,7 @@ def test_get_xpansion_settings(tmp_path: Path, version: int, expected_output: JS xpansion_manager.create_xpansion_configuration(study) actual = xpansion_manager.get_xpansion_settings(study) - assert actual.dict(by_alias=True) == expected_output + assert actual.model_dump(by_alias=True) == expected_output @pytest.mark.unit_test @@ -244,7 +244,7 @@ def test_update_xpansion_settings(tmp_path: Path) -> None: "timelimit": int(1e12), "sensitivity_config": {"epsilon": 10500.0, "projection": ["foo"], "capex": False}, } - assert actual.dict(by_alias=True) == expected + assert actual.model_dump(by_alias=True) == expected @pytest.mark.unit_test @@ -284,13 +284,13 @@ def test_add_candidate(tmp_path: Path) -> None: xpansion_manager.add_candidate(study, new_candidate) - candidates = {"1": new_candidate.dict(by_alias=True, exclude_none=True)} + candidates = {"1": new_candidate.model_dump(by_alias=True, exclude_none=True)} actual = empty_study.tree.get(["user", "expansion", "candidates"]) assert actual == candidates xpansion_manager.add_candidate(study, new_candidate2) - candidates["2"] = new_candidate2.dict(by_alias=True, exclude_none=True) + candidates["2"] = new_candidate2.model_dump(by_alias=True, exclude_none=True) actual = empty_study.tree.get(["user", 
"expansion", "candidates"]) assert actual == candidates @@ -488,14 +488,14 @@ def test_add_resources(tmp_path: Path) -> None: settings = xpansion_manager.get_xpansion_settings(study) settings.yearly_weights = filename3 - update_settings = UpdateXpansionSettings(**settings.dict()) + update_settings = UpdateXpansionSettings(**settings.model_dump()) xpansion_manager.update_xpansion_settings(study, update_settings) with pytest.raises(FileCurrentlyUsedInSettings): xpansion_manager.delete_resource(study, XpansionResourceFileType.WEIGHTS, filename3) settings.yearly_weights = "" - update_settings = UpdateXpansionSettings(**settings.dict()) + update_settings = UpdateXpansionSettings(**settings.model_dump()) xpansion_manager.update_xpansion_settings(study, update_settings) xpansion_manager.delete_resource(study, XpansionResourceFileType.WEIGHTS, filename3) diff --git a/tests/storage/rawstudies/test_factory.py b/tests/storage/rawstudies/test_factory.py index e2cd4391b3..7b4841e295 100644 --- a/tests/storage/rawstudies/test_factory.py +++ b/tests/storage/rawstudies/test_factory.py @@ -53,8 +53,8 @@ def test_factory_cache() -> None: cache.get.return_value = None study = factory.create_from_fs(path, study_id) assert study.config == config - cache.put.assert_called_once_with(cache_id, FileStudyTreeConfigDTO.from_build_config(config).dict()) + cache.put.assert_called_once_with(cache_id, FileStudyTreeConfigDTO.from_build_config(config).model_dump()) - cache.get.return_value = FileStudyTreeConfigDTO.from_build_config(config).dict() + cache.get.return_value = FileStudyTreeConfigDTO.from_build_config(config).model_dump() study = factory.create_from_fs(path, study_id) assert study.config == config diff --git a/tests/storage/test_model.py b/tests/storage/test_model.py index d17d6c89a4..85767a846c 100644 --- a/tests/storage/test_model.py +++ b/tests/storage/test_model.py @@ -55,5 +55,5 @@ def test_file_study_tree_config_dto(): enr_modelling="aggregated", ) config_dto = 
FileStudyTreeConfigDTO.from_build_config(config) - assert sorted(list(config_dto.dict()) + ["cache"]) == sorted(list(config.__dict__)) + assert sorted(list(config_dto.model_dump()) + ["cache"]) == sorted(list(config.__dict__)) assert config_dto.to_build_config() == config diff --git a/tests/storage/test_service.py b/tests/storage/test_service.py index 7ebf94a09e..b003f83353 100644 --- a/tests/storage/test_service.py +++ b/tests/storage/test_service.py @@ -1379,7 +1379,7 @@ def test_unarchive_output(tmp_path: Path) -> None: src=str(tmp_path / "output" / f"{output_id}.zip"), dest=str(tmp_path / "output" / output_id), remove_src=False, - ).dict(), + ).model_dump(), name=f"Unarchive output {study_name}/{output_id} ({study_id})", ref_id=study_id, request_params=RequestParameters(user=DEFAULT_ADMIN_USER), @@ -1510,7 +1510,7 @@ def test_archive_output_locks(tmp_path: Path) -> None: src=str(tmp_path / "output" / f"{output_id}.zip"), dest=str(tmp_path / "output" / output_id), remove_src=False, - ).dict(), + ).model_dump(), name=f"Unarchive output {study_name}/{output_id} ({study_id})", ref_id=study_id, request_params=RequestParameters(user=DEFAULT_ADMIN_USER), diff --git a/tests/storage/web/test_studies_bp.py b/tests/storage/web/test_studies_bp.py index 60be36203c..d4328406c2 100644 --- a/tests/storage/web/test_studies_bp.py +++ b/tests/storage/web/test_studies_bp.py @@ -554,9 +554,9 @@ def test_output_download(tmp_path: Path) -> None: client = TestClient(app, raise_server_exceptions=False) res = client.post( f"/v1/studies/{UUID}/outputs/my-output-id/download", - json=study_download.dict(), + json=study_download.model_dump(), ) - assert res.json() == output_data.dict() + assert res.json() == output_data.model_dump() @pytest.mark.unit_test diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py index 74089bc7cd..ac7a438ba1 100644 --- a/tests/study/business/areas/test_st_storage_management.py +++ 
b/tests/study/business/areas/test_st_storage_management.py @@ -135,7 +135,7 @@ def test_get_all_storages__nominal_case( # Check actual = { - area_id: [form.dict(by_alias=True) for form in clusters_by_ids.values()] + area_id: [form.model_dump(by_alias=True) for form in clusters_by_ids.values()] for area_id, clusters_by_ids in all_storages.items() } expected = { @@ -241,7 +241,7 @@ def test_get_st_storages__nominal_case( groups = manager.get_storages(study, area_id="West") # Check - actual = [form.dict(by_alias=True) for form in groups] + actual = [form.model_dump(by_alias=True) for form in groups] expected = [ { "efficiency": 0.94, @@ -353,7 +353,7 @@ def test_get_st_storage__nominal_case( edit_form = manager.get_storage(study, area_id="West", storage_id="storage1") # Assert that the returned storage fields match the expected fields - actual = edit_form.dict(by_alias=True) + actual = edit_form.model_dump(by_alias=True) expected = { "efficiency": 0.94, "group": STStorageGroup.BATTERY, @@ -535,7 +535,7 @@ def test_get_matrix__nominal_case( matrix = manager.get_matrix(study, area_id="West", storage_id="storage1", ts_name="inflows") # Assert that the returned storage fields match the expected fields - actual = matrix.dict(by_alias=True) + actual = matrix.model_dump(by_alias=True) assert actual == matrix def test_get_matrix__config_not_found( diff --git a/tests/study/business/areas/test_thermal_management.py b/tests/study/business/areas/test_thermal_management.py index dd52ec9538..03866ac640 100644 --- a/tests/study/business/areas/test_thermal_management.py +++ b/tests/study/business/areas/test_thermal_management.py @@ -132,7 +132,7 @@ def test_get_cluster__study_legacy( form = manager.get_cluster(study, area_id="north", cluster_id="2 avail and must 1") # Assert that the returned fields match the expected fields - actual = form.dict(by_alias=True) + actual = form.model_dump(by_alias=True) expected = { "id": "2 avail and must 1", "group": ThermalClusterGroup.GAS, @@ 
-198,7 +198,7 @@ def test_get_clusters__study_legacy( groups = manager.get_clusters(study, area_id="north") # Assert that the returned fields match the expected fields - actual = [form.dict(by_alias=True) for form in groups] + actual = [form.model_dump(by_alias=True) for form in groups] expected = [ { "id": "2 avail and must 1", @@ -354,7 +354,7 @@ def test_create_cluster__study_legacy( form = manager.create_cluster(study, area_id="north", cluster_data=cluster_data) # Assert that the returned fields match the expected fields - actual = form.dict(by_alias=True) + actual = form.model_dump(by_alias=True) expected = { "co2": 12.59, "enabled": True, @@ -414,7 +414,7 @@ def test_update_cluster( # Assert that the returned fields match the expected fields form = manager.get_cluster(study, area_id="north", cluster_id="2 avail and must 1") - actual = form.dict(by_alias=True) + actual = form.model_dump(by_alias=True) expected = { "id": "2 avail and must 1", "group": ThermalClusterGroup.GAS, diff --git a/tests/study/business/test_all_optional_metaclass.py b/tests/study/business/test_all_optional_metaclass.py index 83a859ac5c..1dfc44bf32 100644 --- a/tests/study/business/test_all_optional_metaclass.py +++ b/tests/study/business/test_all_optional_metaclass.py @@ -189,7 +189,7 @@ def test_initialization(self, cls: t.Type[AllOptionalModel]) -> None: # If we convert the model to a dictionary, without `None` values, # we should have a dictionary with default values only. - actual = obj.dict(exclude_none=True) + actual = obj.model_dump(exclude_none=True) expected = { "mandatory_with_default": 0.2, "optional_with_default": 0.2, @@ -305,7 +305,7 @@ def test_initialization(self, cls: t.Type[UseNoneModel]) -> None: # If we convert the model to a dictionary, without `None` values, # we should have an empty dictionary. 
- actual = obj.dict(exclude_none=True) + actual = obj.model_dump(exclude_none=True) expected = {} assert actual == expected diff --git a/tests/study/business/test_allocation_manager.py b/tests/study/business/test_allocation_manager.py index 82f49b2ec4..cfc889fed9 100644 --- a/tests/study/business/test_allocation_manager.py +++ b/tests/study/business/test_allocation_manager.py @@ -35,7 +35,7 @@ def test_base(self): def test_camel_case(self): field = AllocationField(areaId="NORTH", coefficient=1) - assert field.dict(by_alias=True) == { + assert field.model_dump(by_alias=True) == { "areaId": "NORTH", "coefficient": 1, } diff --git a/tests/study/storage/variantstudy/model/test_dbmodel.py b/tests/study/storage/variantstudy/model/test_dbmodel.py index 6ed1bbcba1..e541d6b8ab 100644 --- a/tests/study/storage/variantstudy/model/test_dbmodel.py +++ b/tests/study/storage/variantstudy/model/test_dbmodel.py @@ -152,7 +152,7 @@ def test_init(self, db_session: Session, variant_study_id: str) -> None: # check CommandBlock.to_dto() dto = obj.to_dto() # note: it is easier to compare the dict representation of the DTO - assert dto.dict() == { + assert dto.model_dump() == { "id": command_id, "action": command, "args": json.loads(args), diff --git a/tests/study/storage/variantstudy/test_snapshot_generator.py b/tests/study/storage/variantstudy/test_snapshot_generator.py index e9de3da131..de069ab462 100644 --- a/tests/study/storage/variantstudy/test_snapshot_generator.py +++ b/tests/study/storage/variantstudy/test_snapshot_generator.py @@ -851,7 +851,7 @@ def test_generate__nominal_case( assert len(db_recorder.sql_statements) == 5, str(db_recorder) # Check: the variant generation must succeed. 
- assert results.dict() == { + assert results.model_dump() == { "success": True, "details": [ { @@ -1036,7 +1036,7 @@ def test_generate__with_denormalize_true( ) # Check the results - assert results.dict() == { + assert results.model_dump() == { "success": True, "details": [ { @@ -1159,7 +1159,7 @@ def test_generate__notification_failure( ) # Check the results - assert results.dict() == { + assert results.model_dump() == { "success": True, "details": [ { @@ -1241,7 +1241,7 @@ def test_generate__variant_of_variant( ) # Check the results - assert results.dict() == { + assert results.model_dump() == { "success": True, "details": [ { diff --git a/tests/variantstudy/model/command/test_create_cluster.py b/tests/variantstudy/model/command/test_create_cluster.py index 6554bbe6c2..b473c9df24 100644 --- a/tests/variantstudy/model/command/test_create_cluster.py +++ b/tests/variantstudy/model/command/test_create_cluster.py @@ -148,7 +148,7 @@ def test_to_dto(self, command_context: CommandContext): prepro_id = command_context.matrix_service.create(prepro) modulation_id = command_context.matrix_service.create(modulation) dto = command.to_dto() - assert dto.dict() == { + assert dto.model_dump() == { "action": "create_cluster", "args": { "area_id": "foo", diff --git a/tests/variantstudy/model/command/test_create_renewables_cluster.py b/tests/variantstudy/model/command/test_create_renewables_cluster.py index 78e6dcf15e..423b8150f8 100644 --- a/tests/variantstudy/model/command/test_create_renewables_cluster.py +++ b/tests/variantstudy/model/command/test_create_renewables_cluster.py @@ -119,7 +119,7 @@ def test_to_dto(self, command_context: CommandContext) -> None: command_context=command_context, ) dto = command.to_dto() - assert dto.dict() == { + assert dto.model_dump() == { "action": "create_renewables_cluster", # "renewables" with a final "s". 
"args": { "area_id": "foo", diff --git a/tests/variantstudy/model/command/test_remove_link.py b/tests/variantstudy/model/command/test_remove_link.py index 2704a54013..c95147e838 100644 --- a/tests/variantstudy/model/command/test_remove_link.py +++ b/tests/variantstudy/model/command/test_remove_link.py @@ -58,7 +58,7 @@ def test_remove_link__validation(self, area1: str, area2: str, expected: t.Dict[ and that the areas are well-ordered in alphabetical order (Antares Solver convention). """ command = RemoveLink(area1=area1, area2=area2, command_context=Mock(spec=CommandContext)) - actual = command.dict(include={"area1", "area2"}) + actual = command.model_dump(include={"area1", "area2"}) assert actual == expected @staticmethod diff --git a/tests/variantstudy/model/test_variant_model.py b/tests/variantstudy/model/test_variant_model.py index 98c73b949f..89e10bd3c0 100644 --- a/tests/variantstudy/model/test_variant_model.py +++ b/tests/variantstudy/model/test_variant_model.py @@ -141,7 +141,7 @@ def test_commands_service( repository=variant_study_service.repository, ) results = generator.generate_snapshot(saved_id, jwt_user, denormalize=False) - assert results.dict() == { + assert results.model_dump() == { "success": True, "details": [ {