From 24a795d970a8c144c06ea72f161bc531b72377c4 Mon Sep 17 00:00:00 2001
From: Jonas_Gothe
Date: Tue, 10 Dec 2024 15:18:10 +0100
Subject: [PATCH] Read in shape files and market-zone-specific clustering of
 the market model

---
 etrago/appl.py                         |   7 +-
 etrago/execute/__init__.py             |   2 +-
 etrago/execute/market_optimization.py  | 192 +++++++++++++++++++++++++-
 3 files changed, 196 insertions(+), 5 deletions(-)

diff --git a/etrago/appl.py b/etrago/appl.py
index d5f74368..b5a583f7 100644
--- a/etrago/appl.py
+++ b/etrago/appl.py
@@ -58,7 +58,7 @@
     "market_optimization": {
         "active": True,
-        "market_zones": "status_quo",  # only used if type='market_grid'
+        "market_zones": "5_zones",  # only used if type='market_grid'
         "rolling_horizon": {  # Define parameter of market optimization
             "planning_horizon": 168,  # number of snapshots in each optimization
             "overlap": 120,  # number of overlapping hours
@@ -80,8 +80,9 @@
         "method": 2,
         "crossover": 0,
         "logFile": "solver_etrago.log",
-        "threads": 4,
+        "threads": 7,
     },
+    "model_formulation": "kirchhoff",  # angles or kirchhoff
     "scn_name": "eGon2035",  # scenario: eGon2035, eGon100RE or status2019
     # Scenario variations:
@@ -138,7 +139,7 @@
         "n_init": 10,  # affects clustering algorithm, only change when neccesary
         "max_iter": 100,  # affects clustering algorithm, only change when neccesary
         "tol": 1e-6,  # affects clustering algorithm, only change when neccesary
-        "CPU_cores": 4,  # number of cores used during clustering, "max" for all cores available.
+        "CPU_cores": 7,  # number of cores used during clustering, "max" for all cores available.
     },
     "sector_coupled_clustering": {
         "active": True,  # choose if clustering is activated
diff --git a/etrago/execute/__init__.py b/etrago/execute/__init__.py
index 7d00e861..fb9d3c9a 100644
--- a/etrago/execute/__init__.py
+++ b/etrago/execute/__init__.py
@@ -218,7 +218,7 @@ def run_lopf(etrago, extra_functionality, method):
             solver_name=etrago.args["solver"],
             solver_options=etrago.args["solver_options"],
             extra_functionality=extra_functionality,
-            formulation=etrago.args["model_formulation"],
+            # formulation=etrago.args["model_formulation"],
         )
         if status != "ok":
             logger.warning(
diff --git a/etrago/execute/market_optimization.py b/etrago/execute/market_optimization.py
index 47436a7e..9b30f40a 100644
--- a/etrago/execute/market_optimization.py
+++ b/etrago/execute/market_optimization.py
@@ -27,8 +27,10 @@
 import logging
 
 from pypsa.components import component_attrs
+from shapely.geometry import Point
 import pandas as pd
-
+import geopandas as gpd
+
 from etrago.cluster.electrical import postprocessing, preprocessing
 from etrago.tools.constraints import Constraints
 
@@ -240,6 +242,7 @@ def optimize_with_rolling_horizon(
 
     return n
 
+
 def build_market_model(self):
     """Builds market model based on imported network from eTraGo
 
@@ -291,7 +294,193 @@ def build_market_model(self):
             net.buses.cluster.astype(int).astype(str), net.buses.index
         )
         medoid_idx = pd.Series(dtype=str)
+
+    elif (
+        self.args["method"]["market_optimization"]["market_zones"]
+        == "2_zones"
+    ):
+
+        # Load shapefile for the two market zones (Zone_1, Zone_2)
+        zones = gpd.read_file("/home/dozeumcui/Masterarbeit/Shape_Dateien/2_Zonen.shp").to_crs(epsg=4326)
+
+        # Explode multi-polygons
+        zones = zones.explode(index_parts=False).reset_index(drop=True)
+
+        # Convert net.buses to a GeoDataFrame
+        geometry = [
+            Point(xy) for xy in zip(net.buses["x"].values, net.buses["y"].values)
+        ]
+        geo_buses = gpd.GeoDataFrame(net.buses, geometry=geometry, crs="EPSG:4326")
+
+        # Spatial join to assign zones
+        geo_buses = gpd.sjoin(geo_buses, zones[["geometry", "id"]], how="left", predicate="within")
+
+        def assign_zone(row):
+            if pd.notnull(row["id"]):
+                if row["id"] == 1:
+                    return "Zone_1"
+                elif row["id"] == 2:
+                    return "Zone_2"
+            else:
+                # If id is NaN (bus outside all zone polygons), use the country value
+                return row["country"]
+
+        geo_buses["marketzone"] = geo_buses.apply(assign_zone, axis=1)
+
+        # Assign clusters based on the zones
+        geo_buses["cluster"] = geo_buses.groupby("marketzone").ngroup()
+        net.buses["cluster"] = geo_buses["cluster"]
+
+        busmap = pd.Series(
+            net.buses.cluster.astype(int).astype(str), net.buses.index
+        )
+        medoid_idx = pd.Series(dtype=str)
+
+    elif (
+        self.args["method"]["market_optimization"]["market_zones"]
+        == "3_zones"
+    ):
+
+        # Load shapefile for the three market zones (Zone_1, Zone_2, Zone_3)
+        zones = gpd.read_file("/home/dozeumcui/Masterarbeit/Shape_Dateien/3_Zonen.shp").to_crs(epsg=4326)
+
+        # Explode multi-polygons
+        zones = zones.explode(index_parts=False).reset_index(drop=True)
+
+        # Convert net.buses to a GeoDataFrame
+        geometry = [
+            Point(xy) for xy in zip(net.buses["x"].values, net.buses["y"].values)
+        ]
+        geo_buses = gpd.GeoDataFrame(net.buses, geometry=geometry, crs="EPSG:4326")
+
+        # Spatial join to assign zones
+        geo_buses = gpd.sjoin(geo_buses, zones[["geometry", "id"]], how="left", predicate="within")
+
+        def assign_zone(row):
+            if pd.notnull(row["id"]):
+                if row["id"] == 1:
+                    return "Zone_1"
+                elif row["id"] == 2:
+                    return "Zone_2"
+                elif row["id"] == 3:
+                    return "Zone_3"
+            else:
+                # If id is NaN (bus outside all zone polygons), use the country value
+                return row["country"]
+
+        geo_buses["marketzone"] = geo_buses.apply(assign_zone, axis=1)
+
+        # Assign clusters based on the zones
+        geo_buses["cluster"] = geo_buses.groupby("marketzone").ngroup()
+        net.buses["cluster"] = geo_buses["cluster"]
+
+        busmap = pd.Series(
+            net.buses.cluster.astype(int).astype(str), net.buses.index
+        )
+        medoid_idx = pd.Series(dtype=str)
+
+    elif (
+        self.args["method"]["market_optimization"]["market_zones"]
+        == "4_zones"
+    ):
+
+        # Load shapefile for the four market zones (Zone_1 to Zone_4)
+        zones = gpd.read_file("/home/dozeumcui/Masterarbeit/Shape_Dateien/4_Zonen.shp").to_crs(epsg=4326)
+
+        # Explode multi-polygons
+        zones = zones.explode(index_parts=False).reset_index(drop=True)
+
+        # Convert net.buses to a GeoDataFrame
+        geometry = [
+            Point(xy) for xy in zip(net.buses["x"].values, net.buses["y"].values)
+        ]
+        geo_buses = gpd.GeoDataFrame(net.buses, geometry=geometry, crs="EPSG:4326")
+
+        # Spatial join to assign zones
+        geo_buses = gpd.sjoin(geo_buses, zones[["geometry", "id"]], how="left", predicate="within")
+
+        def assign_zone(row):
+            if pd.notnull(row["id"]):
+                if row["id"] == 1:
+                    return "Zone_1"
+                elif row["id"] == 2:
+                    return "Zone_2"
+                elif row["id"] == 3:
+                    return "Zone_3"
+                elif row["id"] == 4:
+                    return "Zone_4"
+            else:
+                # If id is NaN (bus outside all zone polygons), use the country value
+                return row["country"]
+
+        geo_buses["marketzone"] = geo_buses.apply(assign_zone, axis=1)
+
+        # Assign clusters based on the zones
+        geo_buses["cluster"] = geo_buses.groupby("marketzone").ngroup()
+        net.buses["cluster"] = geo_buses["cluster"]
+
+        busmap = pd.Series(
+            net.buses.cluster.astype(int).astype(str), net.buses.index
+        )
+        medoid_idx = pd.Series(dtype=str)
+
+    elif (
+        self.args["method"]["market_optimization"]["market_zones"]
+        == "5_zones"
+    ):
+
+        # Load shapefile for the five market zones (Zone_1 to Zone_5)
+        zones = gpd.read_file("/home/dozeumcui/Masterarbeit/Shape_Dateien/5_Zonen.shp").to_crs(epsg=4326)
+
+        # Explode multi-polygons if necessary
+        zones = zones.explode(index_parts=False).reset_index(drop=True)
+
+        # Convert net.buses to a GeoDataFrame
+        geometry = [
+            Point(xy) for xy in zip(net.buses["x"].values, net.buses["y"].values)
+        ]
+        geo_buses = gpd.GeoDataFrame(net.buses, geometry=geometry, crs="EPSG:4326")
+
+        # Spatial join to assign zones
+        geo_buses = gpd.sjoin(geo_buses, zones[["geometry", "id"]], how="left", predicate="within")
+
+        def assign_zone(row):
+            if pd.notnull(row["id"]):
+                if row["id"] == 1:
+                    return "Zone_1"
+                elif row["id"] == 2:
+                    return "Zone_2"
+                elif row["id"] == 3:
+                    return "Zone_3"
+                elif row["id"] == 4:
+                    return "Zone_4"
+                elif row["id"] == 5:
+                    return "Zone_5"
+            else:
+                # If id is NaN (bus outside all zone polygons), use the country value
+                return row["country"]
+
+        geo_buses["marketzone"] = geo_buses.apply(assign_zone, axis=1)
+
+        # Assign clusters based on the zones
+        geo_buses["cluster"] = geo_buses.groupby("marketzone").ngroup()
+        net.buses["cluster"] = geo_buses["cluster"]
+
+        busmap = pd.Series(
+            net.buses.cluster.astype(int).astype(str), net.buses.index
+        )
+        medoid_idx = pd.Series(dtype=str)
+
     else:
         logger.warning(
             f"""
@@ -301,6 +490,7 @@ def build_market_model(self):
 
     logger.info("Start market zone specifc clustering")
 
+
     clustering, busmap = postprocessing(
         self,
         busmap,
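
Reviewer note (not part of the patch): the five "*_zones" branches added above differ only in the shapefile path and the number of zone labels. A possible consolidation is sketched below. The helper name assign_market_zones and the derivation of the shapefile name from the "market_zones" setting are illustrative assumptions, not existing eTraGo API; the sketch assumes every *_Zonen.shp file carries an integer "id" column numbered 1..n, as in the patch.

import geopandas as gpd
import pandas as pd
from shapely.geometry import Point


def assign_market_zones(buses, shapefile_path):
    """Return a busmap mapping each bus to a market-zone cluster id (string).

    Buses whose coordinates fall inside the polygon with id ``i`` are labelled
    ``Zone_i``; buses outside every polygon keep their country code.
    """
    zones = gpd.read_file(shapefile_path).to_crs(epsg=4326)
    zones = zones.explode(index_parts=False).reset_index(drop=True)

    # Turn the bus table (x/y coordinates) into a GeoDataFrame of points
    geo_buses = gpd.GeoDataFrame(
        buses.copy(),
        geometry=[Point(xy) for xy in zip(buses["x"], buses["y"])],
        crs="EPSG:4326",
    )
    geo_buses = gpd.sjoin(
        geo_buses, zones[["geometry", "id"]], how="left", predicate="within"
    )
    # A bus matched by several (overlapping) polygons appears more than once
    # after the spatial join; keep the first match so the index stays unique.
    geo_buses = geo_buses[~geo_buses.index.duplicated(keep="first")]

    # Zone label from the polygon id, country code as fallback
    marketzone = geo_buses["id"].map(
        lambda i: f"Zone_{int(i)}" if pd.notnull(i) else None
    )
    marketzone = marketzone.fillna(geo_buses["country"])

    # Consecutive integer cluster ids per market zone, returned as strings
    return marketzone.groupby(marketzone).ngroup().astype(str)

Inside build_market_model() this would replace every per-zone branch with one call, e.g. deriving the file name from the setting ("5_zones" -> "5_Zonen.shp") and the directory used in the patch, followed by medoid_idx = pd.Series(dtype=str) as before. Keeping the zone label a function of the polygon id also avoids the copy-pasted assign_zone definitions, which silently return None for any id outside the hard-coded range.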