diff --git a/etrago/analyze/plot.py b/etrago/analyze/plot.py
index d84462b4..d64e9e4e 100644
--- a/etrago/analyze/plot.py
+++ b/etrago/analyze/plot.py
@@ -842,16 +842,18 @@ def calc_dispatch_per_carrier(network, timesteps, dispatch_type="total"):
         ]
 
     dist = pd.Series(
-        index=pd.MultiIndex.from_tuples(index, names=["bus", "carrier"]),
+        index=pd.MultiIndex.from_tuples(
+            index, names=["bus", "carrier"]
+        ).unique(),
         dtype=float,
-    )
+    ).sort_index()
 
     for i in dist.index:
         gens = network.generators[
             (network.generators.bus == i[0])
             & (network.generators.carrier == i[1])
         ].index
-        dist[i] = (
+        dist.loc[i] = (
             (
                 network.generators_t.p[gens].transpose()[
                     network.snapshots[timesteps]
diff --git a/etrago/appl.py b/etrago/appl.py
index 224e25ed..7ab6796a 100644
--- a/etrago/appl.py
+++ b/etrago/appl.py
@@ -680,21 +680,6 @@ def run_etrago(args, json_path):
     # import network from database
     etrago.build_network_from_db()
 
-    # drop generators without p_nom
-    etrago.network.mremove(
-        "Generator",
-        etrago.network.generators[
-            etrago.network.generators.p_nom==0].index
-    )
-
-    # Temporary drop DLR as it is currently not working with sclopf
-    if (etrago.args["method"]["type"] == "sclopf") & (
-        not etrago.network.lines_t.s_max_pu.empty):
-        print("Setting s_max_pu timeseries to 1")
-        etrago.network.lines_t.s_max_pu = pd.DataFrame(
-            index=etrago.network.snapshots,
-        )
-
     # adjust network regarding eTraGo setting
     etrago.adjust_network()
 
@@ -703,16 +688,15 @@
 
     # spatial clustering
    etrago.spatial_clustering()
-
     etrago.spatial_clustering_gas()
-    etrago.network.links.loc[etrago.network.links.carrier=="CH4", "p_nom"] *= 100
-    etrago.network.generators_t.p_max_pu.where(etrago.network.generators_t.p_max_pu>1e-5, other=0., inplace=True)
 
+    # snapshot clustering
     etrago.snapshot_clustering()
 
     # skip snapshots
     etrago.skip_snapshots()
 
+    # start linear optimal powerflow calculations
     etrago.optimize()
 
     # conduct lopf with full complex timeseries for dispatch disaggregation
@@ -722,7 +706,6 @@
     etrago.pf_post_lopf()
 
     # spatial disaggregation
-    # needs to be adjusted for new sectors
     etrago.spatial_disaggregation()
 
     # calculate central etrago results
diff --git a/etrago/cluster/gas.py b/etrago/cluster/gas.py
index 5768b3cd..743403bd 100644
--- a/etrago/cluster/gas.py
+++ b/etrago/cluster/gas.py
@@ -433,20 +433,20 @@ def gas_postprocessing(
         },
         one_port_strategies={
             "Generator": {
-                "marginal_cost": np.mean,
-                "capital_cost": np.mean,
-                "p_nom_max": np.sum,
-                "p_nom_min": np.sum,
-                "e_nom_max": np.sum,
+                "marginal_cost": "mean",
+                "capital_cost": "mean",
+                "p_nom_max": "sum",
+                "p_nom_min": "sum",
+                "e_nom_max": "sum",
             },
             "Store": {
-                "marginal_cost": np.mean,
-                "capital_cost": np.mean,
-                "e_nom": np.sum,
+                "marginal_cost": "mean",
+                "capital_cost": "mean",
+                "e_nom": "sum",
                 "e_nom_max": sum_with_inf,
             },
             "Load": {
-                "p_set": np.sum,
+                "p_set": "sum",
             },
         },
     )
diff --git a/etrago/cluster/spatial.py b/etrago/cluster/spatial.py
index 96124b32..e61c915e 100755
--- a/etrago/cluster/spatial.py
+++ b/etrago/cluster/spatial.py
@@ -127,38 +127,38 @@ def strategies_lines():
 def strategies_one_ports():
     return {
         "StorageUnit": {
-            "marginal_cost": np.mean,
-            "capital_cost": np.mean,
-            "efficiency_dispatch": np.mean,
-            "standing_loss": np.mean,
-            "efficiency_store": np.mean,
-            "p_min_pu": np.min,
+            "marginal_cost": "mean",
+            "capital_cost": "mean",
+            "efficiency_dispatch": "mean",
+            "standing_loss": "mean",
+            "efficiency_store": "mean",
+            "p_min_pu": "min",
             "p_nom_extendable": ext_storage,
             "p_nom_max": sum_with_inf,
         },
         "Store": {
-            "marginal_cost": np.mean,
-            "capital_cost": np.mean,
-            "standing_loss": np.mean,
-            "e_nom": np.sum,
-            "e_nom_min": np.sum,
+            "marginal_cost": "mean",
+            "capital_cost": "mean",
+            "standing_loss": "mean",
+            "e_nom": "sum",
+            "e_nom_min": "sum",
             "e_nom_max": sum_with_inf,
-            "e_initial": np.sum,
-            "e_min_pu": np.mean,
-            "e_max_pu": np.mean,
+            "e_initial": "sum",
+            "e_min_pu": "mean",
+            "e_max_pu": "mean",
         },
     }
 
 
 def strategies_generators():
     return {
-        "p_nom_min": np.min,
+        "p_nom_min": "min",
         "p_nom_max": sum_with_inf,
-        "weight": np.sum,
-        "p_nom": np.sum,
-        "p_nom_opt": np.sum,
-        "marginal_cost": np.mean,
-        "capital_cost": np.mean,
+        "weight": "sum",
+        "p_nom": "sum",
+        "p_nom_opt": "sum",
+        "marginal_cost": "mean",
+        "capital_cost": "mean",
         "e_nom_max": sum_with_inf,
     }
 
@@ -169,30 +169,30 @@ def strategies_links():
         "bus0": _make_consense_links,
         "bus1": _make_consense_links,
         "carrier": _make_consense_links,
-        "p_nom": np.sum,
+        "p_nom": "sum",
         "p_nom_extendable": _make_consense_links,
         "p_nom_max": sum_with_inf,
-        "capital_cost": np.mean,
-        "length": np.mean,
+        "capital_cost": "mean",
+        "length": "mean",
         "geom": nan_links,
         "topo": nan_links,
         "type": nan_links,
-        "efficiency": np.mean,
-        "p_nom_min": np.sum,
-        "p_set": np.mean,
-        "p_min_pu": np.min,
-        "p_max_pu": np.max,
-        "marginal_cost": np.mean,
+        "efficiency": "mean",
+        "p_nom_min": "sum",
+        "p_set": "mean",
+        "p_min_pu": "min",
+        "p_max_pu": "max",
+        "marginal_cost": "mean",
         "terrain_factor": _make_consense_links,
-        "p_nom_opt": np.mean,
+        "p_nom_opt": "mean",
         "country": nan_links,
-        "build_year": np.mean,
-        "lifetime": np.mean,
-        "min_up_time": np.mean,
-        "min_down_time": np.mean,
-        "up_time_before": np.mean,
-        "down_time_before": np.mean,
-        "committable": np.all,
+        "build_year": "mean",
+        "lifetime": "mean",
+        "min_up_time": "mean",
+        "min_down_time": "mean",
+        "up_time_before": "mean",
+        "down_time_before": "mean",
+        "committable": "all",
     }
 
 
@@ -261,7 +261,7 @@ def arrange_dc_bus0_bus1(network):
     strategies.pop("topo")
     strategies.pop("geom")
 
-    new_df = links.groupby(grouper, axis=0).agg(strategies)
+    new_df = links.groupby(grouper).agg(strategies)
     new_df.index = flatten_multiindex(new_df.index).rename("name")
     new_df = pd.concat(
         [new_df, network.links.loc[~links_agg_b]], axis=0, sort=False
@@ -281,7 +281,7 @@
                 df_agg = df_agg.multiply(
                     weighting.loc[df_agg.columns], axis=1
                 )
-                pnl_df = df_agg.groupby(grouper, axis=1).sum()
+                pnl_df = df_agg.T.groupby(grouper).sum().T
                 pnl_df.columns = flatten_multiindex(pnl_df.columns).rename(
                     "name"
                 )
@@ -790,7 +790,7 @@ def kmedoids_dijkstra_clustering(
     kmeans.fit(points)
 
     busmap = pd.Series(
-        data=kmeans.predict(buses.loc[buses_i, ["x", "y"]]),
+        data=kmeans.predict(buses.loc[buses_i, ["x", "y"]].values),
         index=buses_i,
         dtype=object,
     )
diff --git a/etrago/execute/market_optimization.py b/etrago/execute/market_optimization.py
index 7db69528..639de840 100644
--- a/etrago/execute/market_optimization.py
+++ b/etrago/execute/market_optimization.py
@@ -209,7 +209,6 @@ def optimize_with_rolling_horizon(
             n.storage_units.state_of_charge_initial = (
                 n.storage_units_t.state_of_charge.loc[snapshots[start - 1]]
             )
-        print(i)
        # Make sure that state of charge of batteries and pumped hydro
        # plants are cyclic over the year by using the state_of_charges
        # from the pre_market_model
@@ -302,7 +301,7 @@ def build_market_model(self):
 
     logger.info("Start market zone specifc clustering")
 
-    self.clustering, busmap = postprocessing(
+    clustering, busmap = postprocessing(
         self,
         busmap,
         busmap_foreign,
@@ -312,9 +311,7 @@
         apply_on="market_model",
     )
 
-    self.update_busmap(busmap)
-
-    net = self.clustering.network
+    net = clustering.network
     # links_col = net.links.columns
     ac = net.lines[net.lines.carrier == "AC"]
     str1 = "transshipment_"
diff --git a/etrago/tools/utilities.py b/etrago/tools/utilities.py
index 5173f406..c5f91093 100755
--- a/etrago/tools/utilities.py
+++ b/etrago/tools/utilities.py
@@ -24,10 +24,13 @@
 
 from collections.abc import Mapping
 from copy import deepcopy
+from pathlib import Path
+from urllib.request import urlretrieve
 import json
 import logging
 import math
 import os
+import zipfile
 
 from pyomo.environ import Constraint, PositiveReals, Var
 import geoalchemy2
@@ -37,7 +40,7 @@
 import sqlalchemy.exc
 
 if "READTHEDOCS" not in os.environ:
-    from shapely.geometry import Point
+    from shapely.geometry import LineString, Point
     import geopandas as gpd
 
     from etrago.tools import db
@@ -288,9 +291,27 @@ def buses_by_country(self, apply_on="grid_model"):
     con = self.engine
     germany_sh = gpd.read_postgis(query, con, geom_col="geometry")
 
-    path = gpd.datasets.get_path("naturalearth_lowres")
-    shapes = gpd.read_file(path)
-    shapes = shapes[shapes.name.isin([*countries])].set_index(keys="name")
+    # Read European country borders. The original data was downloaded from
+    # naturalearthdata.com and is in the public domain.
+    path_countries = Path(".") / "data" / "shapes_europe"
+
+    if not os.path.exists(path_countries):
+        path_countries.mkdir(exist_ok=True, parents=True)
+        url_countries = (
+            "https://naciscdn.org/naturalearth/110m/cultural/"
+            + "ne_110m_admin_0_countries.zip"
+        )
+        urlretrieve(url_countries, path_countries / "shape_countries.zip")
+        with zipfile.ZipFile(
+            path_countries / "shape_countries.zip", "r"
+        ) as zip_ref:
+            zip_ref.extractall(path_countries)
+
+    shapes = (
+        gpd.read_file(path_countries)
+        .rename(columns={"NAME": "name"})
+        .set_index("name")
+    )
 
     # Use Germany borders from egon-data if not using the SH test case
     if len(germany_sh.gen.unique()) > 1:
@@ -2982,6 +3003,20 @@ def manual_fixes_datamodel(etrago):
         inplace=True,
     )
 
+    # Temporarily drop DLR as it is currently not working with sclopf
+    if (etrago.args["method"]["type"] == "sclopf") & (
+        not etrago.network.lines_t.s_max_pu.empty
+    ):
+        print(
+            """
+            Dynamic line rating is not implemented for the sclopf yet.
+            Setting s_max_pu timeseries to 1
+            """
+        )
+        etrago.network.lines_t.s_max_pu = pd.DataFrame(
+            index=etrago.network.snapshots,
+        )
+
 
 def select_elec_network(etrago, apply_on="grid_model"):
     """
@@ -3209,34 +3244,148 @@ def find_buses_area(etrago, carrier):
 
     return buses_area.index
 
 
-def adjust_before_optimization(self):
+def export_to_shapefile(pypsa_network, shape_files_path=None, srid=4326):
+    """
+    Translates all component DataFrames within the PyPSA network
+    to GeoDataFrames and saves them to shape files.
 
-    def check_e_initial(etrago):
-        stores = etrago.network.stores
-        stores_t = etrago.network.stores_t
-        for st in stores_t["e_max_pu"].columns:
-            e_initial_pu = stores.at[st, "e_initial"] / stores.at[st, "e_nom"]
-            min_e = stores_t["e_min_pu"].iloc[0, :][st]
-            max_e = stores_t["e_max_pu"].iloc[0, :][st]
-            if (e_initial_pu >= min_e) & (e_initial_pu <= max_e):
-                continue
-            else:
-                stores.at[st, "e_initial"] = (
-                    stores.at[st, "e_nom"] * (min_e + max_e) / 2
-                )
+    The shape files can be used to plot the network in QGIS.
 
-        return stores
+    Currently, only the AC network is exported.
 
-    # Temporary drop DLR as it is currently not working with sclopf
-    if self.args["method"]["type"] != "lopf":
-        self.network.lines_t.s_max_pu = pd.DataFrame(
-            index=self.network.snapshots,
-            columns=self.network.lines.index,
-            data=1.0,
-        )
+    Parameters
+    ----------
+    pypsa_network : PyPSA network
+        PyPSA network as in etrago.network.
+    shape_files_path : str or None
+        If provided, GeoDataFrames are saved as shapefiles to this directory.
+        Default: None.
+    srid : int
+        SRID the bus coordinates are given in. By default, WGS84 is assumed.
+        Default: 4326.
+
+    Returns
+    -------
+    dict
+        Dictionary with GeoDataFrames.
+
+    """
+    if shape_files_path:
+        os.makedirs(shape_files_path, exist_ok=True)
+    # convert buses_df
+    buses_df = pypsa_network.buses[pypsa_network.buses.carrier == "AC"]
+    buses_df = buses_df.assign(
+        geometry=gpd.points_from_xy(buses_df.x, buses_df.y, crs=f"EPSG:{srid}")
+    ).drop(columns=["x", "y", "geom"])
 
-    self.network.storage_units.cyclic_state_of_charge = True
+    buses_gdf = gpd.GeoDataFrame(buses_df, crs=f"EPSG:{srid}")
 
-    self.network.lines.loc[self.network.lines.r == 0.0, "r"] = 10
+    # convert component DataFrames
+    components = [
+        "generators",
+        "loads",
+        "storage_units",
+        "stores",
+        "transformers",
+    ]
+    components_dict = {"buses_gdf": buses_gdf}
+
+    for component in components:
+        left_on = "bus1" if component == "transformers" else "bus"
+
+        attr = getattr(pypsa_network, component)
+
+        components_dict[f"{component}_gdf"] = gpd.GeoDataFrame(
+            attr.merge(
+                buses_gdf[["geometry", "v_nom"]],
+                left_on=left_on,
+                right_index=True,
+            ),
+            crs=f"EPSG:{srid}",
+        )
+        if components_dict[f"{component}_gdf"].empty:
+            components_dict[f"{component}_gdf"].index = components_dict[
+                f"{component}_gdf"
+            ].index.astype(object)
+
+    # convert lines
+    lines_df = pypsa_network.lines
+    lines_df = lines_df.drop(columns=["geom"])
+
+    lines_gdf = lines_df.merge(
+        buses_gdf[["geometry", "v_nom"]].rename(
+            columns={"geometry": "geom_0"}
+        ),
+        left_on="bus0",
+        right_index=True,
+    )
+    lines_gdf = lines_gdf.merge(
+        buses_gdf[["geometry"]].rename(columns={"geometry": "geom_1"}),
+        left_on="bus1",
+        right_index=True,
+    )
+    lines_gdf["geometry"] = lines_gdf.apply(
+        lambda _: LineString([_["geom_0"], _["geom_1"]]), axis=1
+    )
+    lines_gdf = gpd.GeoDataFrame(lines_gdf, crs=f"EPSG:{srid}")
+    components_dict["lines_gdf"] = lines_gdf
+
+    save_cols = {
+        "buses_gdf": ["scn_name", "v_nom", "carrier", "country", "geometry"],
+        "generators_gdf": [
+            "scn_name",
+            "bus",
+            "carrier",
+            "p_nom",
+            "p_nom_extendable",
+            "v_nom",
+            "geometry",
+        ],
+        "loads_gdf": ["scn_name", "bus", "carrier", "v_nom", "geometry"],
+        "storage_units_gdf": [
+            "scn_name",
+            "bus",
+            "carrier",
+            "p_nom",
+            "p_nom_extendable",
+            "v_nom",
+            "geometry",
+        ],
+        "stores_gdf": [
+            "scn_name",
+            "bus",
+            "carrier",
+            "e_nom",
+            "e_nom_extendable",
+            "v_nom",
+            "geometry",
+        ],
+        "transformers_gdf": [
+            "scn_name",
+            "bus0",
+            "bus1",
+            "x",
+            "r",
+            "s_nom",
+            "s_nom_extendable",
+            "geometry",
+        ],
+        "lines_gdf": [
+            "bus0",
+            "bus1",
+            "x",
+            "r",
+            "s_nom",
+            "s_nom_extendable",
+            "length",
+            "num_parallel",
+            "geometry",
+            "v_nom",
+        ],
+    }
+    if shape_files_path:
+        for k, v in components_dict.items():
+            shp_filename = os.path.join(shape_files_path, f"{k}.shp")
+            v.loc[:, save_cols[k]].to_file(shp_filename)
 
-    self.network.stores = check_e_initial(self)
+    return components_dict
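
Usage note: the new export_to_shapefile helper in etrago/tools/utilities.py returns a
dictionary of GeoDataFrames and, if a path is given, also writes them as shapefiles for
inspection in QGIS. A minimal sketch of how it could be called after a solved run; the
"results/shapes" output directory and the already-built etrago object from appl.py are
assumptions for illustration, not part of this diff:

    from etrago.tools.utilities import export_to_shapefile

    # Export the clustered AC grid; the returned dict can also be used directly.
    gdfs = export_to_shapefile(
        etrago.network, shape_files_path="results/shapes", srid=4326
    )
    gdfs["lines_gdf"].plot()  # quick visual check before opening the .shp files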
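
Note on the aggregation changes in etrago/cluster/gas.py and etrago/cluster/spatial.py:
replacing np.mean/np.sum/np.min/np.max with the string aliases "mean"/"sum"/"min"/"max"
and rewriting df.groupby(grouper, axis=1) as df.T.groupby(grouper)...T appears to track
pandas deprecations of numpy callables as aggregators and of axis=1 grouping. A small
sketch of the equivalence on toy data (not taken from the diff):

    import pandas as pd

    # Two time series columns that belong to the same cluster.
    df = pd.DataFrame({"a": [1.0, 2.0], "b": [3.0, 4.0]}, index=["t0", "t1"])
    grouper = pd.Series({"a": "cluster_0", "b": "cluster_0"})

    # Old, deprecated form: df.groupby(grouper, axis=1).sum()
    # New form: transpose, group over the former columns, transpose back.
    aggregated = df.T.groupby(grouper).sum().T
    print(aggregated)  # a single column "cluster_0" with the row-wise sums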