Skip to content

Commit

Permalink
Merge branch 'dev' into features/#665-interface-pre-market-market-optimization
Browse files Browse the repository at this point in the history
  • Loading branch information
ClaraBuettner committed Feb 12, 2024
2 parents cd1fbca + 71654cb commit 7213e2f
Show file tree
Hide file tree
Showing 15 changed files with 413 additions and 255 deletions.
3 changes: 2 additions & 1 deletion etrago/appl.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,10 +51,11 @@
"db": "egon-data", # database session
"gridversion": None, # None for model_draft or Version number
"method": { # Choose method and settings for optimization
"type": "market_grid", # type of optimization, currently only 'lopf'
"type": "market_grid", # type of optimization, 'lopf' or 'market_grid'
"n_iter": 1, # abort criterion of iterative optimization, 'n_iter' or 'threshold'
"pyomo": True, # set if pyomo is used for model building
"formulation": "pyomo",
"market_zones": "status_quo", # only used if type='market_grid'
"rolling_horizon": { # Define parameter of market optimization
"planning_horizon": 72, # number of snapshots in each optimization
"overlap": 24, # number of overlapping hours
Expand Down
3 changes: 2 additions & 1 deletion etrago/args.json
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
"method": {
"type": "lopf",
"n_iter": 4,
"pyomo": true
"pyomo": true,
"market_zones": "status_quo"
},
"pf_post_lopf": {
"active": false,
Expand Down
20 changes: 11 additions & 9 deletions etrago/cluster/disaggregation.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,9 +124,9 @@ def from_busmap(x):
if not left_external_connectors.empty:
ca_option = pd.get_option("mode.chained_assignment")
pd.set_option("mode.chained_assignment", None)
left_external_connectors.loc[
:, "bus0"
] = left_external_connectors.loc[:, "bus0"].apply(from_busmap)
left_external_connectors.loc[:, "bus0"] = (
left_external_connectors.loc[:, "bus0"].apply(from_busmap)
)
pd.set_option("mode.chained_assignment", ca_option)
external_buses = pd.concat(
(external_buses, left_external_connectors.bus0)
Expand All @@ -139,9 +139,9 @@ def from_busmap(x):
if not right_external_connectors.empty:
ca_option = pd.get_option("mode.chained_assignment")
pd.set_option("mode.chained_assignment", None)
right_external_connectors.loc[
:, "bus1"
] = right_external_connectors.loc[:, "bus1"].apply(from_busmap)
right_external_connectors.loc[:, "bus1"] = (
right_external_connectors.loc[:, "bus1"].apply(from_busmap)
)
pd.set_option("mode.chained_assignment", ca_option)
external_buses = pd.concat(
(external_buses, right_external_connectors.bus1)
Expand Down Expand Up @@ -738,9 +738,11 @@ def solve_partial_network(
weight = reduce(
multiply,
(
filtered.loc[:, key]
if not timed(key)
else pn_t[key].loc[:, filtered.index]
(
filtered.loc[:, key]
if not timed(key)
else pn_t[key].loc[:, filtered.index]
)
for key in weights[s]
),
1,
Expand Down
12 changes: 6 additions & 6 deletions etrago/cluster/gas.py
Original file line number Diff line number Diff line change
Expand Up @@ -514,12 +514,12 @@ def gas_postprocessing(etrago, busmap, medoid_idx=None):
]
if len(h2_idx) > 0:
h2_idx = h2_idx.index.tolist()[0]
network_gasgrid_c.buses.loc[
h2_idx, "x"
] = etrago.network.buses["x"].loc[medoid]
network_gasgrid_c.buses.loc[
h2_idx, "y"
] = etrago.network.buses["y"].loc[medoid]
network_gasgrid_c.buses.loc[h2_idx, "x"] = (
etrago.network.buses["x"].loc[medoid]
)
network_gasgrid_c.buses.loc[h2_idx, "y"] = (
etrago.network.buses["y"].loc[medoid]
)

network_gasgrid_c.buses.loc[i, "x"] = etrago.network.buses.loc[
medoid, "x"
Expand Down
72 changes: 36 additions & 36 deletions etrago/cluster/snapshot.py
Original file line number Diff line number Diff line change
Expand Up @@ -373,9 +373,9 @@ def segmentation_extreme_periods(
if date < maxi:
i = i + 1
else:
timeseries[
"SegmentDuration_Extreme"
] = timeseries.index.get_level_values("SegmentDuration")
timeseries["SegmentDuration_Extreme"] = (
timeseries.index.get_level_values("SegmentDuration")
)
old_row = timeseries.iloc[i].copy()
old_row = pd.DataFrame(old_row).transpose()

Expand All @@ -395,12 +395,12 @@ def segmentation_extreme_periods(
if new_date.isin(
timeseries.index.get_level_values("dates")
):
timeseries[
"dates"
] = timeseries.index.get_level_values("dates")
timeseries[
"SegmentNo"
] = timeseries.index.get_level_values("SegmentNo")
timeseries["dates"] = (
timeseries.index.get_level_values("dates")
)
timeseries["SegmentNo"] = (
timeseries.index.get_level_values("SegmentNo")
)
timeseries["SegmentDuration"] = timeseries[
"SegmentDuration_Extreme"
]
Expand Down Expand Up @@ -428,12 +428,12 @@ def segmentation_extreme_periods(
for col in new_row.columns:
new_row[col][0] = old_row[col][0]

timeseries[
"dates"
] = timeseries.index.get_level_values("dates")
timeseries[
"SegmentNo"
] = timeseries.index.get_level_values("SegmentNo")
timeseries["dates"] = (
timeseries.index.get_level_values("dates")
)
timeseries["SegmentNo"] = (
timeseries.index.get_level_values("SegmentNo")
)
timeseries["SegmentDuration"] = timeseries[
"SegmentDuration_Extreme"
]
Expand All @@ -457,9 +457,9 @@ def segmentation_extreme_periods(
else:
if i == -1:
i = 0
max_val[
"SegmentDuration"
] = timeseries.index.get_level_values("SegmentDuration")[i]
max_val["SegmentDuration"] = (
timeseries.index.get_level_values("SegmentDuration")[i]
)
max_val.set_index(
["dates", "SegmentNo", "SegmentDuration"], inplace=True
)
Expand Down Expand Up @@ -496,9 +496,9 @@ def segmentation_extreme_periods(
if date < mini:
i = i + 1
else:
timeseries[
"SegmentDuration_Extreme"
] = timeseries.index.get_level_values("SegmentDuration")
timeseries["SegmentDuration_Extreme"] = (
timeseries.index.get_level_values("SegmentDuration")
)
old_row = timeseries.iloc[i].copy()
old_row = pd.DataFrame(old_row).transpose()

Expand All @@ -518,12 +518,12 @@ def segmentation_extreme_periods(
if new_date.isin(
timeseries.index.get_level_values("dates")
):
timeseries[
"dates"
] = timeseries.index.get_level_values("dates")
timeseries[
"SegmentNo"
] = timeseries.index.get_level_values("SegmentNo")
timeseries["dates"] = (
timeseries.index.get_level_values("dates")
)
timeseries["SegmentNo"] = (
timeseries.index.get_level_values("SegmentNo")
)
timeseries["SegmentDuration"] = timeseries[
"SegmentDuration_Extreme"
]
Expand All @@ -550,12 +550,12 @@ def segmentation_extreme_periods(
)
for col in new_row.columns:
new_row[col][0] = old_row[col][0]
timeseries[
"dates"
] = timeseries.index.get_level_values("dates")
timeseries[
"SegmentNo"
] = timeseries.index.get_level_values("SegmentNo")
timeseries["dates"] = (
timeseries.index.get_level_values("dates")
)
timeseries["SegmentNo"] = (
timeseries.index.get_level_values("SegmentNo")
)
timeseries["SegmentDuration"] = timeseries[
"SegmentDuration_Extreme"
]
Expand All @@ -579,9 +579,9 @@ def segmentation_extreme_periods(
else:
if i == -1:
i = 0
min_val[
"SegmentDuration"
] = timeseries.index.get_level_values("SegmentDuration")[i]
min_val["SegmentDuration"] = (
timeseries.index.get_level_values("SegmentDuration")[i]
)
min_val.set_index(
["dates", "SegmentNo", "SegmentDuration"], inplace=True
)
Expand Down
6 changes: 3 additions & 3 deletions etrago/cluster/spatial.py
Original file line number Diff line number Diff line change
Expand Up @@ -591,9 +591,9 @@ def kmean_clustering(etrago, selected_network, weight, n_clusters):
if kmean_settings["use_reduced_coordinates"]:
# TODO : FIX THIS HACK THAT HAS UNEXPECTED SIDE-EFFECTS,
# i.e. network is changed in place!!
network.buses.loc[
busmap.index, ["x", "y"]
] = network.buses.loc[busmap, ["x", "y"]].values
network.buses.loc[busmap.index, ["x", "y"]] = (
network.buses.loc[busmap, ["x", "y"]].values
)

clustering = get_clustering_from_busmap(
network,
Expand Down
38 changes: 17 additions & 21 deletions etrago/execute/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,9 +278,9 @@ def iterate_lopf(
]
etrago.network_tsa.transformers.s_nom_extendable = False

etrago.network_tsa.storage_units[
"p_nom"
] = etrago.network.storage_units["p_nom_opt"]
etrago.network_tsa.storage_units["p_nom"] = (
etrago.network.storage_units["p_nom_opt"]
)
etrago.network_tsa.storage_units["p_nom_extendable"] = False

etrago.network_tsa.stores["e_nom"] = etrago.network.stores["e_nom_opt"]
Expand Down Expand Up @@ -471,13 +471,13 @@ def dispatch_disaggregation(self):
index=transits,
)
for storage in self.network.storage_units.index:
self.conduct_dispatch_disaggregation[
storage
] = self.network.storage_units_t.state_of_charge[storage]
self.conduct_dispatch_disaggregation[storage] = (
self.network.storage_units_t.state_of_charge[storage]
)
for store in sto.index:
self.conduct_dispatch_disaggregation[
store
] = self.network.stores_t.e[store]
self.conduct_dispatch_disaggregation[store] = (
self.network.stores_t.e[store]
)

extra_func = self.args["extra_functionality"]
self.args["extra_functionality"] = {}
Expand Down Expand Up @@ -517,9 +517,9 @@ def dispatch_disaggregation(self):
self.network.transformers.s_nom_extendable = (
self.network_tsa.transformers.s_nom_extendable
)
self.network.storage_units[
"p_nom_extendable"
] = self.network_tsa.storage_units["p_nom_extendable"]
self.network.storage_units["p_nom_extendable"] = (
self.network_tsa.storage_units["p_nom_extendable"]
)
self.network.stores["e_nom_extendable"] = self.network_tsa.stores[
"e_nom_extendable"
]
Expand Down Expand Up @@ -807,9 +807,9 @@ def drop_foreign_components(network):

# Assign generators control strategy
ac_bus = network.buses[network.buses.carrier == "AC"]
network.generators.control[
network.generators.bus.isin(ac_bus.index)
] = "PV"
network.generators.control[network.generators.bus.isin(ac_bus.index)] = (
"PV"
)
network.generators.control[
network.generators.carrier == "load shedding"
] = "PQ"
Expand Down Expand Up @@ -1049,9 +1049,7 @@ def calc_line_losses(network, converged):
"""
# Line losses
# calculate apparent power S = sqrt(p² + q²) [in MW]
s0_lines = (network.lines_t.p0**2 + network.lines_t.q0**2).apply(
np.sqrt
)
s0_lines = (network.lines_t.p0**2 + network.lines_t.q0**2).apply(np.sqrt)
# in case some snapshots did not converge, discard them from the
# calculation
s0_lines.loc[converged[converged is False].index, :] = np.nan
Expand All @@ -1061,9 +1059,7 @@ def calc_line_losses(network, converged):
)
# calculate losses per line and timestep network.\
# lines_t.line_losses = I² * R [in MW]
network.lines_t.losses = np.divide(
i0_lines**2 * network.lines.r, 1000000
)
network.lines_t.losses = np.divide(i0_lines**2 * network.lines.r, 1000000)
# calculate total losses per line [in MW]
network.lines = network.lines.assign(
losses=np.sum(network.lines_t.losses).values
Expand Down
14 changes: 5 additions & 9 deletions etrago/execute/grid_optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -216,9 +216,13 @@ def add_redispatch_generators(self):
# Set maximum feed-in limit for ramp up generators based on feed-in of
# (disaggregated) generators from the market optimization and potential
# feedin time series
p_max_pu_all = self.network.get_switchable_as_dense(
"Generator", "p_max_pu"
)

self.network.generators_t.p_max_pu.loc[:, gens_redispatch + " ramp_up"] = (
(
self.network.generators_t.p_max_pu.loc[:, gens_redispatch].mul(
p_max_pu_all.loc[:, gens_redispatch].mul(
self.network.generators.loc[gens_redispatch, "p_nom"]
)
- (self.market_model.generators_t.p.loc[self.network.snapshots,
Expand Down Expand Up @@ -319,14 +323,6 @@ def add_redispatch_generators(self):
# self.network.buses.index.isin(['47085', '47086', '37865', '37870'
# ])].index, inplace=True)

# TEMPORAL
self.network.generators.loc[
self.network.generators.index.str.contains("run_of_river"), "p_max_pu"
] = 0.65
self.network.generators.loc[
self.network.generators.index.str.contains("reservoir"), "p_max_pu"
] = 0.65


def extra_functionality():
return None
Loading

0 comments on commit 7213e2f

Please sign in to comment.