Merge pull request #7 from rodekruis/waterlevel_fix
Waterlevel fix
JanvE97 authored Dec 12, 2024
2 parents ff0d5b7 + ef97197 commit aa4587e
Showing 8 changed files with 416 additions and 247 deletions.
10 changes: 7 additions & 3 deletions flash_flood_pipeline/data_download/collect_data.py
@@ -21,6 +21,9 @@
METEO_RAIN_SENSOR,
)
from itertools import compress
import logging

logger = logging.getLogger(__name__)


class dataGetter:
@@ -218,6 +221,7 @@ def get_rain_forecast(self):
or not expected_cosmo_hindcast_path.exists()
):
# switch to gfs
logger.info("COSMO-data not found, switching to GFS-data")
hindcast_start = round_to_nearest_hour(datetime.now()) - timedelta(
days=2, hours=3
)
@@ -307,6 +311,7 @@ def get_rain_forecast(self):
)
gfs_data[row["placeCode"]] = gfs_precipitation
else:
logger.info("Retrieving COSMO-data")
ta_gdf_4326 = self.ta_gdf.copy()
ta_gdf_4326.to_crs(4326, inplace=True)

@@ -388,8 +393,7 @@ def get_rain_forecast(self):
# val = val.set_index("datetime")
# combined_rainfall.append(val)

# print(pd.concat(combined_rainfall, axis=1).head)
# pd.concat(combined_rainfall, axis=1).to_csv(
#     r"d:\VSCode\IBF-flash-flood-pipeline\flash_flood_pipeline\rainfall_test_gfs_sampling.csv"
#     r"C:\Users\923265\Downloads\gfs_rainfall_prediction.csv"
# )

return gfs_data
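
The collect_data.py changes add module-level logging around the COSMO-to-GFS fallback. Below is a minimal sketch of that decision path: choose_rainfall_source is a hypothetical helper and the body of round_to_nearest_hour is an assumption (the pipeline ships its own), while the log messages and the 2-day-3-hour hindcast offset come straight from the diff.

    import logging
    from datetime import datetime, timedelta
    from pathlib import Path

    logger = logging.getLogger(__name__)


    def round_to_nearest_hour(dt):
        # assumed implementation; the pipeline provides its own helper of this name
        return (dt + timedelta(minutes=30)).replace(minute=0, second=0, microsecond=0)


    def choose_rainfall_source(expected_cosmo_hindcast_path: Path):
        """Pick the rainfall source and, for GFS, the hindcast window start."""
        if not expected_cosmo_hindcast_path.exists():
            logger.info("COSMO-data not found, switching to GFS-data")
            # GFS hindcast starts 2 days and 3 hours before the current (rounded) hour
            hindcast_start = round_to_nearest_hour(datetime.now()) - timedelta(days=2, hours=3)
            return "gfs", hindcast_start
        logger.info("Retrieving COSMO-data")
        return "cosmo", None
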
22 changes: 13 additions & 9 deletions flash_flood_pipeline/data_upload/upload_results.py
@@ -9,11 +9,11 @@
KARONGA_PLACECODES,
RUMPHI_PLACECODES,
BLANTYRE_PLACECODES,
THRESHOLD_CORRECTION_VALUES,
)
from mapping_tables.exposure_mapping_tables import (
EXPOSURE_TYPES,
TA_EXPOSURE_DICT,
POINT_EXPOSURE_DICT,
GEOSERVER_EXPOSURE_DICT,
)
from utils.api import api_post_request
@@ -79,11 +79,20 @@ def upload_and_trigger_tas(self):
ta_exposure_trigger = self.TA_exposure.copy()

def determine_ta_trigger_state(row):
if row["placeCode"] in THRESHOLD_CORRECTION_VALUES:
threshold = int(
ALERT_THRESHOLD_VALUE
+ THRESHOLD_CORRECTION_VALUES.get(row["placeCode"])
)
logger.info(
f"Adjusting threshold for {row['placeCode']} from {ALERT_THRESHOLD_VALUE} to {threshold}"
)
else:
threshold = ALERT_THRESHOLD_VALUE

if pd.isnull(row[ALERT_THRESHOLD_PARAMETER]):
return 0
elif row[
ALERT_THRESHOLD_PARAMETER
] > ALERT_THRESHOLD_VALUE and self.lead_time not in [
elif row[ALERT_THRESHOLD_PARAMETER] > threshold and self.lead_time not in [
"24-hour",
"48-hour",
]:
@@ -114,9 +123,6 @@ def determine_ta_trigger_state(row):
exposed_tas = triggered_tas.loc[triggered_tas["trigger_value"] == 1]

if len(exposed_tas) > 0:
print(distr_name)
print(exposed_tas)

for key, value in EXPOSURE_TYPES.items():
exposure_df = exposed_tas.astype({key: "float"}).astype(
{key: "int"}
@@ -135,7 +141,6 @@ def determine_ta_trigger_state(row):
.to_dict("records")
)
body["date"] = self.date.strftime("%Y-%m-%dT%H:%M:%SZ")
print(body)
api_post_request("admin-area-dynamic-data/exposure", body=body)

body = TA_EXPOSURE_DICT
@@ -149,7 +154,6 @@ def determine_ta_trigger_state(row):
.to_dict("records")
)
body["date"] = self.date.strftime("%Y-%m-%dT%H:%M:%SZ")
print(body)
api_post_request("admin-area-dynamic-data/exposure", body=body)

def expose_point_assets(self):
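
The substantive change in upload_results.py is a per-TA threshold correction: determine_ta_trigger_state now compares against ALERT_THRESHOLD_VALUE plus an optional offset from THRESHOLD_CORRECTION_VALUES. A standalone sketch of that lookup follows; the dict contents and place codes are invented examples, and the baseline of 20 is an assumption based on the hard-coded `> 20` this commit replaces in runPipeline.py.

    ALERT_THRESHOLD_VALUE = 20  # assumed baseline; settings.base holds the real value
    THRESHOLD_CORRECTION_VALUES = {"MW10220": -5}  # hypothetical per-placeCode offset


    def resolve_threshold(place_code):
        """Alert threshold for one TA: the baseline plus an optional correction."""
        if place_code in THRESHOLD_CORRECTION_VALUES:
            return int(ALERT_THRESHOLD_VALUE + THRESHOLD_CORRECTION_VALUES.get(place_code))
        return ALERT_THRESHOLD_VALUE


    assert resolve_threshold("MW10220") == 15  # corrected TA
    assert resolve_threshold("MW10101") == 20  # any other TA keeps the baseline
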
11 changes: 11 additions & 0 deletions flash_flood_pipeline/logger_config/configure_logger.py
@@ -1,4 +1,5 @@
import logging
import datetime


def configure_logger():
@@ -11,13 +12,23 @@ def configure_logger():
level=logging.INFO,
filename="ex.log",
)

# set up logging to console
console = logging.StreamHandler()
console.setLevel(logging.INFO)

log_file = logging.FileHandler(
rf"data/logs/container_log_{datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')}.log"
)
log_file.setLevel(logging.INFO)

# set a format which is simpler for console use
formatter = logging.Formatter("%(asctime)s : %(levelname)s : %(message)s")
console.setFormatter(formatter)
log_file.setFormatter(formatter)

logging.getLogger("").addHandler(console)
logging.getLogger("").addHandler(log_file)

global logger
logger = logging.getLogger(__name__)
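
configure_logger now mirrors every record to a timestamped file alongside the existing console handler. A usage sketch under one assumption worth noting: logging.FileHandler raises FileNotFoundError if data/logs/ does not already exist, so the pipeline presumably creates that directory beforehand.

    import logging
    from logger_config.configure_logger import configure_logger

    configure_logger()
    logger = logging.getLogger(__name__)
    logger.info("pipeline started")
    # appears on the console and in data/logs/container_log_<YYYY_MM_DD_HH_MM_SS>.log as:
    # 2024-12-12 06:00:00,123 : INFO : pipeline started
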
38 changes: 32 additions & 6 deletions flash_flood_pipeline/runPipeline.py
@@ -4,7 +4,13 @@
import geopandas as gpd
import numpy as np
import logging
from settings.base import DATA_FOLDER, ASSET_TYPES, ENVIRONMENT
from settings.base import (
DATA_FOLDER,
ASSET_TYPES,
ENVIRONMENT,
ALERT_THRESHOLD_VALUE,
THRESHOLD_CORRECTION_VALUES,
)
from logger_config.configure_logger import configure_logger
from data_download.collect_data import dataGetter
from data_upload.upload_results import DataUploader
@@ -14,7 +20,7 @@
from utils.vector_utils.combine_vector_data import combine_vector_data
from utils.api import api_post_request

from scenario_selector import scenarioSelector
from scenario_selection.scenario_selector import scenarioSelector
import pandas as pd

import sys
@@ -41,18 +47,25 @@ def determine_trigger_states(
if karonga_events:
karonga_triggered_list = []
for key, value in karonga_events.items():
if key in THRESHOLD_CORRECTION_VALUES:
threshold_value = (
ALERT_THRESHOLD_VALUE + THRESHOLD_CORRECTION_VALUES.get(key)
)
else:
threshold_value = ALERT_THRESHOLD_VALUE
region_file = gpd.read_file(
str(DATA_FOLDER / value / "region_statistics.gpkg")
)
affected_people = region_file[region_file["placeCode"] == key][
"affected_people"
].values[0]

if affected_people is not None:
karonga_triggered_list.append(
region_file[region_file["placeCode"] == key][
"affected_people"
].values[0]
> 20
> threshold_value
)
else:
karonga_triggered_list.append(False)
@@ -63,6 +76,12 @@
if rumphi_events:
rumphi_triggered_list = []
for key, value in rumphi_events.items():
if key in THRESHOLD_CORRECTION_VALUES:
threshold_value = (
ALERT_THRESHOLD_VALUE + THRESHOLD_CORRECTION_VALUES.get(key)
)
else:
threshold_value = ALERT_THRESHOLD_VALUE
region_file = gpd.read_file(
str(DATA_FOLDER / value / "region_statistics.gpkg")
)
@@ -74,7 +93,7 @@
region_file[region_file["placeCode"] == key][
"affected_people"
].values[0]
> 20
> threshold_value
)
else:
rumphi_triggered_list.append(False)
@@ -85,6 +104,13 @@
if blantyre_events:
blantyre_triggered_list = []
for key, value in blantyre_events.items():
if key in THRESHOLD_CORRECTION_VALUES:
threshold_value = (
ALERT_THRESHOLD_VALUE + THRESHOLD_CORRECTION_VALUES.get(key)
)
else:
threshold_value = ALERT_THRESHOLD_VALUE

region_file = gpd.read_file(
str(DATA_FOLDER / value / "region_statistics.gpkg")
)
@@ -96,7 +122,7 @@
region_file[region_file["placeCode"] == key][
"affected_people"
].values[0]
> 20
> threshold_value
)
else:
blantyre_triggered_list.append(False)
@@ -259,7 +285,7 @@ def main():

# step (2): scenarioselector: choose scenario per ta
logger.info("step 2 started: scenario selection")
scenarios_selector = scenarioSelector(gfs_data)
scenarios_selector = scenarioSelector(gfs_data=gfs_data)
(
karonga_leadtime,
karonga_events,
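
In runPipeline.py the same threshold-resolution block is now repeated verbatim for Karonga, Rumphi, and Blantyre. A possible follow-up refactor (hypothetical, not part of this commit) folds that block and the affected-people comparison into one helper built from the settings.base constants the commit already imports:

    import geopandas as gpd

    from settings.base import (
        DATA_FOLDER,
        ALERT_THRESHOLD_VALUE,
        THRESHOLD_CORRECTION_VALUES,
    )


    def district_trigger_states(events):
        """One trigger flag per TA: affected_people above its (corrected) threshold."""
        triggered = []
        for place_code, scenario_dir in events.items():
            # .get(..., 0) collapses the repeated if/else over THRESHOLD_CORRECTION_VALUES
            threshold_value = ALERT_THRESHOLD_VALUE + THRESHOLD_CORRECTION_VALUES.get(place_code, 0)
            region_file = gpd.read_file(str(DATA_FOLDER / scenario_dir / "region_statistics.gpkg"))
            affected_people = region_file.loc[
                region_file["placeCode"] == place_code, "affected_people"
            ].values[0]
            triggered.append(affected_people is not None and affected_people > threshold_value)
        return triggered

Each district loop would then reduce to a single call such as karonga_triggered_list = district_trigger_states(karonga_events).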