
Commit 0811c5c

Merge pull request #22 from ecmwf/develop

Release 0.6.0: interactive tool

corentincarton authored Nov 16, 2023
2 parents 058bf6d + 41e690c
Showing 26 changed files with 1,643 additions and 411 deletions.
.github/workflows/ci.yml: 1 addition, 6 deletions

@@ -28,16 +28,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - uses: conda-incubator/setup-miniconda@v2
+      - uses: mamba-org/setup-micromamba@v1
         with:
           activate-environment: test
           environment-file: environment.yml
           auto-activate-base: false
-      - name: conda check
-        shell: bash -l {0}
-        run: |
-          conda info
-          conda list
       - name: install hat package
         shell: bash -l {0}
         run: pip install .
.github/workflows/on-push.yml: 1 addition, 6 deletions

@@ -32,16 +32,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - uses: conda-incubator/setup-miniconda@v2
+      - uses: mamba-org/setup-micromamba@v1
         with:
           activate-environment: test
           environment-file: environment.yml
           auto-activate-base: false
-      - name: Conda check
-        shell: bash -l {0}
-        run: |
-          conda info
-          conda list
       - name: install hat package
         shell: bash -l {0}
         run: pip install .
.pre-commit-config.yaml: 4 additions, 4 deletions

@@ -7,7 +7,7 @@ repos:
     rev: 23.3.0
     hooks:
       - id: black
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
+  # - repo: https://github.com/PyCQA/flake8
+  #   rev: 6.0.0
+  #   hooks:
+  #     - id: flake8
README.md: 2 additions, 2 deletions

@@ -14,7 +14,7 @@ Interfaces and functionality are likely to change, and the project itself may be

 Clone source code repository

-    $ git clone git@github.com:ecmwf-projects/hat.git
+    $ git clone https://github.com/ecmwf/hat.git

 Create conda python environment

@@ -63,4 +63,4 @@ does it submit to any jurisdiction.

 ### Citing

-In publications, please use a link to this repository (https://github.com/ecmwf/hat) and its documentation (https://hydro-analysis-toolkit.readthedocs.io)
\ No newline at end of file
+In publications, please use a link to this repository (https://github.com/ecmwf/hat) and its documentation (https://hydro-analysis-toolkit.readthedocs.io)
environment.yml: 4 additions, 4 deletions

@@ -2,21 +2,21 @@ name: hat
 channels:
   - conda-forge
 dependencies:
-  - python=3.10
+  - python<=3.10
   - netCDF4
   - eccodes
   - cfgrib
   - cftime
   - geopandas
   - xarray
-  - plotly
   - matplotlib
-  - jupyterlab
+  - jupyter
   - tqdm
-  - typer
   - humanize
   - folium
   - typer
+  - ipyleaflet
+  - ipywidgets
   - pip
   # - pytest
   # - mkdocs
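The new ipyleaflet and ipywidgets dependencies back the interactive tool named in the release title. A minimal, hypothetical notebook sketch of the kind of widget they enable (the coordinates and station name are made up, not taken from this PR):

    from ipyleaflet import Map, Marker
    from ipywidgets import HTML

    # Arbitrary map center; a real tool would center on its station network.
    m = Map(center=(48.2, 16.4), zoom=5)

    marker = Marker(location=(48.2, 16.4), draggable=False)
    marker.popup = HTML(value="station: example_station")
    m.add_layer(marker)

    m  # displaying the Map object renders it in a Jupyter notebook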
hat/clock.py: 0 additions, 71 deletions

This file was deleted.
hat/data.py: 5 additions, 2 deletions

@@ -37,6 +37,9 @@ def get_tmpdir():
     else:
         tmpdir = TemporaryDirectory().name

+    # Ensure the directory exists
+    os.makedirs(tmpdir, exist_ok=True)
+
     return tmpdir
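The makedirs guard addresses a subtlety of tempfile: TemporaryDirectory() creates a directory, but the object's finalizer deletes it again as soon as the object is garbage-collected, so keeping only its .name can leave a dangling path. A minimal sketch of the failure mode and the fix (illustrative, not code from this PR):

    import os
    from tempfile import TemporaryDirectory

    # The TemporaryDirectory object is unreferenced immediately, so CPython
    # collects it and its finalizer removes the directory it just created.
    tmpdir = TemporaryDirectory().name

    # Recreate the path so later writes into tmpdir succeed.
    os.makedirs(tmpdir, exist_ok=True)
    assert os.path.isdir(tmpdir)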


@@ -257,8 +260,8 @@ def save_dataset_to_netcdf(ds: xr.Dataset, fpath: str):
     ds.to_netcdf(fpath)


-def find_main_var(ds):
-    variable_names = [k for k in ds.variables if len(ds.variables[k].dims) >= 3]
+def find_main_var(ds, min_dim=3):
+    variable_names = [k for k in ds.variables if len(ds.variables[k].dims) >= min_dim]
     if len(variable_names) > 1:
         raise Exception("More than one variable in dataset")
     elif len(variable_names) == 0:
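The new min_dim parameter generalizes find_main_var beyond gridded variables: with min_dim=2 it can pick out a station timeseries variable with dims (time, station), where the old hard-coded >= 3 required something like (time, lat, lon). A hedged usage sketch, assuming the function returns the matching variable's name:

    import numpy as np
    import xarray as xr
    from hat.data import find_main_var

    # One 2-D data variable: discharge observations indexed by (time, station).
    ds = xr.Dataset(
        {"obsdis": (("time", "station"), np.random.rand(10, 3))},
        coords={"time": np.arange(10), "station": ["a", "b", "c"]},
    )

    find_main_var(ds, min_dim=2)  # matches "obsdis"
    # find_main_var(ds) would find no variable with >= 3 dims and raise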
hat/filters.py: 18 additions, 12 deletions

@@ -144,7 +144,7 @@ def filter_dataframe(df, filters: str):
     return df


-def filter_timeseries(sims_ds: xr.Dataset, obs_ds: xr.Dataset, threshold=80):
+def filter_timeseries(sims_ds: xr.DataArray, obs_ds: xr.DataArray, threshold=80):
     """Clean the simulation and observation timeseries

     Only keep..

@@ -159,30 +159,36 @@ def filter_timeseries(sims_ds: xr.Dataset, obs_ds: xr.Dataset, threshold=80):
     matching_stations = sorted(
         set(sims_ds.station.values).intersection(obs_ds.station.values)
     )
+    print(len(matching_stations))
     sims_ds = sims_ds.sel(station=matching_stations)
     obs_ds = obs_ds.sel(station=matching_stations)
+    obs_ds = obs_ds.sel(time=sims_ds.time)
+
+    obs_ds = obs_ds.dropna(dim="station", how="all")
+    sims_ds = sims_ds.sel(station=obs_ds.station)

-    # Only keep observations in the same time period as the simulations
-    obs_ds = obs_ds.where(sims_ds.time == obs_ds.time, drop=True)
+    # obs_ds = obs_ds.where(sims_ds.time == obs_ds.time, drop=True)

     # Only keep obsevations with enough valid data in this timeperiod

     # discharge data
     dis = obs_ds.obsdis
+    print(sims_ds)
+    print(obs_ds)

     # Replace negative values with NaN
-    dis = dis.where(dis >= 0)
+    # dis = dis.where(dis >= 0)

-    # Percentage of valid discharge data at each point in time
-    valid_percent = dis.notnull().mean(dim="time") * 100
+    # # Percentage of valid discharge data at each point in time
+    # valid_percent = dis.notnull().mean(dim="time") * 100

-    # Boolean index of where there is enough valid data
-    enough_observation_data = valid_percent > threshold
+    # # Boolean index of where there is enough valid data
+    # enough_observation_data = valid_percent > threshold

-    # keep where there is enough observation data
-    obs_ds = obs_ds.where(enough_observation_data, drop=True)
+    # # keep where there is enough observation data
+    # obs_ds = obs_ds.where(enough_observation_data, drop=True)

-    # keep simulation that match remaining observations
-    sims_ds = sims_ds.where(enough_observation_data, drop=True)
+    # # keep simulation that match remaining observations
+    # sims_ds = sims_ds.where(enough_observation_data, drop=True)

     return (sims_ds, obs_ds)
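Net effect of the rewrite: filter_timeseries now only aligns the two arrays (common stations, simulation time steps, drop all-NaN stations); the threshold-based valid-data filter is commented out, so the threshold argument is currently inert. A hedged sketch of the equivalent alignment logic (an illustration, not the committed code):

    import xarray as xr

    def align_sims_and_obs(sims: xr.DataArray, obs: xr.DataArray):
        """Keep stations present in both arrays, restrict observations to
        the simulation timestamps, and drop stations with no data at all."""
        stations = sorted(set(sims.station.values) & set(obs.station.values))
        sims = sims.sel(station=stations)
        obs = obs.sel(station=stations, time=sims.time)
        obs = obs.dropna(dim="station", how="all")  # all-NaN stations removed
        sims = sims.sel(station=obs.station)        # mirror the drop in sims
        return sims, obs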
hat/graphs.py: 0 additions, 30 deletions

This file was deleted.
(Diffs for the remaining changed files are not shown.)