Skip to content

Commit

Permalink
Merge branch 'main' into spec0-calendar
Browse files Browse the repository at this point in the history
  • Loading branch information
Zeitsperre authored Sep 17, 2024
2 parents 20d45b3 + 08656c1 commit 9245be5
Show file tree
Hide file tree
Showing 9 changed files with 76 additions and 65 deletions.
17 changes: 17 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ New features and enhancements
Bug fixes
^^^^^^^^^
* Fixed a small inefficiency in ``_otc_adjust``, and the `standardize` method of `OTC/dOTC` is now applied to individual variables (:pull:`1890`, :pull:`1896`).
* Removed deprecated cells in the tutorial notebook `sdba.ipynb` (:pull:`1895`).

Breaking changes
^^^^^^^^^^^^^^^^
Expand All @@ -49,6 +50,22 @@ Internal changes
* ``xclim.core`` now uses absolute imports for clarity and some objects commonly used in the module have been moved to hidden submodules. (:issue:`1719`, :pull:`1881`).
* ``xclim.core.indicator.Parameter`` has a new attribute ``compute_name`` while ``xclim.core.indicator.Indicator`` lost its ``_variable_mapping``. The translation from parameter (and variable) names in the indicator to the names on the compute function is handled by ``Indicator._get_compute_args``. (:pull:`1885`).

v0.52.2 (2024-09-16)
--------------------
Contributors to this version: Pascal Bourgault (:user:`aulemahal`).

Bug fixes
^^^^^^^^^
* Fixed ``decimal_year`` import, fixed functions ``rate2amount``, ``amount2rate``, ``time_bnds`` and ``stack_periods`` for `xarray` version 2024.09.0. Removed ``datetime_to_decimal_year`` as the mirrored `xarray` function was replaced by ``ds.time.dt.decimal_year``. (:pull:`1920`).

v0.52.1 (2024-09-11)
--------------------
Contributors to this version: Trevor James Smith (:user:`Zeitsperre`).

Bug fixes
^^^^^^^^^
* Adjusted the required base version of `pyarrow` to be `v10.0.1` to address an environment resolution error on conda-forge. (:pull:`1918`).

v0.52.0 (2024-08-08)
--------------------
Contributors to this version: David Huard (:user:`huard`), Trevor James Smith (:user:`Zeitsperre`), Hui-Min Wang (:user:`Hem-W`), Éric Dupuis (:user:`coxipi`), Sarah Gammon (:user:`SarahG-579462`), Pascal Bourgault (:user:`aulemahal`), Juliette Lavoie (:user:`juliettelavoie`), Adrien Lamarche (:user:`LamAdr`).
Expand Down
83 changes: 33 additions & 50 deletions docs/notebooks/sdba.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -641,52 +641,30 @@
"metadata": {},
"outputs": [],
"source": [
"dref[\"pr\"] = dref.pr.chunk({\"location\": 1})\n",
"dref[\"tasmax\"] = dref.tasmax.chunk({\"location\": 1})\n",
"from xclim.core.units import convert_units_to\n",
"from xclim.testing import open_dataset\n",
"\n",
"dhist[\"pr\"] = dhist.pr.chunk({\"location\": 1})\n",
"dhist[\"tasmax\"] = dhist.tasmax.chunk({\"location\": 1})\n",
"dref = open_dataset(\n",
" \"sdba/ahccd_1950-2013.nc\", chunks={\"location\": 1}, drop_variables=[\"lat\", \"lon\"]\n",
").sel(time=slice(\"1981\", \"2010\"))\n",
"\n",
"dsim[\"pr\"] = dsim.pr.chunk({\"location\": 1})\n",
"dsim[\"tasmax\"] = dsim.tasmax.chunk({\"location\": 1})"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"##### Perform an initial univariate adjustment."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# additive for tasmax\n",
"QDMtx = sdba.QuantileDeltaMapping.train(\n",
" dref.tasmax, dhist.tasmax, nquantiles=20, kind=\"+\", group=\"time\"\n",
"# Fix the standard name of the `pr` variable.\n",
"# This allows the convert_units_to below to infer the correct CF transformation (precip rate to flux)\n",
"# see the \"Unit handling\" notebook\n",
"dref.pr.attrs[\"standard_name\"] = \"lwe_precipitation_rate\"\n",
"\n",
"dref = dref.assign(\n",
" tasmax=convert_units_to(dref.tasmax, \"K\"),\n",
" pr=convert_units_to(dref.pr, \"kg m-2 s-1\"),\n",
")\n",
"# Adjust both hist and sim, we'll feed both to the Npdf transform.\n",
"scenh_tx = QDMtx.adjust(dhist.tasmax)\n",
"scens_tx = QDMtx.adjust(dsim.tasmax)\n",
"\n",
"# remove == 0 values in pr:\n",
"dref[\"pr\"] = sdba.processing.jitter_under_thresh(dref.pr, \"0.01 mm d-1\")\n",
"dhist[\"pr\"] = sdba.processing.jitter_under_thresh(dhist.pr, \"0.01 mm d-1\")\n",
"dsim[\"pr\"] = sdba.processing.jitter_under_thresh(dsim.pr, \"0.01 mm d-1\")\n",
"\n",
"# multiplicative for pr\n",
"QDMpr = sdba.QuantileDeltaMapping.train(\n",
" dref.pr, dhist.pr, nquantiles=20, kind=\"*\", group=\"time\"\n",
"dsim = open_dataset(\n",
" \"sdba/CanESM2_1950-2100.nc\", chunks={\"location\": 1}, drop_variables=[\"lat\", \"lon\"]\n",
")\n",
"# Adjust both hist and sim, we'll feed both to the Npdf transform.\n",
"scenh_pr = QDMpr.adjust(dhist.pr)\n",
"scens_pr = QDMpr.adjust(dsim.pr)\n",
"\n",
"# Stack variables : Dataset -> DataArray with `multivar` dimension\n",
"dref, dhist, dsim = (sdba.stack_variables(da) for da in (dref, dhist, dsim))"
"dhist = dsim.sel(time=slice(\"1981\", \"2010\"))\n",
"dsim = dsim.sel(time=slice(\"2041\", \"2070\"))\n",
"\n",
"dref"
]
},
{
Expand All @@ -702,9 +680,14 @@
"metadata": {},
"outputs": [],
"source": [
"# Stack variables : Dataset -> DataArray with `multivar` dimension\n",
"ref = sdba.processing.stack_variables(dref)\n",
"hist = sdba.processing.stack_variables(dhist)\n",
"sim = sdba.processing.stack_variables(dsim)\n",
"\n",
"ADJ = sdba.MBCn.train(\n",
" dref,\n",
" dhist,\n",
" ref,\n",
" hist,\n",
" base_kws={\"nquantiles\": 20, \"group\": \"time\"},\n",
" adj_kws={\"interp\": \"nearest\", \"extrapolation\": \"constant\"},\n",
"    n_iter=20,  # perform 20 iterations\n",
Expand All @@ -714,8 +697,8 @@
"scenh, scens = (\n",
" ADJ.adjust(\n",
" sim=ds,\n",
" ref=dref,\n",
" hist=dhist,\n",
" ref=ref,\n",
" hist=hist,\n",
" base=sdba.QuantileDeltaMapping,\n",
" base_kws_vars={\n",
" \"pr\": {\n",
Expand All @@ -725,9 +708,9 @@
" },\n",
" \"tasmax\": {\"kind\": \"+\"},\n",
" },\n",
" adj_kws={\"interp\": \"nearest\", \"extrapolation\": \"constant\"},\n",
" adj_kws={\"interp\": \"linear\", \"extrapolation\": \"constant\"},\n",
" )\n",
" for ds in (dhist, dsim)\n",
" for ds in (hist, sim)\n",
")"
]
},
Expand Down Expand Up @@ -767,12 +750,12 @@
"outputs": [],
"source": [
"fig, axs = plt.subplots(1, 2, figsize=(16, 4))\n",
"for da, label in zip((dref, scenh, dhist), (\"Reference\", \"Adjusted\", \"Simulated\")):\n",
"for da, label in zip((ref, scenh, hist), (\"Reference\", \"Adjusted\", \"Simulated\")):\n",
" ds = sdba.unstack_variables(da).isel(location=2)\n",
" # time series - tasmax\n",
" ds.tasmax.plot(ax=axs[0], label=label, alpha=0.65 if label == \"Adjusted\" else 1)\n",
" # scatter plot\n",
" ds.plot.scatter(x=\"pr\", y=\"tasmax\", ax=axs[1], label=label)\n",
" ds.plot.scatter(x=\"pr\", y=\"tasmax\", ax=axs[1], label=label, marker=\".\", s=40)\n",
"axs[0].legend()\n",
"axs[1].legend()"
]
Expand Down Expand Up @@ -808,7 +791,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
"version": "3.12.5"
},
"toc": {
"base_numbering": 1,
Expand Down
1 change: 1 addition & 0 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ dependencies:
- nbsphinx
- nbval >=0.11.0
- nc-time-axis >=1.4.1
- netcdf4 # Required for some Jupyter notebooks
- notebook
- pandas-stubs >=2.2
- pooch >=1.8.0
Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ dependencies = [
"packaging >=24.0",
"pandas >=2.2",
"pint >=0.18",
"pyarrow >=15.0.0", # Strongly encouraged for pandas v2.2.0+
"pyarrow >=10.0.1", # Strongly encouraged for pandas v2.2.0+
"pyyaml >=6.0.1",
"scikit-learn >=1.1.0",
"scipy >=1.9.0",
Expand Down Expand Up @@ -136,7 +136,7 @@ target-version = [
]

[tool.bumpversion]
current_version = "0.52.1-dev.5"
current_version = "0.52.3-dev.0"
commit = true
commit_args = "--no-verify"
tag = false
Expand Down
3 changes: 0 additions & 3 deletions tests/test_testing_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,9 +37,6 @@ def file_md5_checksum(f_name):
hash_md5.update(f.read())
return hash_md5.hexdigest()

@pytest.mark.skip(
"This test has been significantly modified. Will adjust when #1889 is merged."
)
@pytest.mark.requires_internet
def test_open_testdata(
self,
Expand Down
2 changes: 1 addition & 1 deletion xclim/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

__author__ = """Travis Logan"""
__email__ = "[email protected]"
__version__ = "0.52.1-dev.5"
__version__ = "0.52.3-dev.0"


with _resources.as_file(_resources.files("xclim.data")) as _module_data:
Expand Down
8 changes: 4 additions & 4 deletions xclim/core/calendar.py
Original file line number Diff line number Diff line change
Expand Up @@ -773,7 +773,7 @@ def time_bnds( # noqa: C901
time = time.indexes[time.name]
elif isinstance(time, (DataArrayResample, DatasetResample)):
for grouper in time.groupers:
if "time" in grouper.dims:
if isinstance(grouper.grouper, xr.groupers.TimeResampler):
datetime = grouper.unique_coord.data
freq = freq or grouper.grouper.freq
if datetime.dtype == "O":
Expand Down Expand Up @@ -1354,13 +1354,13 @@ def stack_periods(
for _, strd_slc in da.resample(time=strd_frq).groups.items():
win_resamp = time2.isel(time=slice(strd_slc.start, None)).resample(time=win_frq)
# Get slice for first group
win_slc = win_resamp._group_indices[0]
win_slc = list(win_resamp.groups.values())[0]
if min_length < window:
# If we ask for a min_length period instead is it complete ?
min_resamp = time2.isel(time=slice(strd_slc.start, None)).resample(
time=minl_frq
)
min_slc = min_resamp._group_indices[0]
min_slc = list(min_resamp.groups.values())[0]
open_ended = min_slc.stop is None
else:
# The end of the group slice is None if no outside-group value was found after the last element
Expand Down Expand Up @@ -1545,7 +1545,7 @@ def _reconstruct_time(_time_as_delta, _start):
periods = []
for i, (start, length) in enumerate(zip(starts.values, lengths.values)):
real_time = _reconstruct_time(time_as_delta, start)
slices = real_time.resample(time=strd_frq)._group_indices
slices = list(real_time.resample(time=strd_frq).groups.values())
if i == 0:
slc = slice(slices[0].start, min(slices[mid].stop, length))
elif i == da.period.size - 1:
Expand Down
6 changes: 5 additions & 1 deletion xclim/core/units.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

import cf_xarray.units
import numpy as np
import pandas as pd
import pint
import xarray as xr
from boltons.funcutils import wraps
Expand Down Expand Up @@ -650,7 +651,10 @@ def _rate_and_amount_converter(
start = time.indexes[dim][0]
if not start_anchor:
# Anchor is on the end of the period, subtract 1 period.
start = start - xr.coding.cftime_offsets.to_offset(freq)
if isinstance(start, pd.Timestamp):
start = start - pd.tseries.frequencies.to_offset(freq)
else:
start = start - xr.coding.cftime_offsets.to_offset(freq)
# In the diff below, assign to upper label!
label = "upper"
# We generate "time" with an extra element, so we do not need to repeat the last element below.
Expand Down
17 changes: 13 additions & 4 deletions xclim/indices/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,15 @@
import numba as nb
import numpy as np
import xarray as xr
from xarray.coding.calendar_ops import (
_datetime_to_decimal_year as datetime_to_decimal_year,
)

try:
from xarray.coding.calendar_ops import (
_datetime_to_decimal_year as datetime_to_decimal_year,
)
except ImportError:
XR2409 = True
else:
XR2409 = False

from xclim.core import Quantified
from xclim.core.calendar import ensure_cftime_array, get_calendar
Expand Down Expand Up @@ -70,7 +76,10 @@ def day_angle(time: xr.DataArray):
the beginning of the year up to that timestep. Also called the "julian day fraction".
See :py:func:`~xclim.core.calendar.datetime_to_decimal_year`.
"""
decimal_year = datetime_to_decimal_year(times=time, calendar=time.dt.calendar)
if XR2409:
decimal_year = time.dt.decimal_year
else:
decimal_year = datetime_to_decimal_year(times=time, calendar=time.dt.calendar)
return ((decimal_year % 1) * 2 * np.pi).assign_attrs(units="rad")


Expand Down

0 comments on commit 9245be5

Please sign in to comment.