Fix for xarray 2024.09.0 (#1920)
Well, this could have been expected from my decision to use a private
function that I myself removed... But I had forgotten that we were
explicitly using it.

So this fixes xclim for xarray 2024.09.0 by wrapping the import of the
decimal-year helper in a try-except.

Also removed the deprecated `datetime_to_decimal_year` function.

As of xarray 2024.09, the decimal year can be computed through
`times.dt.decimal_year`, so there is no need to use a private function.
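
For illustration, a minimal sketch of the compatibility guard (the same try/except pattern appears in the `xclim/indices/helpers.py` hunk further down); the `to_decimal_year` wrapper name is just a stand-in for this example:

```python
import xarray as xr

try:
    # Private helper that was removed upstream in xarray 2024.09.0.
    from xarray.coding.calendar_ops import (
        _datetime_to_decimal_year as datetime_to_decimal_year,
    )
except ImportError:
    XR2409 = True  # xarray >= 2024.09: fall back to the public accessor.
else:
    XR2409 = False


def to_decimal_year(times: xr.DataArray) -> xr.DataArray:
    """Return decimal years for a datetime coordinate on either side of xarray 2024.09."""
    if XR2409:
        return times.dt.decimal_year
    return datetime_to_decimal_year(times=times, calendar=times.dt.calendar)
```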

This also adds the missing `branch` specifiers to `open_dataset` calls
in the notebooks.
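
As a reference, the updated notebook calls look like the sketch below; the dataset path and branch tag are those used in the notebook diffs further down:

```python
from xclim.testing import open_dataset

# Pin the testing-data branch explicitly, as the notebooks now do.
ds = open_dataset(
    "ERA5/daily_surface_cancities_1990-1993.nc",
    branch="v2023.12.14",
)
```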

---------

Co-authored-by: Trevor James Smith <[email protected]>
aulemahal and Zeitsperre authored Sep 16, 2024
1 parent 9fc081d commit 1456127
Showing 11 changed files with 71 additions and 41 deletions.
9 changes: 9 additions & 0 deletions CHANGELOG.rst
@@ -2,6 +2,15 @@
Changelog
=========

v0.52.2 (2024-09-13)
--------------------
Contributors to this version: Pascal Bourgault (:user:`aulemahal`).

Bug fixes
^^^^^^^^^
* Fixed the ``decimal_year`` import and fixed the functions ``rate2amount``, ``amount2rate``, ``time_bnds`` and ``stack_periods`` for xarray 2024.09.0. Removed ``datetime_to_decimal_year``, as the mirrored xarray function was replaced by ``ds.time.dt.decimal_year``. (:pull:`1920`).


v0.52.1 (2024-09-11)
--------------------
Contributors to this version: Trevor James Smith (:user:`Zeitsperre`).
2 changes: 2 additions & 0 deletions docs/notebooks/analogs.ipynb
@@ -49,6 +49,7 @@
"source": [
"sim = open_dataset(\n",
" \"SpatialAnalogs/CanESM2_ScenGen_Chibougamau_2041-2070.nc\",\n",
" branch=\"v2023.12.14\",\n",
" decode_timedelta=False,\n",
")\n",
"sim"
@@ -73,6 +74,7 @@
"source": [
"obs = open_dataset(\n",
" \"SpatialAnalogs/NRCAN_SECan_1981-2010.nc\",\n",
" branch=\"v2023.12.14\",\n",
" decode_timedelta=False,\n",
")\n",
"obs"
2 changes: 1 addition & 1 deletion docs/notebooks/extendxclim.ipynb
@@ -468,7 +468,7 @@
"source": [
"from xclim.testing import open_dataset\n",
"\n",
"ds = open_dataset(\"ERA5/daily_surface_cancities_1990-1993.nc\")\n",
"ds = open_dataset(\"ERA5/daily_surface_cancities_1990-1993.nc\", branch=\"v2023.12.14\")\n",
"with xr.set_options(keep_attrs=True):\n",
" ds2 = ds.assign(\n",
" pr_per=xc.core.calendar.percentile_doy(ds.pr, window=5, per=75).isel(\n",
28 changes: 20 additions & 8 deletions docs/notebooks/sdba-advanced.ipynb
@@ -436,10 +436,10 @@
"\n",
"group = sdba.Grouper(\"time.dayofyear\", window=31)\n",
"\n",
"dref = convert_calendar(open_dataset(\"sdba/ahccd_1950-2013.nc\"), \"noleap\").sel(\n",
" time=slice(\"1981\", \"2010\")\n",
")\n",
"dsim = open_dataset(\"sdba/CanESM2_1950-2100.nc\")\n",
"dref = convert_calendar(\n",
" open_dataset(\"sdba/ahccd_1950-2013.nc\", branch=\"v2023.12.14\"), \"noleap\"\n",
").sel(time=slice(\"1981\", \"2010\"))\n",
"dsim = open_dataset(\"sdba/CanESM2_1950-2100.nc\", branch=\"v2023.12.14\")\n",
"\n",
"dref = dref.assign(\n",
" tasmax=convert_units_to(dref.tasmax, \"K\"),\n",
@@ -756,10 +756,20 @@
"from xclim.testing import open_dataset\n",
"\n",
"# load test data\n",
"hist = open_dataset(\"sdba/CanESM2_1950-2100.nc\").sel(time=slice(\"1950\", \"1980\")).tasmax\n",
"ref = open_dataset(\"sdba/nrcan_1950-2013.nc\").sel(time=slice(\"1950\", \"1980\")).tasmax\n",
"hist = (\n",
" open_dataset(\"sdba/CanESM2_1950-2100.nc\", branch=\"v2023.12.14\")\n",
" .sel(time=slice(\"1950\", \"1980\"))\n",
" .tasmax\n",
")\n",
"ref = (\n",
" open_dataset(\"sdba/nrcan_1950-2013.nc\", branch=\"v2023.12.14\")\n",
" .sel(time=slice(\"1950\", \"1980\"))\n",
" .tasmax\n",
")\n",
"sim = (\n",
" open_dataset(\"sdba/CanESM2_1950-2100.nc\").sel(time=slice(\"1980\", \"2010\")).tasmax\n",
" open_dataset(\"sdba/CanESM2_1950-2100.nc\", branch=\"v2023.12.14\")\n",
" .sel(time=slice(\"1980\", \"2010\"))\n",
" .tasmax\n",
") # biased\n",
"\n",
"# learn the bias in historical simulation compared to reference\n",
@@ -770,7 +780,9 @@
"# correct the bias in the future\n",
"scen = QM.adjust(sim, extrapolation=\"constant\", interp=\"nearest\")\n",
"ref_future = (\n",
" open_dataset(\"sdba/nrcan_1950-2013.nc\").sel(time=slice(\"1980\", \"2010\")).tasmax\n",
" open_dataset(\"sdba/nrcan_1950-2013.nc\", branch=\"v2023.12.14\")\n",
" .sel(time=slice(\"1980\", \"2010\"))\n",
" .tasmax\n",
") # truth\n",
"\n",
"plt.figure(figsize=(15, 5))\n",
10 changes: 6 additions & 4 deletions docs/notebooks/sdba.ipynb
@@ -459,9 +459,9 @@
"from xclim.core.units import convert_units_to\n",
"from xclim.testing import open_dataset\n",
"\n",
"dref = open_dataset(\"sdba/ahccd_1950-2013.nc\", drop_variables=[\"lat\", \"lon\"]).sel(\n",
" time=slice(\"1981\", \"2010\")\n",
")\n",
"dref = open_dataset(\n",
" \"sdba/ahccd_1950-2013.nc\", branch=\"v2023.12.14\", drop_variables=[\"lat\", \"lon\"]\n",
").sel(time=slice(\"1981\", \"2010\"))\n",
"\n",
"# Fix the standard name of the `pr` variable.\n",
"# This allows the convert_units_to below to infer the correct CF transformation (precip rate to flux)\n",
@@ -472,7 +472,9 @@
" tasmax=convert_units_to(dref.tasmax, \"K\"),\n",
" pr=convert_units_to(dref.pr, \"kg m-2 s-1\"),\n",
")\n",
"dsim = open_dataset(\"sdba/CanESM2_1950-2100.nc\", drop_variables=[\"lat\", \"lon\"])\n",
"dsim = open_dataset(\n",
" \"sdba/CanESM2_1950-2100.nc\", branch=\"v2023.12.14\", drop_variables=[\"lat\", \"lon\"]\n",
")\n",
"\n",
"dhist = dsim.sel(time=slice(\"1981\", \"2010\"))\n",
"dsim = dsim.sel(time=slice(\"2041\", \"2070\"))\n",
3 changes: 1 addition & 2 deletions docs/notebooks/units.ipynb
@@ -18,7 +18,6 @@
"import xarray as xr\n",
"\n",
"import xclim\n",
"from xclim import indices\n",
"from xclim.core import units\n",
"from xclim.testing import open_dataset\n",
"\n",
@@ -100,7 +99,7 @@
"metadata": {},
"outputs": [],
"source": [
"ds = open_dataset(\"ERA5/daily_surface_cancities_1990-1993.nc\")\n",
"ds = open_dataset(\"ERA5/daily_surface_cancities_1990-1993.nc\", branch=\"v2023.12.14\")\n",
"ds.pr.attrs"
]
},
4 changes: 3 additions & 1 deletion docs/notebooks/usage.ipynb
@@ -48,7 +48,9 @@
"# ds = xr.open_dataset(\"your_file.nc\")\n",
"\n",
"# For this example, let's use a test dataset from xclim:\n",
"ds = testing.open_dataset(\"ERA5/daily_surface_cancities_1990-1993.nc\")\n",
"ds = testing.open_dataset(\n",
" \"ERA5/daily_surface_cancities_1990-1993.nc\", branch=\"v2023.12.14\"\n",
")\n",
"ds.tas"
]
},
7 changes: 7 additions & 0 deletions tests/test_testing_utils.py
@@ -41,6 +41,9 @@ def test_get_failure(self, tmp_path):
tmp_path,
)

@pytest.mark.xfail(
reason="This test can no longer pass due to upstream changes and has been removed on main."
)
@pytest.mark.requires_internet
def test_open_dataset_with_bad_file(self, tmp_path):
cmip3_folder = tmp_path.joinpath("main", "cmip3")
@@ -75,6 +78,10 @@ def test_open_dataset_with_bad_file(self, tmp_path):
== Path(cmip3_folder, cmip3_md5).read_text()
)

@pytest.mark.xfail(
reason="The underlying engine here has been significantly modified. "
"This test needs to be fully rewritten in xclim v0.53.0+."
)
@pytest.mark.requires_internet
def test_open_testdata(self):
ds = utilities.open_dataset(
24 changes: 4 additions & 20 deletions xclim/core/calendar.py
@@ -37,7 +37,6 @@
"convert_doy",
"date_range",
"date_range_like",
"datetime_to_decimal_year",
"days_in_year",
"days_since_to_doy",
"doy_from_string",
@@ -403,21 +402,6 @@ def ensure_cftime_array(time: Sequence) -> np.ndarray | Sequence[cftime.datetime
raise ValueError("Unable to cast array to cftime dtype")


def datetime_to_decimal_year(times: xr.DataArray, calendar: str = "") -> xr.DataArray:
"""Deprecated : use :py:func:`xarray.coding.calendar_ops_datetime_to_decimal_year` instead.
Convert a datetime xr.DataArray to decimal years according to its calendar or the given one.
"""
_, _ = _get_usecf_and_warn(
"standard",
"datetime_to_decimal_year",
"xarray.coding.calendar_ops._datetime_to_decimal_year",
)
return xr.coding.calendar_ops._datetime_to_decimal_year(
times, dim="time", calendar=calendar
)


@update_xclim_history
def percentile_doy(
arr: xr.DataArray,
@@ -864,7 +848,7 @@ def time_bnds(  # noqa: C901
time = time.indexes[time.name]
elif isinstance(time, (DataArrayResample, DatasetResample)):
for grouper in time.groupers:
if "time" in grouper.dims:
if isinstance(grouper.grouper, xr.groupers.TimeResampler):
datetime = grouper.unique_coord.data
freq = freq or grouper.grouper.freq
if datetime.dtype == "O":
@@ -1457,13 +1441,13 @@ def stack_periods(
for _, strd_slc in da.resample(time=strd_frq).groups.items():
win_resamp = time2.isel(time=slice(strd_slc.start, None)).resample(time=win_frq)
# Get slice for first group
win_slc = win_resamp._group_indices[0]
win_slc = list(win_resamp.groups.values())[0]
if min_length < window:
# If we ask for a min_length period instead is it complete ?
min_resamp = time2.isel(time=slice(strd_slc.start, None)).resample(
time=minl_frq
)
min_slc = min_resamp._group_indices[0]
min_slc = list(min_resamp.groups.values())[0]
open_ended = min_slc.stop is None
else:
# The end of the group slice is None if no outside-group value was found after the last element
@@ -1646,7 +1630,7 @@ def _reconstruct_time(_time_as_delta, _start):
periods = []
for i, (start, length) in enumerate(zip(starts.values, lengths.values)):
real_time = _reconstruct_time(time_as_delta, start)
slices = real_time.resample(time=strd_frq)._group_indices
slices = list(real_time.resample(time=strd_frq).groups.values())
if i == 0:
slc = slice(slices[0].start, min(slices[mid].stop, length))
elif i == da.period.size - 1:
6 changes: 5 additions & 1 deletion xclim/core/units.py
@@ -17,6 +17,7 @@

import cf_xarray.units
import numpy as np
import pandas as pd
import pint
import xarray as xr
from boltons.funcutils import wraps
@@ -646,7 +647,10 @@ def _rate_and_amount_converter(
start = time.indexes[dim][0]
if not start_anchor:
# Anchor is on the end of the period, subtract 1 period.
start = start - xr.coding.cftime_offsets.to_offset(freq)
if isinstance(start, pd.Timestamp):
start = start - pd.tseries.frequencies.to_offset(freq)
else:
start = start - xr.coding.cftime_offsets.to_offset(freq)
# In the diff below, assign to upper label!
label = "upper"
# We generate "time" with an extra element, so we do not need to repeat the last element below.
17 changes: 13 additions & 4 deletions xclim/indices/helpers.py
@@ -16,9 +16,15 @@
import numba as nb
import numpy as np
import xarray as xr
from xarray.coding.calendar_ops import (
_datetime_to_decimal_year as datetime_to_decimal_year,
)

try:
from xarray.coding.calendar_ops import (
_datetime_to_decimal_year as datetime_to_decimal_year,
)
except ImportError:
XR2409 = True
else:
XR2409 = False

from xclim.core.calendar import ensure_cftime_array, get_calendar
from xclim.core.units import convert_units_to
@@ -69,7 +75,10 @@ def day_angle(time: xr.DataArray):
the beginning of the year up to that timestep. Also called the "julian day fraction".
See :py:func:`~xclim.core.calendar.datetime_to_decimal_year`.
"""
decimal_year = datetime_to_decimal_year(times=time, calendar=time.dt.calendar)
if XR2409:
decimal_year = time.dt.decimal_year
else:
decimal_year = datetime_to_decimal_year(times=time, calendar=time.dt.calendar)
return ((decimal_year % 1) * 2 * np.pi).assign_attrs(units="rad")

