fix linting issues raised from new pylint and codespell versions (#1772)
### What kind of change does this PR introduce?

* Fixes new errors raised by the newer versions of `pylint` and `codespell` (a sketch of the flagged pattern is shown below)
* Sets the doctest examples to all use `h5netcdf` with separate per-worker caches to load datasets (see the second sketch below)

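The doctest change replaces direct calls to `xr.open_dataset(...)` with a testing helper `open_dataset(...)` injected into the doctest namespace. A rough sketch of how such a namespace could be wired up with the `h5netcdf` engine and a per-worker cache, assuming a pytest `doctest_namespace` fixture and pytest-xdist workers (the actual xclim test fixtures may differ):

```python
# Hypothetical sketch (helper and path names are illustrative, not xclim's
# actual fixtures): inject an `open_dataset` helper into the doctest namespace
# that reads test data with the h5netcdf engine from a per-worker cache.
import os
from functools import partial
from pathlib import Path

import pytest
import xarray as xr


def _open_dataset(filename: str, cache_dir: Path, **kwargs) -> xr.Dataset:
    # Assumes the test data file was already fetched into `cache_dir`.
    return xr.open_dataset(cache_dir / filename, engine="h5netcdf", **kwargs)


@pytest.fixture(autouse=True)
def doctest_fixtures(doctest_namespace):
    # One cache per pytest-xdist worker, so parallel workers never share files.
    worker = os.getenv("PYTEST_XDIST_WORKER", "master")
    cache_dir = Path.home() / ".cache" / "xclim-doctest-data" / worker
    cache_dir.mkdir(parents=True, exist_ok=True)
    doctest_namespace["open_dataset"] = partial(_open_dataset, cache_dir=cache_dir)
    doctest_namespace["xr"] = xr
```

With something like this in place, a doctest can call `open_dataset(path_to_tas_file).tas` directly, as the updated examples in this diff do.
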
### Does this PR introduce a breaking change?

No.

### Other information:

https://pypi.org/project/pylint/3.2.3/
https://pypi.org/project/codespell/2.3.0/
Zeitsperre authored Jun 10, 2024
2 parents 7ec8b74 + b67e51e commit 1f4b4da
Showing 14 changed files with 72 additions and 52 deletions.
CHANGES.rst (2 changes: 2 additions & 0 deletions)
@@ -15,6 +15,8 @@ Internal changes
* Synchronized tooling versions across ``pyproject.toml`` and ``tox.ini`` and pinned them to the latest stable releases in GitHub Workflows. (:pull:`1744`).
* Fixed a few small spelling and grammar issues that were causing errors with `codespell`. Now ignoring `SVG` files. (:pull:`1769`).
* Temporarily skipping the ``test_hawkins_sutton_smoke`` test due to strange behaviour with `xarray`. (:pull:`1769`).
* Fixed some previously uncaught errors raised from recent versions of `pylint` and `codespell`. (:pull:`1772`).
* Set the `doctest` examples to all use `h5netcdf` with worker-separated caches to load datasets. (:pull:`1772`).

v0.49.0 (2024-05-02)
--------------------
pyproject.toml (2 changes: 1 addition & 1 deletion)
@@ -156,7 +156,7 @@ values = [

[tool.codespell]
skip = 'xclim/data/*.json,docs/_build,docs/notebooks/xclim_training/*.ipynb,docs/references.bib,__pycache__,*.gz,*.nc,*.png,*.svg,*.whl'
ignore-words-list = "absolue,bloc,bui,callendar,degreee,environnement,hanel,inferrable,lond,nam,nd,ressources,sie,vas"
ignore-words-list = "absolue,bloc,bui,callendar,degreee,environnement,hanel,inferrable,lond,nam,nd,ressources,socio-economic,sie,vas"

[tool.coverage.run]
relative_files = true
tests/test_calendar.py (2 changes: 2 additions & 0 deletions)
@@ -90,6 +90,8 @@ def test_time_bnds_irregular(typ):
elif typ == "pd":
start = pd.date_range("1990-01-01", periods=24, freq="MS")
end = pd.date_range("1990-01-01 23:59:59.999999999", periods=24, freq="ME")
else:
raise ValueError("`typ` must be 'pd' or 'xr'")

time = start + (end - start) / 2

tests/test_indices.py (2 changes: 2 additions & 0 deletions)
@@ -2971,6 +2971,8 @@ def test_rain_season(pr_series, result_type, method_dry_start):
elif result_type == "end_cond_fails":
pr[{"time": 99 + 20 - 1}] = 5
out_exp = [3, np.NaN, 363]
else:
raise ValueError(f"Unknown result_type: {result_type}")

out = {}
out["start"], out["end"], out["length"] = xci.rain_season(
xclim/indices/_agro.py (4 changes: 2 additions & 2 deletions)
@@ -506,7 +506,7 @@ def cool_night_index(
Examples
--------
>>> from xclim.indices import cool_night_index
>>> tasmin = xr.open_dataset(path_to_tasmin_file).tasmin
>>> tasmin = open_dataset(path_to_tasmin_file).tasmin
>>> cni = cool_night_index(tasmin)
References
@@ -1170,7 +1170,7 @@ def standardized_precipitation_index(
-------
>>> from datetime import datetime
>>> from xclim.indices import standardized_precipitation_index
>>> ds = xr.open_dataset(path_to_pr_file)
>>> ds = open_dataset(path_to_pr_file)
>>> pr = ds.pr
>>> cal_start, cal_end = "1990-05-01", "1990-08-31"
>>> spi_3 = standardized_precipitation_index(
xclim/indices/_anuclim.py (8 changes: 4 additions & 4 deletions)
@@ -128,7 +128,7 @@ def temperature_seasonality(
The following would compute for each grid cell of file `tas.day.nc` the annual temperature seasonality:
>>> import xclim.indices as xci
>>> t = xr.open_dataset(path_to_tas_file).tas
>>> t = open_dataset(path_to_tas_file).tas
>>> tday_seasonality = xci.temperature_seasonality(t)
>>> t_weekly = xci.tg_mean(t, freq="7D")
>>> tweek_seasonality = xci.temperature_seasonality(t_weekly)
@@ -179,7 +179,7 @@ def precip_seasonality(pr: xarray.DataArray, freq: str = "YS") -> xarray.DataArr
The following would compute for each grid cell of file `pr.day.nc` the annual precipitation seasonality:
>>> import xclim.indices as xci
>>> p = xr.open_dataset(path_to_pr_file).pr
>>> p = open_dataset(path_to_pr_file).pr
>>> pday_seasonality = xci.precip_seasonality(p)
>>> p_weekly = xci.precip_accumulation(p, freq="7D")
@@ -242,7 +242,7 @@ def tg_mean_warmcold_quarter(
warmest quarter mean temperature:
>>> from xclim.indices import tg_mean_warmcold_quarter
>>> t = xr.open_dataset(path_to_tas_file)
>>> t = open_dataset(path_to_tas_file)
>>> t_warm_qrt = tg_mean_warmcold_quarter(tas=t.tas, op="warmest")
Notes
@@ -353,7 +353,7 @@ def prcptot_wetdry_quarter(
The following would compute for each grid cell of file `pr.day.nc` the annual wettest quarter total precipitation:
>>> from xclim.indices import prcptot_wetdry_quarter
>>> p = xr.open_dataset(path_to_pr_file)
>>> p = open_dataset(path_to_pr_file)
>>> pr_warm_qrt = prcptot_wetdry_quarter(pr=p.pr, op="wettest")
Notes
xclim/indices/_multivariate.py (26 changes: 13 additions & 13 deletions)
@@ -127,7 +127,7 @@ def cold_spell_duration_index(
--------
>>> from xclim.core.calendar import percentile_doy
>>> from xclim.indices import cold_spell_duration_index
>>> tasmin = xr.open_dataset(path_to_tasmin_file).tasmin.isel(lat=0, lon=0)
>>> tasmin = open_dataset(path_to_tasmin_file).tasmin.isel(lat=0, lon=0)
>>> tn10 = percentile_doy(tasmin, per=10).sel(percentiles=10)
>>> cold_spell_duration_index(tasmin, tn10)
@@ -971,7 +971,7 @@ def precip_accumulation(
precipitation at the seasonal frequency, ie DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import precip_accumulation
>>> pr_day = xr.open_dataset(path_to_pr_file).pr
>>> pr_day = open_dataset(path_to_pr_file).pr
>>> prcp_tot_seasonal = precip_accumulation(pr_day, freq="QS-DEC")
"""
if phase == "liquid":
@@ -1034,7 +1034,7 @@ def precip_average(
precipitation at the seasonal frequency, ie DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import precip_average
>>> pr_day = xr.open_dataset(path_to_pr_file).pr
>>> pr_day = open_dataset(path_to_pr_file).pr
>>> prcp_tot_seasonal = precip_average(pr_day, freq="QS-DEC")
"""
if phase == "liquid":
@@ -1146,8 +1146,8 @@ def high_precip_low_temp(
Example
-------
To compute the number of days with intense rainfall while minimum temperatures dip below -0.2C:
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> tasmin = xr.open_dataset(path_to_tasmin_file).tasmin
>>> pr = open_dataset(path_to_pr_file).pr
>>> tasmin = open_dataset(path_to_tasmin_file).tasmin
>>> high_precip_low_temp(
... pr, tas=tasmin, pr_thresh="10 mm/d", tas_thresh="-0.2 degC"
... )
@@ -1204,7 +1204,7 @@ def days_over_precip_thresh(
Examples
--------
>>> from xclim.indices import days_over_precip_thresh
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> p75 = pr.quantile(0.75, dim="time", keep_attrs=True)
>>> r75p = days_over_precip_thresh(pr, p75)
"""
@@ -1333,7 +1333,7 @@ def tg90p(
--------
>>> from xclim.core.calendar import percentile_doy
>>> from xclim.indices import tg90p
>>> tas = xr.open_dataset(path_to_tas_file).tas
>>> tas = open_dataset(path_to_tas_file).tas
>>> tas_per = percentile_doy(tas, per=90).sel(percentiles=90)
>>> hot_days = tg90p(tas, tas_per)
"""
@@ -1391,7 +1391,7 @@ def tg10p(
--------
>>> from xclim.core.calendar import percentile_doy
>>> from xclim.indices import tg10p
>>> tas = xr.open_dataset(path_to_tas_file).tas
>>> tas = open_dataset(path_to_tas_file).tas
>>> tas_per = percentile_doy(tas, per=10).sel(percentiles=10)
>>> cold_days = tg10p(tas, tas_per)
"""
@@ -1449,7 +1449,7 @@ def tn90p(
--------
>>> from xclim.core.calendar import percentile_doy
>>> from xclim.indices import tn90p
>>> tas = xr.open_dataset(path_to_tas_file).tas
>>> tas = open_dataset(path_to_tas_file).tas
>>> tas_per = percentile_doy(tas, per=90).sel(percentiles=90)
>>> hot_days = tn90p(tas, tas_per)
"""
@@ -1507,7 +1507,7 @@ def tn10p(
--------
>>> from xclim.core.calendar import percentile_doy
>>> from xclim.indices import tn10p
>>> tas = xr.open_dataset(path_to_tas_file).tas
>>> tas = open_dataset(path_to_tas_file).tas
>>> tas_per = percentile_doy(tas, per=10).sel(percentiles=10)
>>> cold_days = tn10p(tas, tas_per)
"""
@@ -1565,7 +1565,7 @@ def tx90p(
--------
>>> from xclim.core.calendar import percentile_doy
>>> from xclim.indices import tx90p
>>> tas = xr.open_dataset(path_to_tas_file).tas
>>> tas = open_dataset(path_to_tas_file).tas
>>> tasmax_per = percentile_doy(tas, per=90).sel(percentiles=90)
>>> hot_days = tx90p(tas, tasmax_per)
"""
@@ -1623,7 +1623,7 @@ def tx10p(
--------
>>> from xclim.core.calendar import percentile_doy
>>> from xclim.indices import tx10p
>>> tas = xr.open_dataset(path_to_tas_file).tas
>>> tas = open_dataset(path_to_tas_file).tas
>>> tasmax_per = percentile_doy(tas, per=10).sel(percentiles=10)
>>> cold_days = tx10p(tas, tasmax_per)
"""
@@ -1762,7 +1762,7 @@ def warm_spell_duration_index(
>>> from xclim.core.calendar import percentile_doy
>>> from xclim.indices import warm_spell_duration_index
>>> tasmax = xr.open_dataset(path_to_tasmax_file).tasmax.isel(lat=0, lon=0)
>>> tasmax = open_dataset(path_to_tasmax_file).tasmax.isel(lat=0, lon=0)
>>> tasmax_per = percentile_doy(tasmax, per=90).sel(percentiles=90)
>>> warm_spell_duration_index(tasmax, tasmax_per)
"""
xclim/indices/_simple.py (14 changes: 7 additions & 7 deletions)
@@ -103,7 +103,7 @@ def tg_mean(tas: xarray.DataArray, freq: str = "YS") -> xarray.DataArray:
at the seasonal frequency, i.e. DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import tg_mean
>>> t = xr.open_dataset(path_to_tas_file).tas
>>> t = open_dataset(path_to_tas_file).tas
>>> tg = tg_mean(t, freq="QS-DEC")
"""
return select_resample_op(tas, op="mean", freq=freq)
@@ -428,7 +428,7 @@ def max_1day_precipitation_amount(
The following would compute for each grid cell the highest 1-day total at an annual frequency:
>>> from xclim.indices import max_1day_precipitation_amount
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> rx1day = max_1day_precipitation_amount(pr, freq="YS")
"""
return select_resample_op(pr, op="max", freq=freq)
@@ -462,7 +462,7 @@ def max_n_day_precipitation_amount(
The following would compute for each grid cell the highest 5-day total precipitation at an annual frequency:
>>> from xclim.indices import max_n_day_precipitation_amount
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> out = max_n_day_precipitation_amount(pr, window=5, freq="YS")
"""
# Rolling sum of the values
@@ -499,7 +499,7 @@ def max_pr_intensity(
The following would compute the maximum 6-hour precipitation intensity at an annual frequency:
>>> from xclim.indices import max_pr_intensity
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> out = max_pr_intensity(pr, window=5, freq="YS")
"""
# Rolling sum of the values
@@ -569,7 +569,7 @@ def sfcWind_max( # noqa: N802
at the seasonal frequency, i.e. DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import sfcWind_max
>>> fg = xr.open_dataset(path_to_sfcWind_file).sfcWind
>>> fg = open_dataset(path_to_sfcWind_file).sfcWind
>>> fg_max = sfcWind_max(fg, freq="QS-DEC")
"""
return sfcWind.resample(time=freq).max(dim="time").assign_attrs(units=sfcWind.units)
@@ -610,7 +610,7 @@ def sfcWind_mean( # noqa: N802
at the seasonal frequency, i.e. DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import sfcWind_mean
>>> fg = xr.open_dataset(path_to_sfcWind_file).sfcWind
>>> fg = open_dataset(path_to_sfcWind_file).sfcWind
>>> fg_mean = sfcWind_mean(fg, freq="QS-DEC")
"""
return (
@@ -653,7 +653,7 @@ def sfcWind_min( # noqa: N802
at the seasonal frequency, i.e. DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import sfcWind_min
>>> fg = xr.open_dataset(path_to_sfcWind_file).sfcWind
>>> fg = open_dataset(path_to_sfcWind_file).sfcWind
>>> fg_min = sfcWind_min(fg, freq="QS-DEC")
"""
return sfcWind.resample(time=freq).min(dim="time").assign_attrs(units=sfcWind.units)
xclim/indices/_threshold.py (16 changes: 8 additions & 8 deletions)
@@ -773,7 +773,7 @@ def daily_pr_intensity(
precipitation >= 5 mm at seasonal frequency, i.e. DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import daily_pr_intensity
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> daily_int = daily_pr_intensity(pr, thresh="5 mm/day", freq="QS-DEC")
"""
t = convert_units_to(thresh, pr, "hydro")
@@ -1142,7 +1142,7 @@ def growing_season_length(
Examples
--------
>>> from xclim.indices import growing_season_length
>>> tas = xr.open_dataset(path_to_tas_file).tas
>>> tas = open_dataset(path_to_tas_file).tas
For the Northern Hemisphere:
@@ -1228,7 +1228,7 @@ def frost_season_length(
Examples
--------
>>> from xclim.indices import frost_season_length
>>> tasmin = xr.open_dataset(path_to_tasmin_file).tasmin
>>> tasmin = open_dataset(path_to_tasmin_file).tasmin
For the Northern Hemisphere:
@@ -1411,7 +1411,7 @@ def frost_free_season_length(
Examples
--------
>>> from xclim.indices import frost_season_length
>>> tasmin = xr.open_dataset(path_to_tasmin_file).tasmin
>>> tasmin = open_dataset(path_to_tasmin_file).tasmin
For the Northern Hemisphere:
@@ -2604,7 +2604,7 @@ def wetdays(
at the seasonal frequency, i.e. DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import wetdays
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> wd = wetdays(pr, thresh="5 mm/day", freq="QS-DEC")
"""
thresh = convert_units_to(thresh, pr, "hydro")
@@ -2646,7 +2646,7 @@ def wetdays_prop(
5 mm at the seasonal frequency, i.e. DJF, MAM, JJA, SON, DJF, etc.:
>>> from xclim.indices import wetdays_prop
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> wd = wetdays_prop(pr, thresh="5 mm/day", freq="QS-DEC")
"""
thresh = convert_units_to(thresh, pr, "hydro")
@@ -3179,7 +3179,7 @@ def dry_spell_frequency(
Examples
--------
>>> from xclim.indices import dry_spell_frequency
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> dsf = dry_spell_frequency(pr=pr, op="sum")
>>> dsf = dry_spell_frequency(pr=pr, op="max")
"""
@@ -3372,7 +3372,7 @@ def wet_spell_frequency(
Examples
--------
>>> from xclim.indices import wet_spell_frequency
>>> pr = xr.open_dataset(path_to_pr_file).pr
>>> pr = open_dataset(path_to_pr_file).pr
>>> dsf = wet_spell_frequency(pr=pr, op="sum")
>>> dsf = wet_spell_frequency(pr=pr, op="max")
"""
xclim/indices/fire/_cffwis.py (8 changes: 8 additions & 0 deletions)
@@ -597,6 +597,8 @@ def _fire_season(
elif method in ["LA08", "GFWED"]:
# In LA08, the check INCLUDES the current day,
start_index = max(temp_condition_days, snow_condition_days)
else:
raise ValueError("`method` must be one of 'WF93', 'LA08' or 'GFWED'.")

for it in range(start_index, tas.shape[-1]):
if method == "WF93":
@@ -627,6 +629,8 @@

# Shut down when mean snow OR mean temp are over/under threshold
shut_down = (msnow >= snow_thresh) | (mtemp < temp_end_thresh)
else:
raise ValueError("`method` must be one of 'WF93', 'LA08' or 'GFWED'.")

# Mask is on if the previous days was on OR is there is a start-up, AND if it's not a shut-down,
# Aka is off if either the previous day was or it is a shut-down.
@@ -687,6 +691,10 @@ def _fire_weather_calc( # noqa: C901

overwintering = params["overwintering"]
dry_start = params["dry_start"]

ow_DC = dc0.copy() if dc0 is not None else None
ow_DMC = dmc0.copy() if dmc0 is not None else None

if overwintering and "DC" in ind_prevs:
# In overwintering, dc0 is understood as the previous season's last DC code.
ow_DC = dc0.copy()
xclim/indices/fire/_ffdi.py (2 changes: 1 addition & 1 deletion)
@@ -144,7 +144,7 @@ def _griffiths_drought_factor(p, smd, lim, df): # pragma: no cover
# N = 0 defines a rainfall event since 9am today,
# so doesn't apply here, where p is the rainfall
# over previous 24 hours.
x_ = N**1.3 / (N**1.3 + P - 2.0)
x_ = N**1.3 / (N**1.3 + P - 2.0) # pylint: disable=E0601
x = min(x_, x)

conseq = 0
xclim/sdba/adjustment.py (6 changes: 5 additions & 1 deletion)
@@ -959,14 +959,18 @@ def _compute_transform_matrix(reference, historical):
# This step needs vectorize with dask, but vectorize doesn't work with dask, argh.
# Invert to get transformation matrix from hist to PC coords.
Hinv = np.linalg.inv(H)
# Fancy tricks to choose best orientation on each axes
# Fancy tricks to choose the best orientation on each axis.
# (using eigenvectors, the output axes orientation is undefined)
if best_orientation == "simple":
orient = best_pc_orientation_simple(R, Hinv)
elif best_orientation == "full":
orient = best_pc_orientation_full(
R, Hinv, reference.mean(axis=1), historical.mean(axis=1), historical
)
else:
raise ValueError(
f"Unknown `best_orientation` method: {best_orientation}."
)
# Get transformation matrix
return (R * orient) @ Hinv

(The remaining 2 changed files are not shown here.)
