diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 98c149fe1..5185eda00 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -18,6 +18,7 @@ New features and enhancements
 Bug fixes
 ^^^^^^^^^
 * Fixed a small inefficiency in ``_otc_adjust``, and the `standardize` method of `OTC/dOTC` is now applied on individual variable (:pull:`1890`, :pull:`1896`).
+* Remove deprecated cells in the tutorial notebook `sdba.ipynb` (:pull:`1895`).
 
 Breaking changes
 ^^^^^^^^^^^^^^^^
diff --git a/docs/notebooks/sdba.ipynb b/docs/notebooks/sdba.ipynb
index 037c58247..cfce4d70b 100644
--- a/docs/notebooks/sdba.ipynb
+++ b/docs/notebooks/sdba.ipynb
@@ -641,52 +641,30 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "dref[\"pr\"] = dref.pr.chunk({\"location\": 1})\n",
-    "dref[\"tasmax\"] = dref.tasmax.chunk({\"location\": 1})\n",
+    "from xclim.core.units import convert_units_to\n",
+    "from xclim.testing import open_dataset\n",
     "\n",
-    "dhist[\"pr\"] = dhist.pr.chunk({\"location\": 1})\n",
-    "dhist[\"tasmax\"] = dhist.tasmax.chunk({\"location\": 1})\n",
+    "dref = open_dataset(\n",
+    "    \"sdba/ahccd_1950-2013.nc\", chunks={\"location\": 1}, drop_variables=[\"lat\", \"lon\"]\n",
+    ").sel(time=slice(\"1981\", \"2010\"))\n",
     "\n",
-    "dsim[\"pr\"] = dsim.pr.chunk({\"location\": 1})\n",
-    "dsim[\"tasmax\"] = dsim.tasmax.chunk({\"location\": 1})"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "##### Perform an initial univariate adjustment."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# additive for tasmax\n",
-    "QDMtx = sdba.QuantileDeltaMapping.train(\n",
-    "    dref.tasmax, dhist.tasmax, nquantiles=20, kind=\"+\", group=\"time\"\n",
+    "# Fix the standard name of the `pr` variable.\n",
+    "# This allows the convert_units_to below to infer the correct CF transformation (precip rate to flux)\n",
+    "# see the \"Unit handling\" notebook\n",
+    "dref.pr.attrs[\"standard_name\"] = \"lwe_precipitation_rate\"\n",
+    "\n",
+    "dref = dref.assign(\n",
+    "    tasmax=convert_units_to(dref.tasmax, \"K\"),\n",
+    "    pr=convert_units_to(dref.pr, \"kg m-2 s-1\"),\n",
     ")\n",
-    "# Adjust both hist and sim, we'll feed both to the Npdf transform.\n",
-    "scenh_tx = QDMtx.adjust(dhist.tasmax)\n",
-    "scens_tx = QDMtx.adjust(dsim.tasmax)\n",
-    "\n",
-    "# remove == 0 values in pr:\n",
-    "dref[\"pr\"] = sdba.processing.jitter_under_thresh(dref.pr, \"0.01 mm d-1\")\n",
-    "dhist[\"pr\"] = sdba.processing.jitter_under_thresh(dhist.pr, \"0.01 mm d-1\")\n",
-    "dsim[\"pr\"] = sdba.processing.jitter_under_thresh(dsim.pr, \"0.01 mm d-1\")\n",
-    "\n",
-    "# multiplicative for pr\n",
-    "QDMpr = sdba.QuantileDeltaMapping.train(\n",
-    "    dref.pr, dhist.pr, nquantiles=20, kind=\"*\", group=\"time\"\n",
+    "dsim = open_dataset(\n",
+    "    \"sdba/CanESM2_1950-2100.nc\", chunks={\"location\": 1}, drop_variables=[\"lat\", \"lon\"]\n",
     ")\n",
-    "# Adjust both hist and sim, we'll feed both to the Npdf transform.\n",
-    "scenh_pr = QDMpr.adjust(dhist.pr)\n",
-    "scens_pr = QDMpr.adjust(dsim.pr)\n",
     "\n",
-    "# Stack variables : Dataset -> DataArray with `multivar` dimension\n",
-    "dref, dhist, dsim = (sdba.stack_variables(da) for da in (dref, dhist, dsim))"
+    "dhist = dsim.sel(time=slice(\"1981\", \"2010\"))\n",
+    "dsim = dsim.sel(time=slice(\"2041\", \"2070\"))\n",
+    "\n",
+    "dref"
    ]
   },
   {
@@ -702,9 +680,14 @@
    "metadata": {},
    "outputs": [],
    "source": [
+    "# Stack variables : Dataset -> DataArray with `multivar` dimension\n",
+    "ref = sdba.processing.stack_variables(dref)\n",
+    "hist = sdba.processing.stack_variables(dhist)\n",
+    "sim = sdba.processing.stack_variables(dsim)\n",
+    "\n",
     "ADJ = sdba.MBCn.train(\n",
-    "    dref,\n",
-    "    dhist,\n",
+    "    ref,\n",
+    "    hist,\n",
     "    base_kws={\"nquantiles\": 20, \"group\": \"time\"},\n",
     "    adj_kws={\"interp\": \"nearest\", \"extrapolation\": \"constant\"},\n",
     "    n_iter=20,  # perform 20 iteration\n",
@@ -714,8 +697,8 @@
     "scenh, scens = (\n",
     "    ADJ.adjust(\n",
     "        sim=ds,\n",
-    "        ref=dref,\n",
-    "        hist=dhist,\n",
+    "        ref=ref,\n",
+    "        hist=hist,\n",
     "        base=sdba.QuantileDeltaMapping,\n",
     "        base_kws_vars={\n",
     "            \"pr\": {\n",
@@ -725,9 +708,9 @@
     "            },\n",
     "            \"tasmax\": {\"kind\": \"+\"},\n",
     "        },\n",
-    "        adj_kws={\"interp\": \"nearest\", \"extrapolation\": \"constant\"},\n",
+    "        adj_kws={\"interp\": \"linear\", \"extrapolation\": \"constant\"},\n",
     "    )\n",
-    "    for ds in (dhist, dsim)\n",
+    "    for ds in (hist, sim)\n",
     ")"
    ]
   },
@@ -767,12 +750,12 @@
    "outputs": [],
    "source": [
     "fig, axs = plt.subplots(1, 2, figsize=(16, 4))\n",
-    "for da, label in zip((dref, scenh, dhist), (\"Reference\", \"Adjusted\", \"Simulated\")):\n",
+    "for da, label in zip((ref, scenh, hist), (\"Reference\", \"Adjusted\", \"Simulated\")):\n",
     "    ds = sdba.unstack_variables(da).isel(location=2)\n",
     "    # time series - tasmax\n",
     "    ds.tasmax.plot(ax=axs[0], label=label, alpha=0.65 if label == \"Adjusted\" else 1)\n",
     "    # scatter plot\n",
-    "    ds.plot.scatter(x=\"pr\", y=\"tasmax\", ax=axs[1], label=label)\n",
+    "    ds.plot.scatter(x=\"pr\", y=\"tasmax\", ax=axs[1], label=label, marker=\".\", s=40)\n",
     "axs[0].legend()\n",
     "axs[1].legend()"
    ]
@@ -808,7 +791,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.12.4"
+   "version": "3.12.5"
   },
   "toc": {
    "base_numbering": 1,