diff --git a/contrib/gagliano/wet_snow_testing.ipynb b/contrib/gagliano/wet_snow_testing.ipynb index decc39d..f6d6635 100644 --- a/contrib/gagliano/wet_snow_testing.ipynb +++ b/contrib/gagliano/wet_snow_testing.ipynb @@ -96,7 +96,7 @@ " ds_name = f.split('stacks/')[-1].split('.')[0]\n", " print(datetime.now(), f' -- starting {ds_name}')\n", "\n", - " if Path(f'rmse_out/{ds_name}_wet_flag.nc').is_file():\n", + " if Path(f'rmse_out_with_si/{ds_name}_wet_flag.nc').is_file():\n", " print('This file already exists, continuing.')\n", " continue\n", " \n", @@ -124,7 +124,7 @@ " ds = calc_snow_index(ds)\n", " ds = calc_snow_index_to_snow_depth(ds, C=c, inplace=False)\n", " ds = id_newly_wet_snow(ds,wet_thresh=wst)\n", - " ds = id_wet_negative_si_test(ds)\n", + " ds = id_wet_negative_si(ds,wet_SI_thresh=0) #change to test depending on whether we want neg SI to flag as wet snow\n", " ds = id_newly_frozen_snow(ds,freeze_thresh=-1*wst)\n", " ds = flag_wet_snow(ds)\n", " # Compare snow depths - mask wet snow\n", @@ -144,9 +144,9 @@ " print(f'Frac valid pixels = {mask_wet.sum() /mask.sum():0.2f}')\n", "\n", " # After loop, save RMSE results per file\n", - " rmse_wet_flag.to_netcdf(f'rmse_out/{ds_name}_wet_flag.nc')\n", - " rmse_no_flag.to_netcdf(f'rmse_out/{ds_name}_no_flag.nc')\n", - " valid_pixels.to_netcdf(f'rmse_out/{ds_name}_valid_pixels.nc')\n", + " rmse_wet_flag.to_netcdf(f'rmse_out_with_si/{ds_name}_wet_flag.nc')\n", + " rmse_no_flag.to_netcdf(f'rmse_out_with_si/{ds_name}_no_flag.nc')\n", + " valid_pixels.to_netcdf(f'rmse_out_with_si/{ds_name}_valid_pixels.nc')\n", " " ] }, @@ -156,11 +156,21 @@ "metadata": {}, "outputs": [], "source": [ - "results = sorted(glob('rmse_out/*.nc'))\n", + "with_neg_si_flag_wet_snow = True\n", + "\n", + "directory = 'rmse_out_with_si'\n", + "#directory = 'rmse_out'\n", + "\n", + "#if with_neg_si_flag_wet_snow == True:\n", + "# directory = 'rmse_out_full_with_si'\n", + "#else:\n", + "# directory = 'rmse_out_full'\n", + "\n", + "results = sorted(glob(f'{directory}/*.nc'))\n", "names = []\n", "for f in results:\n", " if 'no_flag' in f:\n", - " ds_name = f.split('rmse_out/')[-1]\n", + " ds_name = f.split(f'{directory}/')[-1]\n", " ds_name = ds_name.split('_no')[0]\n", " names.append(ds_name)\n", "\n", @@ -172,19 +182,19 @@ "for f in results:\n", " if 'wet_flag' in f:\n", " r = xr.open_dataarray(f).load()\n", - " ds_name = f.split('rmse_out/')[-1]\n", + " ds_name = f.split(f'{directory}/')[-1]\n", " ds_name = ds_name.split('_wet')[0]\n", " for ind,val in zip(r.wet_snow_thresh.values,r.values):\n", " thresh_results.loc[ind,ds_name] = val\n", " if 'no_flag' in f:\n", " r = xr.open_dataarray(f).load()\n", - " ds_name = f.split('rmse_out/')[-1]\n", + " ds_name = f.split(f'{directory}/')[-1]\n", " ds_name = ds_name.split('_no')[0]\n", " for ind,val in zip(r.wet_snow_thresh.values,r.values):\n", " no_thresh_results.loc[ind,ds_name] = val\n", " if 'valid' in f:\n", " r = xr.open_dataarray(f).load()\n", - " ds_name = f.split('rmse_out/')[-1]\n", + " ds_name = f.split(f'{directory}/')[-1]\n", " ds_name = ds_name.split('_valid')[0]\n", " for ind,val in zip(r.wet_snow_thresh.values,r.values):\n", " valid_pixels_results.loc[ind,ds_name] = val\n" @@ -251,7 +261,7 @@ " ds_name = f.split('stacks/')[-1].split('.')[0]\n", " print(datetime.now(), f' -- starting {ds_name}')\n", "\n", - " if Path(f'rmse_out_full/{ds_name}_wet_flag.nc').is_file():\n", + " if Path(f'rmse_out_full_with_si/{ds_name}_wet_flag.nc').is_file():\n", " print('This file already exists, continuing.')\n", " continue\n", 
"\n", @@ -280,7 +290,7 @@ " ds = calc_snow_index(ds)\n", " ds = calc_snow_index_to_snow_depth(ds, C=c, inplace=False)\n", " ds = id_newly_wet_snow(ds,wet_thresh=wst)\n", - " ds = id_wet_negative_si_test(ds)\n", + " ds = id_wet_negative_si(ds) #change to test depdning on whether to remove neg SI = wet snow\n", " ds = id_newly_frozen_snow(ds,freeze_thresh=-1*wst)\n", " ds = flag_wet_snow(ds)\n", " # Compare snow depths - mask wet snow\n", @@ -300,9 +310,9 @@ " print(f'Frac valid pixels = {mask_wet.sum()/ mask.sum():0.2f}')\n", "\n", " # After loop, save RMSE results per file\n", - " rmse_wet_flag.to_netcdf(f'rmse_out_full/{ds_name}_wet_flag.nc')\n", - " rmse_no_flag.to_netcdf(f'rmse_out_full/{ds_name}_no_flag.nc')\n", - " valid_pixels.to_netcdf(f'rmse_out_full/{ds_name}_valid_pixels.nc')\n", + " rmse_wet_flag.to_netcdf(f'rmse_out_full_with_si/{ds_name}_wet_flag.nc')\n", + " rmse_no_flag.to_netcdf(f'rmse_out_full_with_si/{ds_name}_no_flag.nc')\n", + " valid_pixels.to_netcdf(f'rmse_out_full_with_si/{ds_name}_valid_pixels.nc')\n", " " ] }, @@ -312,11 +322,14 @@ "metadata": {}, "outputs": [], "source": [ + "directory = 'rmse_out_full'\n", + "directory = 'rmse_out_full_with_si'\n", + "\n", "which_site = 5\n", "\n", - "results1 = sorted(glob('rmse_out_full/*wet*.nc'))\n", - "results2 = sorted(glob('rmse_out_full/*no*.nc'))\n", - "results3 = sorted(glob('rmse_out_full/*valid*.nc'))\n", + "results1 = sorted(glob(f'{directory}/*wet*.nc'))\n", + "results2 = sorted(glob(f'{directory}/*no*.nc'))\n", + "results3 = sorted(glob(f'{directory}/*valid*.nc'))\n", "\n", "wet_snow = xr.open_dataarray(results1[which_site])\n", "all_snow = xr.open_dataarray(results2[which_site])\n", @@ -345,6 +358,27 @@ " plt.tight_layout()" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "iles = sorted(glob('spicy_s1_stacks/*.nc'))\n", + "\n", + "\n", + "for f in files:\n", + " ds_name = f.split('stacks/')[-1].split('.')[0]\n", + " print(datetime.now(), f' -- starting {ds_name}')\n", + "\n", + " # Open dataset \n", + " ds_ = xr.open_dataset(f).load()\n", + " dataset = ds_[['s1','deltaVV','ims','fcf','lidar-sd']]\n", + " td = abs(pd.to_datetime(dataset.time) - pd.to_datetime(dataset.attrs['lidar-flight-time']))\n", + " closest_ts_idx = np.where(td == td.min())[0][0]\n", + " closest_ts = dataset.time[closest_ts_idx]" + ] + }, { "cell_type": "code", "execution_count": null, @@ -356,7 +390,7 @@ "c = 0.55\n", "wst = -3\n", "\n", - "for wst in [-4,-3,-2,-1,0,1,2,3,4]:\n", + "for wst in [-4,-3,-2,-1,0]:\n", " ds = calc_delta_cross_ratio(dataset, A=a, inplace=False)\n", " ds = calc_delta_gamma(ds, B=b, inplace=False)\n", " print(f'A={a:0.2f}; B={b:0.2f}; C={c:0.2f}; wst={wst:0.2f}')\n", diff --git a/spicy_snow/processing/wet_snow.py b/spicy_snow/processing/wet_snow.py index 3e75345..839ab5a 100644 --- a/spicy_snow/processing/wet_snow.py +++ b/spicy_snow/processing/wet_snow.py @@ -37,12 +37,12 @@ def id_newly_wet_snow(dataset: xr.Dataset, wet_thresh: int = -2, inplace: bool = dataset['wet_flag'] = xr.zeros_like(dataset['deltaVV']) # identify possible newly wet snow in regions FCF < 0.5 with deltaCR - dataset['wet_flag'] = dataset['wet_flag'].where(((dataset['fcf'] > 0.5) | (dataset['deltaCR'] > wet_thresh)), 1) + dataset['wet_flag'] = dataset['wet_flag'].where(((dataset['fcf'] > 0.5) | (dataset['deltaCR'] > wet_thresh) | (dataset['deltaCR'].isnull())), 1) # identify possible newly wet snow in regions FCF > 0.5 with deltaVV - dataset['wet_flag'] = 
dataset['wet_flag'].where(((dataset['fcf'] < 0.5) | (dataset['deltaVV'] > wet_thresh)), 1) + dataset['wet_flag'] = dataset['wet_flag'].where(((dataset['fcf'] < 0.5) | (dataset['deltaVV'] > wet_thresh) | (dataset['deltaVV'].isnull())), 1) # mask nans from Sentinel-1 data - dataset['wet_flag'] = dataset['wet_flag'].where(~dataset['deltaVV'].isnull()) + dataset['wet_flag'] = dataset['wet_flag'].where(~dataset['s1'].sel(band = 'VV').isnull(),np.nan) if not inplace: return dataset @@ -74,10 +74,10 @@ def id_newly_frozen_snow(dataset: xr.Dataset, freeze_thresh: int = 2, inplace: b dataset['freeze_flag'] = xr.zeros_like(dataset['deltaVV']) # identify possible re-freezing by increases of deltaGammaNaught of 2dB - dataset['freeze_flag'] = dataset['freeze_flag'].where(dataset['deltaGamma'] < freeze_thresh, 1) + dataset['freeze_flag'] = dataset['freeze_flag'].where((dataset['deltaGamma'] < freeze_thresh) | (dataset['deltaGamma'].isnull()), 1) # mask nans from Sentinel-1 data - dataset['freeze_flag'] = dataset['freeze_flag'].where(~dataset['deltaGamma'].isnull()) + dataset['freeze_flag'] = dataset['freeze_flag'].where(~dataset['snow_index'].isnull()) if not inplace: return dataset @@ -109,10 +109,10 @@ def id_wet_negative_si(dataset: xr.Dataset, wet_SI_thresh = 0, inplace: bool = F dataset['alt_wet_flag'] = xr.zeros_like(dataset['deltaVV']) # identify wetting of snow by negative snow index with snow present - dataset['alt_wet_flag'] = dataset['alt_wet_flag'].where(((dataset['ims'] != 4) | (dataset['snow_index'] > wet_SI_thresh)), 1) + dataset['alt_wet_flag'] = dataset['alt_wet_flag'].where(((dataset['ims'] != 4) | (dataset['snow_index'] > wet_SI_thresh) | (dataset['snow_index'].isnull())), 1) # mask nans from Sentinel-1 data - dataset['alt_wet_flag'] = dataset['alt_wet_flag'].where(~dataset['deltaVV'].isnull()) + dataset['alt_wet_flag'] = dataset['alt_wet_flag'].where(~dataset['snow_index'].isnull()) if not inplace: return dataset @@ -166,13 +166,14 @@ def flag_wet_snow(dataset: xr.Dataset, inplace: bool = False) -> Union[None, xr. 
dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = prev_time)['wet_snow'] # add newly wet snow flags to old wet snow and then bound at 1 - dataset['wet_snow'].loc[dict(time = ts)]= dataset.sel(time = ts)['wet_snow'] + dataset.sel(time = ts)['wet_flag'] - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'] + dataset.sel(time = ts)['alt_wet_flag'] - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where(dataset.sel(time = ts)['wet_snow'] < 1, 1) - + + dataset['wet_snow'].loc[dict(time = ts)] = xr.where(~dataset.sel(time = ts)['wet_flag'].isnull(),dataset.sel(time = ts)['wet_snow'] + dataset.sel(time = ts)['wet_flag'],np.nan) + dataset['wet_snow'].loc[dict(time = ts)] = xr.where(~dataset.sel(time = ts)['alt_wet_flag'].isnull(),dataset.sel(time = ts)['wet_snow'] + dataset.sel(time = ts)['alt_wet_flag'],np.nan) + dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where((dataset.sel(time = ts)['wet_snow'] < 1) | (dataset.sel(time = ts)['wet_snow'].isnull()), 1) + # add newly frozen snow flags to old wet snow and then bound at 0 to avoid negatives - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'] - dataset.sel(time = ts)['freeze_flag'] - dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where(dataset.sel(time = ts)['wet_snow'] > 0, 0) + dataset['wet_snow'].loc[dict(time = ts)] = xr.where(~dataset.sel(time = ts)['freeze_flag'].isnull(),dataset.sel(time = ts)['wet_snow'] - dataset.sel(time = ts)['freeze_flag'],np.nan) + dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where((dataset.sel(time = ts)['wet_snow'] > 0) | (dataset.sel(time = ts)['wet_snow'].isnull()), 0) # make nans at areas without S1 data dataset['wet_snow'].loc[dict(time = ts)] = dataset.sel(time = ts)['wet_snow'].where(~dataset['s1'].sel(time = ts, band = 'VV').isnull(), np.nan) @@ -207,7 +208,7 @@ def flag_wet_snow(dataset: xr.Dataset, inplace: bool = False) -> Union[None, xr. # now if we are over 1 (ie it was flagged wet by dB and negative SI flags) we should floor those back to 1 dataset['perma_wet'].loc[dict(time = melt_orbit)] = \ - dataset['perma_wet'].loc[dict(time = melt_orbit)].where(dataset['perma_wet'].loc[dict(time = melt_orbit)] <= 1 , 1) + dataset['perma_wet'].loc[dict(time = melt_orbit)].where((dataset['perma_wet'].loc[dict(time = melt_orbit)] <= 1) | (dataset['perma_wet'].loc[dict(time = melt_orbit)].isnull()) , 1) # now calculate the rolling mean of the perma wet so we have a % 0-1 of days out of 4 that were flagged dataset['perma_wet'].loc[dict(time = melt_orbit)] = \ @@ -217,6 +218,7 @@ def flag_wet_snow(dataset: xr.Dataset, inplace: bool = False) -> Union[None, xr. 
# this will fail if bottleneck is installed due to it lacking the min_periods keyword # see: https://github.com/pydata/xarray/issues/4922 + if 'bottleneck' not in sys.modules: dataset['perma_wet'].loc[dict(time = melt_orbit)] = \ dataset['perma_wet'].loc[dict(time = melt_orbit)].rolling(time = len(orbit_dataset.time), min_periods = 1).max() diff --git a/tests/test_wetsnow.py b/tests/test_wetsnow.py index aa54bb3..eca50fe 100644 --- a/tests/test_wetsnow.py +++ b/tests/test_wetsnow.py @@ -21,6 +21,7 @@ class TestWetSnowFlags(unittest.TestCase): fcf = np.random.randn(10, 10)/10 + 0.5 deltaVV = np.random.randn(10, 10, 6) * 3 deltaCR = np.random.randn(10, 10, 6) * 3 + s1 = np.random.randn(10, 10, 6, 3) ims = np.full((10, 10, 6), 4, dtype = int) times = [np.datetime64(t) for t in ['2020-01-01','2020-01-02', '2020-01-07','2020-01-08', '2020-01-14', '2020-01-15']] x = np.linspace(0, 9, 10) @@ -32,6 +33,7 @@ class TestWetSnowFlags(unittest.TestCase): deltaVV = (["x", "y", "time"], deltaVV), deltaCR = (["x", "y", "time"], deltaCR), ims = (["x", "y", "time"], ims), + s1 = (["x", "y", "time", "band"], s1) ), coords = dict( lon = (["x", "y"], lon),
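For reference, the NaN-handling pattern these wet_snow.py hunks introduce works in two steps: a flag is set to 1 only where its driving variable is valid and past the threshold (the added .isnull() terms keep NaN pixels at 0 instead of spuriously flagging them), and the flags are then masked back to NaN wherever the underlying data is missing. Below is a minimal sketch of that pattern with toy values, assuming nothing beyond numpy and xarray; the names mirror id_newly_wet_snow, but the array values are invented and the real code masks on the 's1' VV band rather than on deltaCR itself.

import numpy as np
import xarray as xr

# toy 2x2 input -- values are invented for illustration only
wet_thresh = -2
deltaCR = xr.DataArray([[-3.0, np.nan], [0.5, -2.5]], dims=("x", "y"))

wet_flag = xr.zeros_like(deltaCR)

# keep 0 where deltaCR is above the threshold OR missing;
# set 1 only where deltaCR is valid and at/below the threshold
wet_flag = wet_flag.where((deltaCR > wet_thresh) | deltaCR.isnull(), 1)

# push missing-data pixels back to NaN so they never read as "dry" (0)
wet_flag = wet_flag.where(~deltaCR.isnull(), np.nan)

print(wet_flag.values)
# [[ 1. nan]
#  [ 0.  1.]]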