From 5a66795751dc2a78348f360844c89c18f73b53f8 Mon Sep 17 00:00:00 2001 From: Matt Fisher Date: Mon, 22 Jul 2024 13:07:07 -0600 Subject: [PATCH 1/4] Add type annotations to an unannotated module --- antarctica_today/compute_mean_climatology.py | 128 +++++++++++-------- 1 file changed, 74 insertions(+), 54 deletions(-) diff --git a/antarctica_today/compute_mean_climatology.py b/antarctica_today/compute_mean_climatology.py index 5c5cfef..d46de1f 100644 --- a/antarctica_today/compute_mean_climatology.py +++ b/antarctica_today/compute_mean_climatology.py @@ -1,6 +1,8 @@ import datetime import os import pickle +from pathlib import Path +from typing import Dict, Literal, Optional, Tuple, Union import numpy import pandas @@ -26,13 +28,13 @@ def compute_daily_climatology_pixel_averages( - baseline_start_year=1990, - melt_start_mmdd=(10, 1), - baseline_end_year=2020, - melt_end_mmdd=(4, 30), - output_picklefile=daily_melt_averages_picklefile, - verbose=True, -): + baseline_start_year: int = 1990, + melt_start_mmdd: Tuple[int, int] = (10, 1), + baseline_end_year: int = 2020, + melt_end_mmdd: Tuple[int, int] = (4, 30), + output_picklefile: Path = daily_melt_averages_picklefile, + verbose: bool = True, +) -> Tuple[numpy.ndarray, Dict[Tuple[int, int], int]]: """Compute fraction of days in the baseline period in which each give pixel melts. Use the baseline period. Calculate, of the days with data (ignoring @@ -46,7 +48,7 @@ def compute_daily_climatology_pixel_averages( melt_array, datetimes_dict = read_model_array_picklefile(resample_melt_codes=True) # Recode melt array from (-1, 0, 1, 2), to (nan, nan, 0.0, 1.0), and convert to floating-point - melt_array_nan_filled = numpy.array(melt_array, dtype=numpy.float32) + melt_array_nan_filled: numpy.ndarray = numpy.array(melt_array, dtype=numpy.float32) melt_array_nan_filled[melt_array_nan_filled == -1.0] = numpy.nan melt_array_nan_filled[melt_array_nan_filled == 0.0] = numpy.nan melt_array_nan_filled[melt_array_nan_filled == 1.0] = 0.0 @@ -112,14 +114,14 @@ def compute_daily_climatology_pixel_averages( # baseline_dt_list_day_of_months = numpy.array([dt.day for dt in dt_list_melt_season], dtype=numpy.uint8) # Generate an empty MxNxT array with - average_melt_array = numpy.zeros( + average_melt_array: numpy.ndarray = numpy.zeros( (melt_array.shape[0], melt_array.shape[1], len(baseline_filler_dt_list)), dtype=float, ) # Now, compute the average odds (0-1) of melt on any given day for any given pixel, over the baseline period. 
for i, bdt in enumerate(baseline_filler_dt_list): - bdt_day_mask = numpy.array( + bdt_day_mask: numpy.ndarray = numpy.array( [ ((dt.month == bdt.month) and (dt.day == bdt.day)) for dt in dt_list_melt_season @@ -175,9 +177,9 @@ def compute_daily_climatology_pixel_averages( def read_daily_melt_averages_picklefile( - build_picklefile_if_not_present=True, - daily_climatology_picklefile=daily_melt_averages_picklefile, - verbose=True, + build_picklefile_if_not_present: bool = True, + daily_climatology_picklefile: Path = daily_melt_averages_picklefile, + verbose: bool = True, ): """Read the daily climatology averages picklefile.""" if not os.path.exists(daily_climatology_picklefile): @@ -192,18 +194,18 @@ def read_daily_melt_averages_picklefile( if verbose: print("Reading", daily_climatology_picklefile) - f = open(daily_climatology_picklefile, "rb") - array, dt_dict = pickle.load(f) - f.close() + + with open(daily_climatology_picklefile, "rb") as f: + array, dt_dict = pickle.load(f) return array, dt_dict def compute_daily_sum_pixel_averages( - daily_picklefile=daily_melt_averages_picklefile, - sum_picklefile=daily_cumulative_melt_averages_picklefile, - verbose=True, -): + daily_picklefile: Path = daily_melt_averages_picklefile, + sum_picklefile: Path = daily_cumulative_melt_averages_picklefile, + verbose: bool = True, +) -> None: """Compute a mean daily cumulative melt-day value for each pixel throughout the melt season. {(mm,dd):(MxN array of integer melt days)} @@ -217,7 +219,7 @@ def compute_daily_sum_pixel_averages( """ # First, read the daily melt value picklefile. daily_array, dt_dict = read_daily_melt_averages_picklefile(verbose=verbose) - daily_sum_array = numpy.zeros(daily_array.shape, dtype=numpy.int32) + daily_sum_array: numpy.ndarray = numpy.zeros(daily_array.shape, dtype=numpy.int32) for dt in dt_dict: daily_sum_array[:, :, dt_dict[dt]] = numpy.array( numpy.round( @@ -228,17 +230,18 @@ def compute_daily_sum_pixel_averages( if verbose: print("Writing", sum_picklefile, end="...") - f = open(sum_picklefile, "wb") - pickle.dump((daily_sum_array, dt_dict), f) - f.close() + + with open(sum_picklefile, "wb") as f: + pickle.dump((daily_sum_array, dt_dict), f) + if verbose: print("Done.") def read_daily_sum_melt_averages_picklefile( - build_picklefile_if_not_present=True, - daily_sum_picklefile=daily_cumulative_melt_averages_picklefile, - verbose=True, + build_picklefile_if_not_present: bool = True, + daily_sum_picklefile: Path = daily_cumulative_melt_averages_picklefile, + verbose: bool = True, ): """Read the daily climatology averages picklefile.""" if not os.path.exists(daily_sum_picklefile): @@ -261,14 +264,14 @@ def read_daily_sum_melt_averages_picklefile( def create_baseline_climatology_tif( - start_date=datetime.datetime(1990, 10, 1), - end_date=datetime.datetime(2020, 4, 30), - f_out_mean=mean_climatology_geotiff, - f_out_std=std_climatology_geotiff, - round_to_integers=True, - gap_filled=True, - verbose=True, -): + start_date: datetime.datetime = datetime.datetime(1990, 10, 1), + end_date: datetime.datetime = datetime.datetime(2020, 4, 30), + f_out_mean: str = mean_climatology_geotiff, + f_out_std: str = std_climatology_geotiff, + round_to_integers: bool = True, + gap_filled: bool = True, + verbose: bool = True, +) -> numpy.ndarray: """Generate a "mean annual melt" map over the baseline period. 
The melt year for each season is defined from the (mm,dd) from the "start_date" @@ -287,7 +290,9 @@ def create_baseline_climatology_tif( num_years = int((end_date - start_date).days / 365.25) # print(num_years) - annual_sum_grids = numpy.empty(model_array.shape[0:2] + (num_years,), dtype=int) + annual_sum_grids: numpy.ndarray = numpy.empty( + model_array.shape[0:2] + (num_years,), dtype=int + ) if gap_filled: model_melt_days = model_array @@ -306,7 +311,7 @@ def create_baseline_climatology_tif( # print(i, dt1, dt2) - dates_mask = numpy.array( + dates_mask: numpy.ndarray = numpy.array( [(dt >= dt1) & (dt <= dt2) for dt in datetimes], dtype=bool ) @@ -340,16 +345,23 @@ def create_baseline_climatology_tif( def create_partial_year_melt_anomaly_tif( - current_datetime=None, dest_fname=None, gap_filled=True, verbose=True -): - """Create a tif of melt anomlay compared to baseline climatology for that day of the melt season.""" + current_datetime_in: Optional[datetime.datetime] = None, + dest_fname: Optional[str] = None, + gap_filled: bool = True, + verbose: bool = True, +) -> numpy.ndarray: + """Create a tif of melt anomaly compared to baseline climatology for that day of the melt season.""" # If no datetime is given, use "today" - if current_datetime is None: + current_datetime: datetime.datetime + if current_datetime_in is None: now = datetime.datetime.today() - # Strip of the hour,min,second + # Strip off the hour,min,second + # TODO: Why not use a datetime.date object instead? current_datetime = datetime.datetime( year=now.year, month=now.month, day=now.day ) + else: + current_datetime = current_datetime_in daily_melt_sums, daily_sums_dt_dict = read_daily_sum_melt_averages_picklefile() @@ -376,7 +388,7 @@ def create_partial_year_melt_anomaly_tif( ) dt_list = sorted(list(dt_dict.keys())) - dt_mask = numpy.array( + dt_mask: numpy.ndarray = numpy.array( [ ((dt >= first_dt_of_present_melt_season) and (dt <= current_datetime)) for dt in dt_list @@ -413,7 +425,7 @@ def create_partial_year_melt_anomaly_tif( anomaly_this_season_so_far[ice_mask == 0] = -999 # Round to integers, if it isn't already. - anomalies_int = numpy.array( + anomalies_int: numpy.ndarray = numpy.array( numpy.round(anomaly_this_season_so_far), dtype=numpy.int32 ) @@ -434,7 +446,11 @@ def create_partial_year_melt_anomaly_tif( def create_annual_melt_anomaly_tif( - year, year_melt_tif=None, baseline_melt_tif=None, gap_filled=True, verbose=True + year: int, + year_melt_tif: Optional[str] = None, + baseline_melt_tif: Optional[str] = None, + gap_filled: bool = True, + verbose: bool = True, ): """Create a tif of annual melt anomaly compared to baseline climatology. @@ -522,7 +538,11 @@ def get_baseline_climatology_array(fname=None, gap_filled=True): return create_baseline_climatology_tif(gap_filled=gap_filled) -def get_annual_melt_sum_array(year, fname=None, gap_filled=True): +def get_annual_melt_sum_array( + year: int, + fname: Optional[str] = None, + gap_filled: bool = True, +): """Retrieve the melt year array from the tif. If it's not available, create it and write the file, then return it. 
@@ -545,13 +565,13 @@ def get_annual_melt_sum_array(year, fname=None, gap_filled=True): def create_annual_melt_sum_tif( - year="all", - output_tif=None, - melt_start_mmdd=(10, 1), - melt_end_mmdd=(4, 30), - gap_filled=True, - verbose=True, -): + year: Union[Literal["all"], int] = "all", + output_tif: Optional[str] = None, + melt_start_mmdd: Tuple[int, int] = (10, 1), + melt_end_mmdd: Tuple[int, int] = (4, 30), + gap_filled: bool = True, + verbose: bool = True, +) -> Optional[numpy.ndarray]: """Create an integer tif file of that year's annual sum of melt-days, per pixel. If gap_filled, create a floating-point tif file of the same. @@ -589,7 +609,7 @@ def create_annual_melt_sum_tif( day=melt_end_mmdd[1], ) - dates_mask = numpy.array( + dates_mask: numpy.ndarray = numpy.array( [((dt >= start_date) and (dt <= end_date)) for dt in dt_list], dtype=bool, ) From 18d651aaf074300c2f1e3ec15777f079699553bd Mon Sep 17 00:00:00 2001 From: Matt Fisher Date: Mon, 22 Jul 2024 13:10:25 -0600 Subject: [PATCH 2/4] Upgrade earthaccess to a version with py.typed marker --- conda-lock.yml | 132 ++++++++++++++++++++++++++++++++++++++++-------- environment.yml | 2 +- pyproject.toml | 1 - 3 files changed, 111 insertions(+), 24 deletions(-) diff --git a/conda-lock.yml b/conda-lock.yml index 4f4db94..715083a 100644 --- a/conda-lock.yml +++ b/conda-lock.yml @@ -13,7 +13,7 @@ version: 1 metadata: content_hash: - linux-64: c064f551987bcec9b1baa77856e7ac2c2861223183c6864530115a9f96658a0a + linux-64: f3266edc27a5d2b17f9d24a563487e778a5aeccd2d192a3a31f5525b50817296 channels: - url: conda-forge used_env_vars: [] @@ -917,22 +917,26 @@ package: category: main optional: false - name: earthaccess - version: 0.6.1 + version: 0.10.0 manager: conda platform: linux-64 dependencies: - fsspec: '>=2022.1' + fsspec: '>=2022.11' + importlib-resources: '>=6.3.2' multimethod: '>=1.8' + numpy: '>=1.24.0' pqdm: '>=0.1' - python: '>=3.8,<4.0' - python-cmr: '>=0.7' + python: '>=3.9,<4.0' + python-cmr: '>=0.10.0' + python-dateutil: '>=2.8.2' requests: '>=2.26' - s3fs: '>=2021.11,<2024' + s3fs: '>=2022.11' tinynetrc: '>=1.3.1' - url: https://conda.anaconda.org/conda-forge/noarch/earthaccess-0.6.1-pyhd8ed1ab_0.conda + typing_extensions: '>=4.10.0' + url: https://conda.anaconda.org/conda-forge/noarch/earthaccess-0.10.0-pyhd8ed1ab_0.conda hash: - md5: f11bd0706b9d6629caf45050e250dcff - sha256: c8e5ba0a0de59543e73b81df6eb5e73434c4a23dd05bf394edbcd824d9b4223b + md5: a613b51c489c244349d61783a436357d + sha256: c7d07af7a8a69c626be06191f06628e6ea32e9bedbf0f92ee4d1d010d362fa10 category: main optional: false - name: ensureconda @@ -1166,15 +1170,15 @@ package: category: main optional: false - name: fsspec - version: 2023.12.2 + version: 2024.6.1 manager: conda platform: linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.2-pyhca7485f_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.6.1-pyhff2d567_0.conda hash: - md5: bf40f2a8835b78b1f91083d306b493d2 - sha256: 9269a5464698e0fde1f9c78544552817370c26df86e2a5a7518544b6a55ae8ee + md5: 996bf792cdb8c0ac38ff54b9fde56841 + sha256: 2b8e98294c70d9a33ee0ef27539a8a8752a26efeafa0225e85dc876ef5bb49f4 category: main optional: false - name: gdal @@ -1431,6 +1435,19 @@ package: sha256: e40d7e71c37ec95df9a19d39f5bb7a567c325be3ccde06290a71400aab719cac category: main optional: false +- name: importlib-resources + version: 6.4.0 + manager: conda + platform: linux-64 + dependencies: + importlib_resources: '>=6.4.0,<6.4.1.0a0' + python: '>=3.8' + url: 
https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.4.0-pyhd8ed1ab_0.conda + hash: + md5: dcbadab7a68738a028e195ab68ab2d2e + sha256: 38db827f445ae437a15d50a94816ae67a48285d0700f736af3eb90800a71f079 + category: main + optional: false - name: importlib_metadata version: 8.0.0 manager: conda @@ -2713,6 +2730,20 @@ package: sha256: b53eda154e13fd49c494eb7ba95b22b2b7c72cbeab4ed3a2213144d75558bc9f category: main optional: false +- name: pandas-stubs + version: 2.2.2.240603 + manager: conda + platform: linux-64 + dependencies: + numpy: '>=1.26.0' + python: '>=3.9' + types-pytz: '>=2022.1.1' + url: https://conda.anaconda.org/conda-forge/noarch/pandas-stubs-2.2.2.240603-pyhd8ed1ab_0.conda + hash: + md5: 2ffa854e866926e8e6a76274b9aca854 + sha256: f22e5bb371fac515c4a53d49fe4d7fcddc71136e5ed3094fde0f37dfc249d244 + category: main + optional: false - name: pastel version: 0.2.1 manager: conda @@ -2725,6 +2756,20 @@ package: sha256: 9153f0f38c76a09da7688a61fdbf8f3d7504e2326bef53e4ec20d994311b15bd category: main optional: false +- name: patsy + version: 0.5.6 + manager: conda + platform: linux-64 + dependencies: + numpy: '>=1.4.0' + python: '>=3.6' + six: '' + url: https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.6-pyhd8ed1ab_0.conda + hash: + md5: a5b55d1cb110cdcedc748b5c3e16e687 + sha256: 35ad5cab1d9c08cf98576044bf28f75e62f8492afe6d1a89c94bbe93dc8d7258 + category: main + optional: false - name: pcre2 version: '10.40' manager: conda @@ -3236,18 +3281,18 @@ package: category: main optional: false - name: s3fs - version: 2023.12.2 + version: 2024.6.1 manager: conda platform: linux-64 dependencies: aiobotocore: '>=2.5.4,<3.0.0' aiohttp: '' - fsspec: 2023.12.2 + fsspec: 2024.6.1 python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/s3fs-2023.12.2-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/s3fs-2024.6.1-pyhd8ed1ab_0.conda hash: - md5: e6de0e35f836bef08cbcfeb50337d2a4 - sha256: d3bdcc9eb8c61a383a1199d62f9c20c124fbcf46289600bfe3dfbdd7f813d7d3 + md5: 2120af180562f945c3fccc39972023da + sha256: ce9c6c147b0ad563f3decdb11381a8784b297da0a75d3b6c0ea1fd016df4be6a category: main optional: false - name: scikit-learn @@ -3309,15 +3354,15 @@ package: category: main optional: false - name: setuptools - version: 71.0.1 + version: 71.0.4 manager: conda platform: linux-64 dependencies: python: '>=3.8' - url: https://conda.anaconda.org/conda-forge/noarch/setuptools-71.0.1-pyhd8ed1ab_0.conda + url: https://conda.anaconda.org/conda-forge/noarch/setuptools-71.0.4-pyhd8ed1ab_0.conda hash: - md5: aede3d5c0882ebed2f07024400a111ed - sha256: b09ba557d62111d315f1841176cf01fd75e5ae0ae9d6360ccb6aaca1e9a6935f + md5: ee78ac9c720d0d02fcfd420866b82ab1 + sha256: e1b5dd28d2ea2a7ad660fbc8d1f2ef682a2f8460f80240d836d62e56225ac680 category: main optional: false - name: shapely @@ -3389,6 +3434,25 @@ package: sha256: e849d576e52bf3e6fc5786f89b7d76978f2e2438587826c95570324cb572e52b category: main optional: false +- name: statsmodels + version: 0.14.2 + manager: conda + platform: linux-64 + dependencies: + libgcc-ng: '>=12' + numpy: '>=1.19,<3' + packaging: '>=21.3' + pandas: '>=1.4,!=2.1.0' + patsy: '>=0.5.6' + python: '>=3.11,<3.12.0a0' + python_abi: 3.11.* + scipy: '>=1.8,!=1.9.2' + url: https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.2-py311h18e1886_0.conda + hash: + md5: 82c29bf38b3fb66da09736106609b5fe + sha256: c43daa497cd56e918b84952f986106c02b416574529809bce2942145f33b97d8 + category: main + optional: false - name: threadpoolctl version: 3.5.0 manager: conda 
@@ -3507,6 +3571,30 @@ package: sha256: 8ab6cb9ba91abab6ac65770de7f58b0b0e32d45b34e9b4f1d5d377edde370e0a category: main optional: false +- name: types-python-dateutil + version: 2.9.0.20240316 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/types-python-dateutil-2.9.0.20240316-pyhd8ed1ab_0.conda + hash: + md5: 7831efa91d57475373ee52fb92e8d137 + sha256: 6630bbc43dfb72339fadafc521db56c9d17af72bfce459af195eecb01163de20 + category: main + optional: false +- name: types-pytz + version: 2024.1.0.20240417 + manager: conda + platform: linux-64 + dependencies: + python: '>=3.6' + url: https://conda.anaconda.org/conda-forge/noarch/types-pytz-2024.1.0.20240417-pyhd8ed1ab_0.conda + hash: + md5: 7b71ace1b99195041329427c435b8125 + sha256: cc3913a5504b867c748981ba302e82dbc2bda71837f4894d29db8f6cb490e25d + category: main + optional: false - name: typing-extensions version: 4.12.2 manager: conda diff --git a/environment.yml b/environment.yml index 9c84f09..9cc0619 100644 --- a/environment.yml +++ b/environment.yml @@ -9,7 +9,7 @@ dependencies: # Runtime dependencies # -------------------- - click ~=8.1 - - earthaccess ~=0.6.1 + - earthaccess ~=0.10.0 - gdal ~=3.5 - pandas ~=1.4 - numpy ~=1.26.2 diff --git a/pyproject.toml b/pyproject.toml index 265aec2..67f7132 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,7 +57,6 @@ disallow_incomplete_defs = false module = [ "cairo.*", "cartopy.*", - "earthaccess.*", # TODO: Remove after upgrading earthaccess >=0.10 "gdal.*", "geopandas.*", "gi.*", From fcfad5dae795be062dd89def161ca53862d83a36 Mon Sep 17 00:00:00 2001 From: Matt Fisher Date: Mon, 22 Jul 2024 16:34:03 -0600 Subject: [PATCH 3/4] Restore prior function interface Use a different type narrowing approach --- antarctica_today/compute_mean_climatology.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/antarctica_today/compute_mean_climatology.py b/antarctica_today/compute_mean_climatology.py index d46de1f..b12306e 100644 --- a/antarctica_today/compute_mean_climatology.py +++ b/antarctica_today/compute_mean_climatology.py @@ -345,23 +345,26 @@ def create_baseline_climatology_tif( def create_partial_year_melt_anomaly_tif( - current_datetime_in: Optional[datetime.datetime] = None, + current_datetime: Optional[datetime.datetime] = None, dest_fname: Optional[str] = None, gap_filled: bool = True, verbose: bool = True, ) -> numpy.ndarray: """Create a tif of melt anomaly compared to baseline climatology for that day of the melt season.""" # If no datetime is given, use "today" - current_datetime: datetime.datetime - if current_datetime_in is None: + if current_datetime is None: now = datetime.datetime.today() # Strip off the hour,min,second # TODO: Why not use a datetime.date object instead? current_datetime = datetime.datetime( year=now.year, month=now.month, day=now.day ) - else: - current_datetime = current_datetime_in + + if not isinstance(current_datetime, datetime.datetime): + raise ValueError( + f"Unexpected value for current_datetime: {current_datetime}." + "This should never happen, but this helps the typechecker narrow." 
+ ) daily_melt_sums, daily_sums_dt_dict = read_daily_sum_melt_averages_picklefile() From 6ce8068f5592265899febc4da06fe43e7a42329f Mon Sep 17 00:00:00 2001 From: Matt Fisher Date: Mon, 22 Jul 2024 17:14:16 -0600 Subject: [PATCH 4/4] Help the typechecker some more --- antarctica_today/compute_mean_climatology.py | 7 +++---- antarctica_today/melt_array_picklefile.py | 12 ++++++------ 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/antarctica_today/compute_mean_climatology.py b/antarctica_today/compute_mean_climatology.py index b12306e..6e4d1cd 100644 --- a/antarctica_today/compute_mean_climatology.py +++ b/antarctica_today/compute_mean_climatology.py @@ -429,7 +429,8 @@ def create_partial_year_melt_anomaly_tif( # Round to integers, if it isn't already. anomalies_int: numpy.ndarray = numpy.array( - numpy.round(anomaly_this_season_so_far), dtype=numpy.int32 + numpy.round(anomaly_this_season_so_far), + dtype=numpy.int32, ) # If dest_fname is None, create it. @@ -591,9 +592,7 @@ def create_annual_melt_sum_tif( dt_list = list(datetimes_dict.keys()) if year == "all": - years = numpy.unique([dt.year for dt in dt_list]) - years.sort() - + years = sorted({dt.year for dt in dt_list}) else: assert year == int(year) years = [year] diff --git a/antarctica_today/melt_array_picklefile.py b/antarctica_today/melt_array_picklefile.py index b21595d..f6219d9 100755 --- a/antarctica_today/melt_array_picklefile.py +++ b/antarctica_today/melt_array_picklefile.py @@ -15,7 +15,7 @@ import pickle import re from pathlib import Path -from typing import Any +from typing import Any, Dict, Tuple import numpy from osgeo import gdal @@ -323,15 +323,15 @@ def _filter_out_erroneous_swaths(model_array, datetimes_dict): def read_gap_filled_melt_picklefile( - picklefile=gap_filled_melt_picklefile, verbose=True -): + picklefile: Path = gap_filled_melt_picklefile, + verbose: bool = True, +) -> Tuple[numpy.ndarray, Dict[datetime.datetime, int]]: """Read the gap-filled picklefile, return to user.""" if verbose: print("Reading", picklefile) - f = open(picklefile, "rb") - array, dt_dict = pickle.load(f) - f.close() + with open(picklefile, "rb") as f: + array, dt_dict = pickle.load(f) return array, dt_dict
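
Note on the narrowing approach in PATCH 3: keeping the original `current_datetime` parameter name means the value starts out as `Optional[datetime.datetime]`, and the explicit `isinstance` guard after the default is filled in is what convinces mypy that the rest of the function sees a plain `datetime.datetime`. Below is a minimal standalone sketch of the same pattern; the function name `describe_day` and its string return value are illustrative only and are not part of `compute_mean_climatology.py`.

    import datetime
    from typing import Optional


    def describe_day(current_datetime: Optional[datetime.datetime] = None) -> str:
        # If no datetime is given, use "today" with the hour/min/sec stripped off,
        # mirroring create_partial_year_melt_anomaly_tif().
        if current_datetime is None:
            now = datetime.datetime.today()
            current_datetime = datetime.datetime(
                year=now.year, month=now.month, day=now.day
            )

        # Guard-and-raise: after this check, mypy narrows current_datetime from
        # Optional[datetime.datetime] to datetime.datetime for the remainder of
        # the function, without renaming the parameter or changing call sites.
        if not isinstance(current_datetime, datetime.datetime):
            raise ValueError(
                f"Unexpected value for current_datetime: {current_datetime}. "
                "This should never happen, but it helps the typechecker narrow."
            )

        return current_datetime.strftime("%Y-%m-%d")


    print(describe_day())  # e.g. "2024-07-22"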
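
Note on the `numpy.unique` change in PATCH 4: replacing the array-plus-in-place-sort with a sorted set comprehension yields a plain, already-sorted list of ints, which lines up with the `years = [year]` branch in the same function and is simpler for mypy to reason about. A small sketch of the before/after behaviour follows; the `dt_list` values are made-up sample data, not the real `datetimes_dict` keys from the picklefile.

    import datetime

    import numpy

    # Illustrative sample data standing in for the picklefile's datetime keys.
    dt_list = [
        datetime.datetime(1990, 10, 1),
        datetime.datetime(1991, 1, 15),
        datetime.datetime(1991, 4, 30),
    ]

    # Before: a numpy array of years, sorted in place.
    years_array = numpy.unique([dt.year for dt in dt_list])
    years_array.sort()

    # After: a plain sorted list of ints, uniform with the `years = [year]` branch.
    years_list = sorted({dt.year for dt in dt_list})

    assert list(years_array) == years_list == [1990, 1991]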