
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Nov 5, 2024
1 parent 009f1a7 commit 7f58c58
Showing 25 changed files with 68 additions and 108 deletions.
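
The commit message does not name the individual hooks, but the fixes below follow two recognizable patterns: import blocks are re-sorted and stripped of unused entries (isort/autoflake-style behavior), and legacy constructs are modernized: str.format() calls become f-strings, a from __future__ import is dropped, and IOError becomes OSError (pyupgrade-style behavior). The hook names are an inference from the diffs, not something the commit states. A minimal sketch of the .format()-to-f-string rewrite, using hypothetical values:

    short_name, page_size = "GLAH01", 2000  # hypothetical values for illustration

    old = "short_name={0}&page_size={1}".format(short_name, page_size)
    new = f"short_name={short_name}&page_size={page_size}"

    assert old == new  # the rewrite never changes the rendered string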
5 changes: 1 addition & 4 deletions scripts/fia/00_download.ipynb
@@ -30,17 +30,14 @@
 "execution_count": null,
 "metadata": {},
 "outputs": [],
-"source": [
-"from carbonplan_data.utils import process_sources"
-]
+"source": []
 },
 {
 "cell_type": "code",
 "execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
-"import os\n",
 "import pathlib\n",
 "import zipfile\n",
 "\n",
3 changes: 0 additions & 3 deletions scripts/fia/01_raw_to_parquet.ipynb
@@ -32,9 +32,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import io\n",
-"import os.path\n",
-"import pathlib\n",
 "\n",
 "import gcsfs\n",
 "import pandas as pd\n",
5 changes: 3 additions & 2 deletions scripts/fia/01_raw_to_parquet_part2.ipynb
@@ -34,9 +34,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import fsspec\n",
+"import pathlib\n",
+"\n",
 "import dask.dataframe as dd\n",
-"import pathlib"
+"import fsspec"
 ]
 },
 {
1 change: 0 additions & 1 deletion scripts/fluxnet/01_raw_to_parquet.ipynb
@@ -35,7 +35,6 @@
 "import pathlib\n",
 "\n",
 "import dask.dataframe as dd\n",
-"import fsspec\n",
 "import gcsfs\n",
 "import pandas as pd\n",
 "from fsspec.implementations.zip import ZipFileSystem\n",
60 changes: 30 additions & 30 deletions scripts/glas/01_cache_glas_data.ipynb
@@ -51,7 +51,6 @@
 "# machine urs.earthdata.nasa.gov login myusername password mypassword\n",
 "# where 'myusername' and 'mypassword' are your Earthdata credentials.\n",
 "#\n",
-"from __future__ import print_function\n",
 "\n",
 "import base64\n",
 "import itertools\n",
@@ -62,24 +61,24 @@
 "from getpass import getpass\n",
 "\n",
 "try:\n",
+"    from urllib.error import HTTPError, URLError\n",
 "    from urllib.parse import urlparse\n",
 "    from urllib.request import (\n",
-"        urlopen,\n",
+"        HTTPCookieProcessor,\n",
 "        Request,\n",
 "        build_opener,\n",
-"        HTTPCookieProcessor,\n",
+"        urlopen,\n",
 "    )\n",
-"    from urllib.error import HTTPError, URLError\n",
 "except ImportError:\n",
-"    from urlparse import urlparse\n",
 "    from urllib2 import (\n",
-"        urlopen,\n",
-"        Request,\n",
+"        HTTPCookieProcessor,\n",
 "        HTTPError,\n",
+"        Request,\n",
 "        URLError,\n",
 "        build_opener,\n",
-"        HTTPCookieProcessor,\n",
+"        urlopen,\n",
 "    )\n",
+"    from urlparse import urlparse\n",
 "\n",
 "# short_name = 'GLAH01'\n",
 "# version = '033'\n",
@@ -96,9 +95,9 @@
 "URS_URL = \"https://urs.earthdata.nasa.gov\"\n",
 "CMR_PAGE_SIZE = 2000\n",
 "CMR_FILE_URL = (\n",
-"    \"{0}/search/granules.json?provider=NSIDC_ECS\"\n",
+"    f\"{CMR_URL}/search/granules.json?provider=NSIDC_ECS\"\n",
 "    \"&sort_key[]=start_date&sort_key[]=producer_granule_id\"\n",
-"    \"&scroll=true&page_size={1}\".format(CMR_URL, CMR_PAGE_SIZE)\n",
+"    f\"&scroll=true&page_size={CMR_PAGE_SIZE}\"\n",
 ")\n",
 "\n",
 "\n",
@@ -138,22 +137,22 @@
 "        username, account, password = info.authenticators(urlparse(URS_URL).hostname)\n",
 "        errprefix = \"netrc error: \"\n",
 "    except Exception as e:\n",
-"        if not (\"No such file\" in str(e)):\n",
-"            print(\"netrc error: {0}\".format(str(e)))\n",
+"        if \"No such file\" not in str(e):\n",
+"            print(f\"netrc error: {str(e)}\")\n",
 "        username = None\n",
 "        password = None\n",
 "\n",
 "    while not credentials:\n",
 "        if not username:\n",
 "            username = get_username()\n",
 "            password = get_password()\n",
-"        credentials = \"{0}:{1}\".format(username, password)\n",
+"        credentials = f\"{username}:{password}\"\n",
 "        credentials = base64.b64encode(credentials.encode(\"ascii\")).decode(\"ascii\")\n",
 "\n",
 "        if url:\n",
 "            try:\n",
 "                req = Request(url)\n",
-"                req.add_header(\"Authorization\", \"Basic {0}\".format(credentials))\n",
+"                req.add_header(\"Authorization\", f\"Basic {credentials}\")\n",
 "                opener = build_opener(HTTPCookieProcessor())\n",
 "                opener.open(req)\n",
 "            except HTTPError:\n",
@@ -169,15 +168,15 @@
 "def build_version_query_params(version):\n",
 "    desired_pad_length = 3\n",
 "    if len(version) > desired_pad_length:\n",
-"        print('Version string too long: \"{0}\"'.format(version))\n",
+"        print(f'Version string too long: \"{version}\"')\n",
 "        quit()\n",
 "\n",
 "    version = str(int(version))  # Strip off any leading zeros\n",
 "    query_params = \"\"\n",
 "\n",
 "    while len(version) <= desired_pad_length:\n",
 "        padded_version = version.zfill(desired_pad_length)\n",
-"        query_params += \"&version={0}\".format(padded_version)\n",
+"        query_params += f\"&version={padded_version}\"\n",
 "        desired_pad_length -= 1\n",
 "    return query_params\n",
 "\n",
@@ -191,16 +190,16 @@
 "    polygon=None,\n",
 "    filename_filter=None,\n",
 "):\n",
-"    params = \"&short_name={0}\".format(short_name)\n",
+"    params = f\"&short_name={short_name}\"\n",
 "    params += build_version_query_params(version)\n",
-"    params += \"&temporal[]={0},{1}\".format(time_start, time_end)\n",
+"    params += f\"&temporal[]={time_start},{time_end}\"\n",
 "    if polygon:\n",
-"        params += \"&polygon={0}\".format(polygon)\n",
+"        params += f\"&polygon={polygon}\"\n",
 "    elif bounding_box:\n",
-"        params += \"&bounding_box={0}\".format(bounding_box)\n",
+"        params += f\"&bounding_box={bounding_box}\"\n",
 "    if filename_filter:\n",
 "        option = \"&options[producer_granule_id][pattern]=true\"\n",
-"        params += \"&producer_granule_id[]={0}{1}\".format(filename_filter, option)\n",
+"        params += f\"&producer_granule_id[]={filename_filter}{option}\"\n",
 "    return CMR_FILE_URL + params\n",
 "\n",
 "\n",
@@ -262,7 +261,7 @@
 "        polygon=polygon,\n",
 "        filename_filter=filename_filter,\n",
 "    )\n",
-"    print(\"Querying for data:\\n\\t{0}\\n\".format(cmr_query_url))\n",
+"    print(f\"Querying for data:\\n\\t{cmr_query_url}\\n\")\n",
 "\n",
 "    cmr_scroll_id = None\n",
 "    ctx = ssl.create_default_context()\n",
@@ -282,7 +281,7 @@
 "            cmr_scroll_id = headers[\"cmr-scroll-id\"]\n",
 "            hits = int(headers[\"cmr-hits\"])\n",
 "            if hits > 0:\n",
-"                print(\"Found {0} matches.\".format(hits))\n",
+"                print(f\"Found {hits} matches.\")\n",
 "            else:\n",
 "                print(\"Found no matches.\")\n",
 "        search_page = response.read()\n",
@@ -326,8 +325,9 @@
 "outputs": [],
 "source": [
 "import os\n",
-"import fsspec\n",
+"\n",
 "import dask\n",
+"import fsspec\n",
 "\n",
 "\n",
 "@dask.delayed\n",
@@ -338,7 +338,7 @@
 "        return out\n",
 "\n",
 "    url_count = len(urls)\n",
-"    print(\"Downloading {0} files...\".format(url_count))\n",
+"    print(f\"Downloading {url_count} files...\")\n",
 "\n",
 "    for index, url in enumerate(urls, start=1):\n",
 "        if not credentials and urlparse(url).scheme == \"https\":\n",
@@ -359,7 +359,7 @@
 "            # open(filename, 'wb').write(resp.content)\n",
 "            req = Request(url)\n",
 "            if credentials:\n",
-"                req.add_header(\"Authorization\", \"Basic {0}\".format(credentials))\n",
+"                req.add_header(\"Authorization\", f\"Basic {credentials}\")\n",
 "            opener = build_opener(HTTPCookieProcessor())\n",
 "\n",
 "            with fsspec.open(target_url, mode=\"wb\") as target:\n",
@@ -368,11 +368,11 @@
 "            out.append(target_url)\n",
 "\n",
 "        except HTTPError as e:\n",
-"            print(\"HTTPError {0}, {1}\".format(e.code, e.reason), filename)\n",
+"            print(f\"HTTPError {e.code}, {e.reason}\", filename)\n",
 "        except URLError as e:\n",
-"            print(\"URLError: {0}\".format(e.reason), filename)\n",
-"        except IOError:\n",
-"            print(\"IOError: {0}\".format(e.reason), filename)\n",
+"            print(f\"URLError: {e.reason}\", filename)\n",
+"        except OSError:\n",
+"            print(f\"IOError: {e.reason}\", filename)\n",
 "        except KeyboardInterrupt:\n",
 "            quit()\n",
 "        except:\n",
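
Two of the modernizing rewrites in this file deserve a note. First, in the CMR_FILE_URL hunk, Python concatenates adjacent string literals before the attribute access, so the old .format(CMR_URL, CMR_PAGE_SIZE) applied to the whole multi-line template rather than only its last fragment; the fix accordingly turns each placeholder-bearing fragment into its own f-string. A minimal sketch, assuming a value for CMR_URL (its definition sits in a collapsed part of the diff):

    CMR_URL = "https://cmr.earthdata.nasa.gov"  # assumed value, not shown in this diff
    CMR_PAGE_SIZE = 2000

    # Old style: .format() binds to the implicitly concatenated literal as a whole.
    old = (
        "{0}/search/granules.json?provider=NSIDC_ECS"
        "&scroll=true&page_size={1}".format(CMR_URL, CMR_PAGE_SIZE)
    )
    # New style: each fragment that consumes a value becomes its own f-string.
    new = (
        f"{CMR_URL}/search/granules.json?provider=NSIDC_ECS"
        f"&scroll=true&page_size={CMR_PAGE_SIZE}"
    )
    assert old == new

Second, the except IOError: to except OSError: change in the final hunk is behavior-preserving because IOError has been an alias of OSError since Python 3.3. Note that the handler body still reads e.reason without that clause ever binding e, a pre-existing bug the mechanical fixer leaves untouched:

    assert IOError is OSError  # same class on Python 3.3+

    try:
        raise OSError("simulated failure")
    except OSError as e:  # binds e explicitly, unlike the notebook's bare clause
        print(f"caught: {e}")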
4 changes: 0 additions & 4 deletions scripts/global-biomass/01_biomass_to_cogs.ipynb
@@ -32,12 +32,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import io\n",
-"import os\n",
-"import pathlib\n",
 "\n",
 "from google.cloud import storage\n",
-"from rasterio.io import MemoryFile\n",
 "from rio_cogeo.cogeo import cog_translate\n",
 "from rio_cogeo.profiles import cog_profiles\n",
 "\n",
2 changes: 0 additions & 2 deletions scripts/gridmet/01_gridmet_to_zarr.ipynb
@@ -32,9 +32,7 @@
 "outputs": [],
 "source": [
 "import gcsfs\n",
-"import intake\n",
 "import xarray as xr\n",
-"import zarr\n",
 "from numcodecs.zlib import Zlib\n",
 "\n",
 "fs = gcsfs.GCSFileSystem(\n",
5 changes: 3 additions & 2 deletions scripts/grids/make_grid.ipynb
@@ -6,11 +6,12 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import numpy as np\n",
 "import os\n",
+"from datetime import datetime\n",
+"\n",
+"import numpy as np\n",
 "import rasterio as rio\n",
 "import xarray as xr\n",
-"from datetime import datetime\n",
 "from rasterio.warp import transform\n",
 "\n",
 "from carbonplan_data import cat"
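
The make_grid hunk above is the cleanest illustration of the layout the import sorter enforces: standard library first, third-party distributions second, first-party packages last, with one blank line between groups; within each group, plain import statements precede from-imports and each run is alphabetized. A sketch of the resulting shape (runnable only where these packages are installed):

    # Standard library.
    import os
    from datetime import datetime

    # Third-party distributions.
    import numpy as np
    import rasterio as rio
    import xarray as xr
    from rasterio.warp import transform

    # First-party / local package.
    from carbonplan_data import cat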
2 changes: 0 additions & 2 deletions scripts/mtbs/02_mtbs_to_zarr.ipynb
@@ -42,9 +42,7 @@
 "import gcsfs\n",
 "import numpy as np\n",
 "import rasterio\n",
-"import rioxarray\n",
 "import xarray as xr\n",
-"import zarr\n",
 "from numcodecs.zlib import Zlib\n",
 "from rasterio import Affine\n",
 "from rasterio.crs import CRS\n",
2 changes: 0 additions & 2 deletions scripts/mtbs/03_mtbs_to_zarr.ipynb
@@ -42,9 +42,7 @@
 "import gcsfs\n",
 "import numpy as np\n",
 "import rasterio\n",
-"import rioxarray\n",
 "import xarray as xr\n",
-"import zarr\n",
 "from numcodecs.zlib import Zlib\n",
 "from rasterio import Affine\n",
 "from rasterio.crs import CRS\n",
25 changes: 7 additions & 18 deletions scripts/mtbs/04_mtbs_perims_to_raster.ipynb
@@ -42,24 +42,14 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from carbonplan.data import cat\n",
-"import xarray as xr\n",
+"import geopandas\n",
+"import hvplot.pandas  # noqa\n",
 "import numpy as np\n",
-"\n",
 "import pandas as pd\n",
-"\n",
-"import geopandas\n",
-"\n",
 "import rasterio\n",
-"from rasterio import Affine\n",
-"from rasterio.transform import rowcol\n",
-"from rasterio.features import rasterize\n",
-"from rasterio.transform import from_bounds\n",
-"\n",
-"import matplotlib.pyplot as plt\n",
-"import zarr\n",
-"\n",
-"import hvplot.pandas  # noqa"
+"import xarray as xr\n",
+"from carbonplan.data import cat\n",
+"from rasterio.features import rasterize"
 ]
 },
 {
@@ -158,9 +148,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from rio_cogeo.profiles import cog_profiles\n",
 "from rasterio.io import MemoryFile\n",
-"from rio_cogeo.cogeo import cog_translate"
+"from rio_cogeo.cogeo import cog_translate\n",
+"from rio_cogeo.profiles import cog_profiles"
 ]
 },
 {
@@ -257,7 +247,6 @@
 "outputs": [],
 "source": [
 "import intake\n",
-"import xarray as xr\n",
 "from dask.diagnostics import ProgressBar\n",
 "\n",
 "cat2 = intake.open_catalog(\n",
1 change: 0 additions & 1 deletion scripts/nftd/00_download.ipynb
@@ -32,7 +32,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import os\n",
 "import pathlib\n",
 "import zipfile\n",
 "\n",
2 changes: 0 additions & 2 deletions scripts/nftd/01_nftd_to_cogs.ipynb
@@ -32,12 +32,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import io\n",
 "import os\n",
 "import pathlib\n",
 "\n",
 "from google.cloud import storage\n",
-"from rasterio.io import MemoryFile\n",
 "from rio_cogeo.cogeo import cog_translate\n",
 "from rio_cogeo.profiles import cog_profiles"
 ]
2 changes: 1 addition & 1 deletion scripts/nftd/02_downsampling_and_reprojection.ipynb
@@ -52,9 +52,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
+"import rasterio\n",
 "from rio_cogeo.cogeo import cog_translate\n",
 "from rio_cogeo.profiles import cog_profiles\n",
-"import rasterio\n",
 "\n",
 "dst_profile = cog_profiles.get(\"deflate\")"
 ]
1 change: 0 additions & 1 deletion scripts/nlcd/00_download.ipynb
@@ -31,7 +31,6 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import os\n",
 "import pathlib\n",
 "import zipfile\n",
 "\n",
2 changes: 0 additions & 2 deletions scripts/nlcd/01_nlcd_to_cogs.ipynb
@@ -34,12 +34,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"import io\n",
 "import os\n",
 "import pathlib\n",
 "\n",
 "from google.cloud import storage\n",
-"from rasterio.io import MemoryFile\n",
 "from rio_cogeo.cogeo import cog_translate\n",
 "from rio_cogeo.profiles import cog_profiles\n",
 "\n",
