Commit

Merge branch 'main' into main
gadomski authored Aug 7, 2024
2 parents 55f7736 + 32fa246 commit 8e24411
Showing 19 changed files with 84 additions and 32 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/continuous-integration.yml
@@ -29,7 +29,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10"]
python-version: ["3.9", "3.10", "3.11", "3.12"]
defaults:
run:
shell: bash -l {0}
24 changes: 9 additions & 15 deletions .pre-commit-config.yaml
@@ -2,34 +2,28 @@
# Please run `pre-commit run --all-files` when adding or changing entries.

repos:
- repo: https://github.com/psf/black
rev: 23.3.0
hooks:
- id: black
- repo: https://github.com/codespell-project/codespell
rev: v2.2.4
rev: v2.3.0
hooks:
- id: codespell
args: [--ignore-words=.codespellignore]
types_or: [jupyter, markdown, python, shell]
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.6
hooks:
- id: flake8
- id: ruff
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.2.0
rev: v1.11.1
hooks:
- id: mypy
additional_dependencies:
- click != 8.1.0
- click
- stactools
- pytest
- types-requests
- types-python-dateutil
- repo: https://github.com/pycqa/isort
rev: 5.12.0
hooks:
- id: isort
- repo: https://github.com/igorshubovych/markdownlint-cli
rev: v0.33.0
rev: v0.41.0
hooks:
- id: markdownlint
2 changes: 1 addition & 1 deletion CODE_OF_CONDUCT.md
@@ -6,7 +6,7 @@ In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
level of experience, education, socioeconomic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.

## Our Standards
7 changes: 7 additions & 0 deletions README.md
@@ -151,6 +151,13 @@ To run the tests:
pytest
```

Many tests are skipped by default because they access external data, which makes them slow and a bit flaky.
To run these tests:

```shell
pytest --external-data
```
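
A test opts in by carrying the `external_data` marker registered in `tests/conftest.py`; a minimal sketch (the test itself is illustrative, but the marker and the `test_data.get_external_data` helper are the ones used throughout this package's test suite):

```python
import pytest

from tests import test_data


@pytest.mark.external_data  # skipped unless pytest is invoked with --external-data
def test_reads_external_netcdf() -> None:
    # Resolves the external NetCDF file to a local path (downloaded if not already present)
    path = test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc")
    assert path
```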

### Updating static resources

There are several .json files in the source tree that are used to populate
2 changes: 1 addition & 1 deletion docker/Dockerfile
@@ -3,7 +3,7 @@ FROM ghcr.io/stac-utils/stactools:$STACTOOLS_VERSION

ARG DOCKER_NAMESPACE_PACKAGE_DIR
ARG DOCKER_WORKDIR
ENV PYTHON_VERSION=3.10
ENV PYTHON_VERSION=3.11
WORKDIR $DOCKER_WORKDIR

# For caching purposes, install dependencies but remove the actual package
2 changes: 1 addition & 1 deletion docker/Dockerfile-dev
@@ -3,7 +3,7 @@ FROM ghcr.io/stac-utils/stactools:${STACTOOLS_VERSION}-dev

ARG DOCKER_NAMESPACE_PACKAGE_DIR
ARG DOCKER_WORKDIR
ENV PYTHON_VERSION=3.10
ENV PYTHON_VERSION=3.11
WORKDIR $DOCKER_WORKDIR

RUN apt-get -y -q update \
1 change: 1 addition & 0 deletions environment.yml
@@ -6,4 +6,5 @@ dependencies:
- conda-forge::gdal>=3.3
- conda-forge::geos>=3.3
- conda-forge::rasterio>=1.3
- conda-forge::sqlite
- conda-forge::libstdcxx-ng # gdal dependency. Make sure it's from the same channel as gdal
4 changes: 1 addition & 3 deletions requirements-dev.txt
@@ -1,10 +1,8 @@
black
codespell
flake8
isort
mypy
pre-commit
pytest
pytest-cov
ruff
types-python-dateutil
types-requests
4 changes: 1 addition & 3 deletions scripts/format
@@ -17,9 +17,7 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
if [ "${1:-}" = "--help" ]; then
usage
else
# Sort imports
pre-commit run isort --all-files
# Code formatting
pre-commit run black --all-files
pre-commit run ruff-format --all-files
fi
fi
2 changes: 1 addition & 1 deletion scripts/lint
@@ -18,7 +18,7 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
usage
else
# Lint
pre-commit run flake8 --all-files
pre-commit run ruff --all-files
# Type checking
pre-commit run mypy --all-files
fi
3 changes: 2 additions & 1 deletion setup.cfg
@@ -18,9 +18,10 @@ keywords =
classifiers =
Development Status :: 4 - Beta
License :: OSI Approved :: Apache Software License
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: 3.12

[options]
package_dir =
30 changes: 30 additions & 0 deletions tests/conftest.py
@@ -0,0 +1,30 @@
from typing import Any

import pytest
from pytest import Config, Parser


def pytest_addoption(parser: Parser) -> None:
    parser.addoption(
        "--external-data",
        action="store_true",
        default=False,
        help="run tests that require external data",
    )


def pytest_configure(config: Config) -> None:
    config.addinivalue_line(
        "markers",
        "external_data: marks tests requiring external data, "
        "and disables them by default (enable with --external-data)",
    )


def pytest_collection_modifyitems(config: Config, items: Any) -> None:
    if config.getoption("--external-data"):
        return
    skip_network_access = pytest.mark.skip(reason="need --external-data option to run")
    for item in items:
        if "external_data" in item.keywords:
            item.add_marker(skip_network_access)
3 changes: 3 additions & 0 deletions tests/ocean_heat_content/test_commands.py
@@ -1,5 +1,6 @@
from pathlib import Path

import pytest
from pystac import Collection, ItemCollection

from tests import run_command, test_data
@@ -16,6 +17,7 @@ def test_create_collection(tmp_path: Path) -> None:
collection.validate()


@pytest.mark.external_data
def test_create_items(tmp_path: Path) -> None:
destination = tmp_path / "item-collection.json"
infile = test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc")
@@ -38,6 +40,7 @@ def test_download() -> None:
assert result.exit_code == 0


@pytest.mark.external_data
def test_cogify(tmp_path: Path) -> None:
path = test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc")
result = run_command(f"noaa-cdr ocean-heat-content cogify {path} -o {tmp_path}")
12 changes: 11 additions & 1 deletion tests/ocean_heat_content/test_stac.py
@@ -9,7 +9,6 @@
from pystac.extensions.projection import ProjectionExtension
from pystac.extensions.raster import RasterExtension
from pystac.extensions.scientific import ScientificExtension

from stactools.noaa_cdr.ocean_heat_content import cog, stac

from .. import test_data
@@ -51,6 +50,7 @@ def test_create_collection() -> None:
collection.validate_all()


@pytest.mark.external_data
def test_create_items_one_netcdf(tmp_path: Path) -> None:
path = test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc")
items = stac.create_items([path], str(tmp_path))
@@ -88,6 +88,7 @@ def test_create_items_one_netcdf(tmp_path: Path) -> None:
item.validate()


@pytest.mark.external_data
def test_create_items_one_netcdf_cog_hrefs(tmp_path: Path) -> None:
path = test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc")
items = stac.create_items([path], str(tmp_path))
@@ -104,6 +105,7 @@ def test_create_items_one_netcdf_cog_hrefs(tmp_path: Path) -> None:
assert len(new_items) == len(items)


@pytest.mark.external_data
def test_create_items_two_netcdfs_same_items(tmp_path: Path) -> None:
paths = [
test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc"),
@@ -118,6 +120,7 @@ def test_create_items_two_netcdfs_same_items(tmp_path: Path) -> None:
item.validate()


@pytest.mark.external_data
def test_create_items_two_netcdfs_different_items() -> None:
paths = [
test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc"),
@@ -133,6 +136,7 @@ def test_create_items_two_netcdfs_different_items() -> None:
item.validate()


@pytest.mark.external_data
def test_create_items_one_netcdf_latest_only(tmp_path: Path) -> None:
path = test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc")
items = stac.create_items([path], str(tmp_path), latest_only=True)
@@ -155,6 +159,7 @@ def test_create_items_one_netcdf_latest_only(tmp_path: Path) -> None:
("mean_total_steric_sea_level_anomaly_0-2000_yearly.nc", 17),
],
)
@pytest.mark.external_data
def test_cogify(tmp_path: Path, infile: str, num_cogs: int) -> None:
external_data_path = test_data.get_external_data(infile)
cogs = cog.cogify(external_data_path, str(tmp_path))
@@ -165,6 +170,7 @@ def test_cogify(tmp_path: Path, infile: str, num_cogs: int) -> None:
assert Path(c.asset().href).exists()


@pytest.mark.external_data
def test_cogify_href(tmp_path: Path) -> None:
href = (
"https://www.ncei.noaa.gov/data/oceans/ncei/archive/data"
@@ -178,6 +184,7 @@ def test_cogify_href(tmp_path: Path) -> None:
assert Path(c.asset().href).exists()


@pytest.mark.external_data
def test_cogify_href_no_output_directory() -> None:
href = (
"https://www.ncei.noaa.gov/data/oceans/ncei/archive/data"
@@ -187,12 +194,14 @@ def test_cogify_href_no_output_directory() -> None:
cog.cogify(href)


@pytest.mark.external_data
def test_unitless(tmp_path: Path) -> None:
path = test_data.get_external_data("mean_salinity_anomaly_0-2000_yearly.nc")
cogs = cog.cogify(path, str(tmp_path))
assert "unit" not in cogs[0].asset().extra_fields["raster:bands"][0]


@pytest.mark.external_data
def test_cogify_cog_href(tmp_path: Path) -> None:
path = test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc")
cogs = cog.cogify(path, str(tmp_path))
@@ -222,6 +231,7 @@ def test_cogify_cog_href(tmp_path: Path) -> None:
("heat_content_anomaly_0-2000_seasonal.nc", 2005, "seasonal", 2000),
],
)
@pytest.mark.external_data
def test_create_netcdf_item(
infile: str, year: int, interval: str, max_depth: int
) -> None:
@@ -1,5 +1,6 @@
from pathlib import Path

import pytest
from pystac import Collection, Item

from tests import run_command, test_data
@@ -19,6 +20,7 @@ def test_create_collection(tmp_path: Path) -> None:
collection.validate()


@pytest.mark.external_data
def test_create_item(tmp_path: Path) -> None:
destination = tmp_path / "item.json"
infile = test_data.get_external_data("oisst-avhrr-v02r01.20220913.nc")
@@ -33,6 +35,7 @@ def test_create_item(tmp_path: Path) -> None:
item.validate()


@pytest.mark.external_data
def test_create_item_with_cogs(tmp_path: Path) -> None:
path = test_data.get_external_data("oisst-avhrr-v02r01.20220913.nc")
result = run_command(
@@ -1,12 +1,13 @@
import datetime
from pathlib import Path

import pytest
import stactools.noaa_cdr.stac
from dateutil.tz import tzutc
from pystac.extensions.projection import ProjectionExtension
from pystac.extensions.raster import RasterExtension

import stactools.noaa_cdr.stac
from stactools.noaa_cdr.sea_surface_temperature_optimum_interpolation import stac

from tests import test_data


@@ -20,6 +21,7 @@ def test_create_collection() -> None:
collection.validate()


@pytest.mark.external_data
def test_create_item() -> None:
path = test_data.get_external_data("oisst-avhrr-v02r01.20220913.nc")
item = stac.create_item(path)
@@ -58,6 +60,7 @@ def test_create_item() -> None:
item.validate()


@pytest.mark.external_data
def test_add_items(tmp_path: Path) -> None:
path = test_data.get_external_data("oisst-avhrr-v02r01.20220913.nc")
item = stac.create_item(path)
2 changes: 2 additions & 0 deletions tests/sea_surface_temperature_whoi/test_commands.py
@@ -1,10 +1,12 @@
from pathlib import Path

import pytest
from pystac import Collection, ItemCollection

from .. import run_command, test_data


@pytest.mark.external_data
def test_create_cog_items(tmp_path: Path) -> None:
path = test_data.get_external_data(
"SEAFLUX-OSB-CDR_V02R00_SST_D20210831_C20211223.nc"
3 changes: 2 additions & 1 deletion tests/sea_surface_temperature_whoi/test_stac.py
@@ -1,13 +1,14 @@
from pathlib import Path

import pytest
from pystac.extensions.raster import RasterExtension
from pystac.extensions.scientific import ScientificExtension

from stactools.noaa_cdr.sea_surface_temperature_whoi import stac

from .. import test_data


@pytest.mark.external_data
def test_create_cog_items(tmp_path: Path) -> None:
path = test_data.get_external_data(
"SEAFLUX-OSB-CDR_V02R00_SST_D20210831_C20211223.nc"
3 changes: 2 additions & 1 deletion tests/test_dataset.py
@@ -1,11 +1,11 @@
import pytest
import xarray

from stactools.noaa_cdr import dataset

from . import test_data


@pytest.mark.external_data
def test_data_variable_name() -> None:
path = test_data.get_external_data("heat_content_anomaly_0-2000_yearly.nc")
with xarray.open_dataset(path, decode_times=False) as ds:
@@ -23,6 +23,7 @@ def test_data_variable_name() -> None:
dataset.data_variable_name(ds)


@pytest.mark.external_data
def test_data_variable_names() -> None:
path = test_data.get_external_data("oisst-avhrr-v02r01.20220913.nc")
with xarray.open_dataset(path) as ds:
