From 6b9fe449053cde7007f49e98f398c0bcd16987e6 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 1 Nov 2024 16:27:32 -0600 Subject: [PATCH 1/8] CI: Clean up and modernize linting workflow --- .github/workflows/linting.yml | 30 +++++++++++++++++------------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index d6d2932d1..851bd3fbd 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -3,40 +3,44 @@ on: push: branches: [main] pull_request: + branches: [main] + +concurrency: + group: ${{ github.workflow}}-${{ github.head_ref }} + cancel-in-progress: true jobs: - flake8: - name: Flake8 + lint: + name: Run Lint Tools runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.x + cache: 'pip' + cache-dependency-path: 'ci/linting_requirements.txt' - - name: Install flake8 + - name: Install linting tools run: python -m pip install -r ci/linting_requirements.txt - name: Set up reviewdog - run: | - mkdir -p $HOME/bin - curl -sfL \ - https://github.com/reviewdog/reviewdog/raw/master/install.sh | \ - sh -s -- -b $HOME/bin - echo "$HOME/bin" >> $GITHUB_PATH + uses: reviewdog/action-setup@v1 - name: Run flake8 env: REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | + set -o pipefail flake8 | reviewdog -f=pep8 -name=flake8 -reporter=github-check -filter-mode=nofilter - name: Run doc8 # Don't skip doc8 if flake8 fails - if: ${{ always() }} + if: always() env: REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | - doc8 docs | reviewdog -efm='%f:%l: %m' -name=doc8 -reporter=github-check -filter-mode=nofilter + set -o pipefail + doc8 docs | reviewdog -efm='%f:%l: %m' -name=doc8 -reporter=github-check -filter-mode=nofilter \ No newline at end of file From cc5f2822f718f6bdc4d31334f0819c98356fde19 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 1 Nov 2024 16:53:48 -0600 Subject: [PATCH 2/8] MNT: Remove no-longer-needed setup.py Also update some docs around install, etc. --- docs/developerguide.rst | 17 ++++++----------- docs/installguide.rst | 18 ++++++------------ setup.py | 23 ----------------------- 3 files changed, 12 insertions(+), 46 deletions(-) delete mode 100644 setup.py diff --git a/docs/developerguide.rst b/docs/developerguide.rst index 99f4ec748..bfbd410ea 100644 --- a/docs/developerguide.rst +++ b/docs/developerguide.rst @@ -66,22 +66,17 @@ Testing Unit tests are the lifeblood of the project, as it ensures that we can continue to add and change the code and stay confident that things have not broken. Running the tests requires ``pytest``, which is easily available through ``conda`` or ``pip``. Running the tests can be -done via either: +done by: .. parsed-literal:: - python setup.py test + pytest tests -or - -.. parsed-literal:: - py.test - -Using ``py.test`` also gives you the option of passing a path to the directory with tests to +This gives you the option of passing a path to the directory with tests to run, which can speed running only the tests of interest when doing development. For instance, to only run the tests in the ``siphon/cdmr`` directory, use: .. parsed-literal:: - py.test siphon/cdmr + pytest siphon/cdmr ---------- Code Style @@ -142,7 +137,7 @@ To create a new release: 3. (optional) Perform a ``git clean -f -x -d`` from the root of the repository. 
This will **delete** everything not tracked by git, but will also ensure clean source distribution. ``MANIFEST.in`` is set to include/exclude mostly correctly, but could miss some things. -4. Run ``python setup.py sdist bdist_wheel`` (this requires ``wheel`` is installed). +4. Run ``python -m build`` (this requires that ``build`` is installed). 5. Upload using ``twine``: ``twine upload dist/*``, assuming the ``dist/`` directory contains only files for this release. This upload process will include any changes to the ``README`` - as well as any updated flags from ``setup.py``. + as well as any updated flags from ``pyproject.toml``. diff --git a/docs/installguide.rst b/docs/installguide.rst index d6dfa9021..1e235d44f 100644 --- a/docs/installguide.rst +++ b/docs/installguide.rst @@ -9,11 +9,10 @@ In general, Siphon tries to support minor versions of dependencies released with years. For Python itself, that means supporting the last two minor releases. Siphon currently supports the following versions of required dependencies: - - requests >= 1.2 - - numpy >= 1.8.0 - - protobuf >= 3.0.0 - - beautifulsoup4>=4.6 - - pandas + +.. literalinclude:: ../pyproject.toml + :start-at: beautifulsoup4 + :end-at: requests Installation Instructions for NumPy can be found at: https://numpy.org/install/ @@ -27,12 +26,7 @@ The easiest way to install Siphon is through ``pip``: .. parsed-literal:: pip install siphon -Siphon can also be installed through ``conda``: - -.. parsed-literal:: - conda install -c unidata siphon - -Additionally, Siphon can be installed with ``conda-forge``: +Siphon can also be installed through ``conda``, using the ``conda-forge`` channel: .. parsed-literal:: conda install -c conda-forge siphon @@ -41,7 +35,7 @@ The source code can also be grabbed from `GitHub Date: Fri, 1 Nov 2024 16:28:41 -0600 Subject: [PATCH 3/8] MNT: Switch to ruff as main linting tool Also switch over to isort for import order handling --- .github/workflows/linting.yml | 3 +++ ci/linting_requirements.txt | 21 ++++++---------- pyproject.toml | 40 +++++++++++++++++++++++++++++++ setup.cfg | 45 +++++++++-------------------------- src/siphon/__init__.py | 1 + 5 files changed, 62 insertions(+), 48 deletions(-) diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 851bd3fbd..3e3727922 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -29,6 +29,9 @@ jobs: - name: Set up reviewdog uses: reviewdog/action-setup@v1 + - name: Run ruff + run: ruff check --output-format github + - name: Run flake8 env: REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/ci/linting_requirements.txt b/ci/linting_requirements.txt index cdd8b3e93..8dfa68b07 100644 --- a/ci/linting_requirements.txt +++ b/ci/linting_requirements.txt @@ -1,22 +1,15 @@ +ruff==0.7.1 + flake8==5.0.4 pycodestyle==2.9.1 pyflakes==2.5.0 -flake8-bugbear==22.9.23 -flake8-builtins==1.5.3 -flake8-comprehensions==3.10.0 -flake8-copyright==0.2.3 -flake8-import-order==0.18.1 -flake8-mutable==1.2.0 -flake8-pep3101==1.3.0 -flake8-print==5.0.0 -flake8-quotes==3.3.1 -flake8-simplify==0.19.3 -pep8-naming==0.13.2 +flake8-continuation==1.0.5 +flake8-isort==6.1.1 +isort==5.13.2 +flake8-requirements==2.2.1 flake8-rst-docstrings==0.2.7 -flake8-docstrings==1.6.0 -pydocstyle==6.1.1 doc8==1.0.0 -restructuredtext_lint==1.4.0 +restructuredtext_lint==1.4.0 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index fdac8a76c..55c56cf15 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,5 +62,45 @@ 
extras = [ "Bug Tracker" = "https://github.com/Unidata/siphon/issues" "Source Code" = "https://github.com/Unidata/siphon" +[tool.ruff] +line-length = 95 +exclude = ["docs", "build", "src/siphon/cdmr/*_pb2.py"] +preview = true + +[tool.ruff.lint] +select = ["A", "B", "C", "CPY001", "D", "E", "E226", "F", "G", "I", "N", "NPY", "PIE", "Q", "R", "S", "SIM", "T", "U", "W"] +# NPY201 ignores the use of 'trapz' false alarm +ignore = ["F405", "I001", "NPY201", "RET504", "RET505", "RET506", "RET507", "RUF100", "S314"] +explicit-preview-rules = true + +[tool.ruff.lint.per-file-ignores] +"ci/filter_links.py" = ["E731", "T201", "S603", "S607"] +"examples/*.py" = ["D", "T201", "B018"] +"tests/*/*.py" = ["S101"] +"tests/test_*.py" = ["S101"] + +[tool.ruff.lint.flake8-copyright] +# Needed to avoid need for spaces after a comma +notice-rgx = "(?i)Copyright\\s+(\\(C\\)\\s+)?\\d{4}([-,]\\d{4})*" +author = "Siphon Contributors" + +[tool.ruff.lint.flake8-quotes] +inline-quotes = "single" +multiline-quotes = "double" + +[tool.ruff.lint.isort] +known-first-party = ["siphon"] +force-single-line = false +relative-imports-order = "closest-to-furthest" +force-sort-within-sections = true +order-by-type = false +combine-as-imports = true + +[tool.ruff.lint.mccabe] +max-complexity = 61 + +[tool.ruff.lint.pydocstyle] +convention = "numpy" + [tool.setuptools_scm] version_scheme = "post-release" \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index f97fd3b58..313082671 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,38 +1,15 @@ +[pycodestyle] +ignore = W503 +max-line-length = 95 + [flake8] max-line-length = 95 -application-import-names = siphon -import-order-style = google -copyright-check = True -copyright-author = Siphon Contributors -inline-quotes = single -multiline-quotes = double rst-roles = class, data, doc, func, meth, mod rst-directives = plot, versionchanged -docstring-convention = numpy -exclude = - docs - build - src/siphon/cdmr/ncStream_pb2.py - src/siphon/cdmr/cdmrfeature_pb2.py -select = A B C D E F H I M Q RST S T W B902 -ignore = F405 W503 RST902 SIM -per-file-ignores = examples/*.py: D T201 - tutorials/*.py: D T201 - -[tool:pytest] -norecursedirs = build docs - -[doc8] -ignore-path = docs/build,docs/api -max-line-length = 95 - -[bdist_wheel] -# This flag says that the code is written to work on both Python 2 and 3. 
-universal=1 - -[aliases] -test = pytest - -[yapf] -based_on_style = pep8 -column_limit = 90 +known-modules = netcdf4:[netCDF4] +exclude = docs build +select = E301 E302 E303 E304 E305 E306 I R +ignore = F405 W503 RST902 SIM106 +per-file-ignores = src/siphon/_version.py: I900 + src/siphon/testing.py: I900 + ci/filter_links.py: E731 diff --git a/src/siphon/__init__.py b/src/siphon/__init__.py index e3ae2d6cb..ff112fea9 100644 --- a/src/siphon/__init__.py +++ b/src/siphon/__init__.py @@ -5,5 +5,6 @@ # Version import needs to come first so everyone else can pull on import from ._version import get_version + __version__ = get_version() del get_version From 83e1cd16433a264233ca29bc2aa5e4930892af7c Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 1 Nov 2024 16:31:49 -0600 Subject: [PATCH 4/8] MNT: Clean up some lint found by ruff --- ci/download_cartopy_maps.py | 2 +- ci/filter_links.py | 2 +- ci/gen_versions_json.py | 4 +- pyproject.toml | 21 ++++++++++ setup.cfg | 2 +- src/siphon/catalog.py | 53 +++++++++++--------------- src/siphon/cdmr/coveragedataset.py | 2 +- src/siphon/cdmr/dataset.py | 17 +++------ src/siphon/cdmr/ncstream.py | 10 ++--- src/siphon/cdmr/xarray_support.py | 1 + src/siphon/http_util.py | 8 +--- src/siphon/metadata.py | 21 +++------- src/siphon/ncss.py | 9 +++-- src/siphon/ncss_dataset.py | 8 ++-- src/siphon/radarserver.py | 4 +- src/siphon/simplewebservice/acis.py | 15 ++++---- src/siphon/simplewebservice/igra2.py | 28 ++++++-------- src/siphon/simplewebservice/ndbc.py | 9 ++--- src/siphon/simplewebservice/wyoming.py | 9 ++--- tests/cdmr/test_cdmremote.py | 2 +- tests/cdmr/test_cdmremotefeature.py | 2 +- tests/cdmr/test_dataset.py | 6 +-- tests/test_catalog_access.py | 4 +- tests/test_http_util.py | 4 +- tests/test_iastate.py | 1 - tests/test_igra2.py | 5 +-- tests/test_ncss_dataset.py | 4 +- tests/test_ndbc.py | 1 - tests/test_radarsever.py | 3 +- tests/test_wyoming.py | 1 - 30 files changed, 117 insertions(+), 141 deletions(-) diff --git a/ci/download_cartopy_maps.py b/ci/download_cartopy_maps.py index 310eff218..1d32783b7 100755 --- a/ci/download_cartopy_maps.py +++ b/ci/download_cartopy_maps.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (c) 2021 MetPy Developers. +# Copyright (c) 2021 Siphon Contributors. """Explicitly download needed Cartopy maps.""" from cartopy.io import config, Downloader diff --git a/ci/filter_links.py b/ci/filter_links.py index c03e791bc..86a95b848 100755 --- a/ci/filter_links.py +++ b/ci/filter_links.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -# Copyright (c) 2021 MetPy Developers. +# Copyright (c) 2021 Siphon Contributors. 
"""Filter links from Sphinx's linkcheck.""" import json import subprocess diff --git a/ci/gen_versions_json.py b/ci/gen_versions_json.py index 3200859c3..fb35a209d 100755 --- a/ci/gen_versions_json.py +++ b/ci/gen_versions_json.py @@ -6,6 +6,6 @@ import glob -with open('versions.json', 'wt') as version_file: - version_strings = ','.join('"{}"'.format(d) for d in glob.glob('v*.[0-9]*')) +with open('versions.json', 'w') as version_file: + version_strings = ','.join(f'"{d}"' for d in glob.glob('v*.[0-9]*')) version_file.write('{"versions":["latest","dev",' + version_strings + ']}\n') diff --git a/pyproject.toml b/pyproject.toml index 55c56cf15..662c958db 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,27 @@ extras = [ "Bug Tracker" = "https://github.com/Unidata/siphon/issues" "Source Code" = "https://github.com/Unidata/siphon" +[tool.doc8] +ignore-path = ["docs/build", "docs/api/generated", "docs/_templates", "docs/examples"] +file-encoding = "utf8" +max-line-length = 95 + +[tool.isort] +line_length = 95 +known_first_party = ["siphon"] +force_single_line = false +reverse_relative = true +use_parentheses = true +force_sort_within_sections = true +order_by_type = false +sort_relative_in_force_sorted_sections = true +combine_as_imports = true +combine_star = true + +[tool.pytest.ini_options] +norecursedirs = "build docs .idea" +doctest_optionflags = "NORMALIZE_WHITESPACE" + [tool.ruff] line-length = 95 exclude = ["docs", "build", "src/siphon/cdmr/*_pb2.py"] diff --git a/setup.cfg b/setup.cfg index 313082671..277e53af3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,7 +7,7 @@ max-line-length = 95 rst-roles = class, data, doc, func, meth, mod rst-directives = plot, versionchanged known-modules = netcdf4:[netCDF4] -exclude = docs build +exclude = docs build src/siphon/cdmr/*_pb2.py select = E301 E302 E303 E304 E305 E306 I R ignore = F405 W503 RST902 SIM106 per-file-ignores = src/siphon/_version.py: I900 diff --git a/src/siphon/catalog.py b/src/siphon/catalog.py index 820c2547e..5c9950a7d 100644 --- a/src/siphon/catalog.py +++ b/src/siphon/catalog.py @@ -11,13 +11,9 @@ from datetime import datetime import logging import re +from urllib.parse import urljoin, urlparse import warnings import xml.etree.ElementTree as ET # noqa:N814 -try: - from urlparse import urljoin, urlparse -except ImportError: - # Python 3 - from urllib.parse import urljoin, urlparse from .http_util import session_manager from .metadata import TDSCatalogMetadata @@ -47,10 +43,7 @@ class DatasetCollection(IndexableMapping): def _get_datasets_with_times(self, regex, strptime=None): # Set the default regex if we don't have one # If strptime is provided, pass the regex group named 'strptime' to strptime - if regex is None: - regex = self.default_regex - else: - regex = re.compile(regex) + regex = self.default_regex if regex is None else re.compile(regex) # Loop over the collection looking for keys that match our regex found_date = False @@ -149,7 +142,7 @@ def filter_time_range(self, start, end, regex=None, strptime=None): """ if start > end: warnings.warn('The provided start time comes after the end time. No data will ' - 'be returned.', UserWarning) + 'be returned.', UserWarning, stacklevel=2) return [item[-1] for item in self._get_datasets_with_times(regex, strptime) if start <= item[0] <= end] @@ -289,8 +282,8 @@ def __init__(self, catalog_url): if 'html' in resp.headers['content-type']: import warnings new_url = self.catalog_url.replace('html', 'xml') - warnings.warn('URL {} returned HTML. 
Changing to: {}'.format(self.catalog_url, - new_url)) + warnings.warn(f'URL {self.catalog_url} returned HTML. Changing to: {new_url}', + stacklevel=2) self.catalog_url = new_url resp = self.session.get(self.catalog_url) resp.raise_for_status() @@ -362,9 +355,8 @@ def __del__(self): def _process_dataset(self, element): catalog_url = '' - if 'urlPath' in element.attrib: - if element.attrib['urlPath'] == 'latest.xml': - catalog_url = self.catalog_url + if 'urlPath' in element.attrib and element.attrib['urlPath'] == 'latest.xml': + catalog_url = self.catalog_url ds = Dataset(element, catalog_url=catalog_url) self.datasets[ds.name] = ds @@ -524,24 +516,21 @@ def resolve_url(self, catalog_url): resolver_xml = session_manager.urlopen(resolver_url) tree = ET.parse(resolver_xml) root = tree.getroot() - if 'name' in root.attrib: - self.catalog_name = root.attrib['name'] - else: - self.catalog_name = 'No name found' + self.catalog_name = root.attrib.get('name', 'No name found') resolved_url = '' found = False for child in root.iter(): if not found: tag_type = child.tag.split('}')[-1] - if tag_type == 'dataset': - if 'urlPath' in child.attrib: - ds = Dataset(child) - resolved_url = ds.url_path - found = True + if tag_type == 'dataset' and 'urlPath' in child.attrib: + ds = Dataset(child) + resolved_url = ds.url_path + found = True if found: return resolved_url else: log.warning('no dataset url path found in latest.xml!') + return None def make_access_urls(self, catalog_url, all_services, metadata=None): """Make fully qualified urls for the access methods enabled on the dataset. @@ -611,9 +600,8 @@ def download(self, filename=None): """ if filename is None: filename = self.name - with self.remote_open() as infile: - with open(filename, 'wb') as outfile: - outfile.write(infile.read()) + with self.remote_open() as infile, open(filename, 'wb') as outfile: + outfile.write(infile.read()) def remote_open(self, mode='b', encoding='ascii', errors='ignore'): """Open the remote dataset for random access. 
@@ -727,7 +715,8 @@ def access_with_service(self, service, use_xarray=None):
                     import xarray as xr
                     provider = lambda url: xr.open_dataset(CDMRemoteStore(url))  # noqa: E731
                 except ImportError:
-                    raise ImportError('CdmRemote access needs xarray to be installed.')
+                    raise ImportError('CdmRemote access needs xarray '
+                                      'to be installed.') from None
             else:
                 from .cdmr import Dataset as CDMRDataset
                 provider = CDMRDataset
@@ -737,13 +726,15 @@
                     import xarray as xr
                     provider = xr.open_dataset
                 except ImportError:
-                    raise ImportError('xarray needs to be installed if `use_xarray` is True.')
+                    raise ImportError('xarray needs to be installed if '
+                                      '`use_xarray` is True.') from None
             else:
                 try:
                     from netCDF4 import Dataset as NC4Dataset
                     provider = NC4Dataset
                 except ImportError:
-                    raise ImportError('OPENDAP access needs netCDF4-python to be installed.')
+                    raise ImportError('OPENDAP access needs netCDF4-python '
+                                      'to be installed.') from None
         elif service in self.ncss_service_names:
             from .ncss import NCSS
             provider = NCSS
@@ -755,7 +746,7 @@
         try:
             return provider(self.access_urls[service])
         except KeyError:
-            raise ValueError(service + ' is not available for this dataset')
+            raise ValueError(service + ' is not available for this dataset') from None

     __repr__ = __str__

diff --git a/src/siphon/cdmr/coveragedataset.py b/src/siphon/cdmr/coveragedataset.py
index 599c434ce..5c1ad533e 100644
--- a/src/siphon/cdmr/coveragedataset.py
+++ b/src/siphon/cdmr/coveragedataset.py
@@ -29,7 +29,7 @@ def __init__(self, url):
         """Initialize CoverageDataset from a url pointing to CDMRemoteFeature endpoint."""
         super().__init__()
         warnings.warn('CoverageDataset is in early development, unsupported, and API may '
-                      'change at any time.')
+                      'change at any time.', stacklevel=2)
         self.cdmrf = CDMRemoteFeature(url)
         self.name = 'Unnamed'
         self.lon_lat_domain = None
diff --git a/src/siphon/cdmr/dataset.py b/src/siphon/cdmr/dataset.py
index 24d9299b0..c62e8e66e 100644
--- a/src/siphon/cdmr/dataset.py
+++ b/src/siphon/cdmr/dataset.py
@@ -223,10 +223,9 @@ def _process_indices(self, ind):
         except TypeError:
             ind = [ind]

-        # Make sure we don't have too many things to index
-        if len(ind) > self.ndim:
-            # But allow a full slice on a scalar variable
-            if not (self.ndim == 0 and len(ind) == 1 and ind[0] == slice(None)):
+        # Make sure we don't have too many things to index, but allow a full slice on
+        # a scalar variable
+        if len(ind) > self.ndim and (self.ndim != 0 or len(ind) != 1 or ind[0] != slice(None)):
             raise IndexError('Too many dimensions to index.')

         # Expand to a slice/ellipsis for every dimension
@@ -260,15 +259,9 @@

             # Adjust start and stop to handle negative indexing
             # and partial support for slicing beyond end.
-            if i.start is None:
-                start = 0
-            else:
-                start = self._adjust_index(dim, i.start)
+            start = 0 if i.start is None else self._adjust_index(dim, i.start)

-            if i.stop is None:
-                stop = self.shape[dim]
-            else:
-                stop = self._adjust_index(dim, i.stop)
+            stop = self.shape[dim] if i.stop is None else self._adjust_index(dim, i.stop)

             # Need to create new slice for adjusted values
             ind[dim] = slice(start, stop, i.step)
diff --git a/src/siphon/cdmr/ncstream.py b/src/siphon/cdmr/ncstream.py
index 9f52dd243..e74f5a5cd 100644
--- a/src/siphon/cdmr/ncstream.py
+++ b/src/siphon/cdmr/ncstream.py
@@ -10,8 +10,7 @@

 import numpy as np

-from . import cdmrfeature_pb2 as cdmrf
-from . 
import ncStream_pb2 as stream # noqa +from . import cdmrfeature_pb2 as cdmrf, ncStream_pb2 as stream MAGIC_HEADER = b'\xad\xec\xce\xda' MAGIC_DATA = b'\xab\xec\xce\xba' @@ -58,7 +57,9 @@ def read_ncstream_data(fobj): # Handle decompressing the bytes if data.compress == stream.DEFLATE: bin_data = zlib.decompress(bin_data) - assert len(bin_data) == data.uncompressedSize + if len(bin_data) != data.uncompressedSize: + log.error('Uncompressed size mismatch %d vs. %d', len(bin_data), + data.uncompressedSize) elif data.compress != stream.NONE: raise NotImplementedError(f'Compression type {data.compress} not implemented!') @@ -140,8 +141,7 @@ def read_messages(fobj, magic_table): if func is not None: messages.append(func(fobj)) else: - log.error('Unknown magic: ' + str(' '.join(f'{b: 02x}' - for b in bytearray(magic)))) + log.error('Unknown magic: %s', ' '.join(f'{b: 02x}' for b in bytearray(magic))) return messages diff --git a/src/siphon/cdmr/xarray_support.py b/src/siphon/cdmr/xarray_support.py index d956621dd..698a8f751 100644 --- a/src/siphon/cdmr/xarray_support.py +++ b/src/siphon/cdmr/xarray_support.py @@ -6,6 +6,7 @@ from xarray import Variable from xarray.backends.common import AbstractDataStore, BackendArray from xarray.core import indexing + try: from xarray.core.utils import FrozenDict except ImportError: diff --git a/src/siphon/http_util.py b/src/siphon/http_util.py index 1bc5d630d..ff859cee6 100644 --- a/src/siphon/http_util.py +++ b/src/siphon/http_util.py @@ -9,12 +9,8 @@ from io import BytesIO from itertools import chain import posixpath +from urllib.parse import urlencode, urljoin # noqa: F401 import warnings -try: - from urllib.parse import urlencode, urljoin # noqa -except ImportError: - from urllib import urlencode - from urlparse import urljoin # noqa import requests @@ -324,7 +320,7 @@ def time_range(self, start, end): """ if start > end: warnings.warn('The provided start time comes after the end time. 
No data will ' - 'be returned.', UserWarning) + 'be returned.', UserWarning, stacklevel=2) self._set_query(self.time_query, time_start=self._format_time(start), time_end=self._format_time(end)) return self diff --git a/src/siphon/metadata.py b/src/siphon/metadata.py index dc22a3a08..4a2d70dd6 100644 --- a/src/siphon/metadata.py +++ b/src/siphon/metadata.py @@ -166,10 +166,7 @@ def handle_dataType(self, element): # noqa class _ComplexTypes: @staticmethod def _get_tag_name(element): - if '}' in element.tag: - element_name = element.tag.split('}')[-1] - else: - element_name = element.tag + element_name = element.tag.split('}')[-1] if '}' in element.tag else element.tag return element_name @staticmethod @@ -468,10 +465,7 @@ def __init__(self, element, metadata_in=None): inherited = False if 'inherited' in element.attrib: inherited = element.attrib['inherited'] - if inherited == 'true': - inherited = True - else: - inherited = False + inherited = inherited == 'true' if metadata_in and (inherited or self._is_external_metadata_doc(element)): # only inherit metadata passed in if the new metadata @@ -491,10 +485,7 @@ def __init__(self, element, metadata_in=None): @staticmethod def _get_tag_name(element): - if '}' in element.tag: - element_name = element.tag.split('}')[-1] - else: - element_name = element.tag + element_name = element.tag.split('}')[-1] if '}' in element.tag else element.tag return element_name @staticmethod @@ -512,6 +503,7 @@ def _get_handler(self, handler_name): return getattr(self._st, handler_name) else: log.warning('cannot find handler for element %s', handler_name) + return None def _parse_element(self, element): @@ -572,10 +564,7 @@ def _parse_documentation(self, element): md = self.metadata md.setdefault('documentation', {}) if known or plain_doc: - if known: - doc_type = element.attrib['type'] - else: - doc_type = 'generic' + doc_type = element.attrib['type'] if known else 'generic' md['documentation'].setdefault(doc_type, []).append(element.text) elif xlink_href_attr in element.attrib: title = element.attrib[xlink_title_attr] diff --git a/src/siphon/ncss.py b/src/siphon/ncss.py index 71dbb2889..b30fde1a6 100644 --- a/src/siphon/ncss.py +++ b/src/siphon/ncss.py @@ -347,7 +347,7 @@ def combine_xml_points(seq, units, handle_units): def parse_xml_dataset(elem, handle_units): """Create a netCDF-like dataset from XML data.""" - points, units = zip(*[parse_xml_point(p) for p in elem.findall('point')]) + points, units = zip(*[parse_xml_point(p) for p in elem.findall('point')], strict=False) # Group points by the contents of each point datasets = {} for p in points: @@ -359,9 +359,10 @@ def parse_xml_dataset(elem, handle_units): # Handling of netCDF 3/4 from NCSS try: - from netCDF4 import Dataset from tempfile import NamedTemporaryFile + from netCDF4 import Dataset + @response_handlers.register('application/x-netcdf') @response_handlers.register('application/x-netcdf4') def read_netcdf(data, handle_units): # pylint:disable=unused-argument @@ -381,7 +382,7 @@ def read_netcdf(data, handle_units): # pylint:disable=unused-argument except ImportError: import warnings warnings.warn('netCDF4 module not installed. ' - 'Will be unable to handle NetCDF returns from NCSS.') + 'Will be unable to handle NetCDF returns from NCSS.', stacklevel=2) def deletetempfile(fname): @@ -395,7 +396,7 @@ def deletetempfile(fname): import warnings warnings.warn('temporary netcdf dataset file not deleted. 
' 'to delete temporary dataset file in the future ' - 'be sure to use dataset.close() when finished.') + 'be sure to use dataset.close() when finished.', stacklevel=2) # Parsing of CSV data returned from NCSS diff --git a/src/siphon/ncss_dataset.py b/src/siphon/ncss_dataset.py index 2384a82ce..9a2d7e1a3 100644 --- a/src/siphon/ncss_dataset.py +++ b/src/siphon/ncss_dataset.py @@ -91,7 +91,7 @@ def handle_typed_values(val, type_name, value_type): for potential_bool in val: if potential_bool not in ['true', 'false']: raise ValueError - val = [True if item == 'true' else False for item in val] + val = [item == 'true' for item in val] except ValueError: msg = 'Cannot convert values %s to boolean.' msg += ' Keeping type as str.' @@ -201,6 +201,7 @@ def lookup(self, handler_name): return getattr(self, handler_name) else: log.warning('cannot find handler for element %s', handler_name) + return None class NCSSDataset: @@ -281,9 +282,8 @@ def __init__(self, element): things_to_del = [] for thing in self.__dict__: - if not (thing.startswith('_') or thing.startswith('__')): - if not getattr(self, thing): - things_to_del.append(thing) + if not (thing.startswith(('_', '__'))) and not getattr(self, thing): + things_to_del.append(thing) for thing in things_to_del: delattr(self, thing) diff --git a/src/siphon/radarserver.py b/src/siphon/radarserver.py index 6f3fe666b..45b9ce00b 100644 --- a/src/siphon/radarserver.py +++ b/src/siphon/radarserver.py @@ -157,8 +157,8 @@ def get_catalog(self, query): url = self._base[:-1] if self._base[-1] == '/' else self._base url += '?' + str(query) return TDSCatalog(url) - except ET.ParseError: - raise BadQueryError(self.get_catalog_raw(query)) + except ET.ParseError as e: + raise BadQueryError(self.get_catalog_raw(query)) from e def get_catalog_raw(self, query): """Fetch THREDDS catalog XML from the radar server. diff --git a/src/siphon/simplewebservice/acis.py b/src/siphon/simplewebservice/acis.py index 045a3d5c7..a7b8cbe07 100644 --- a/src/siphon/simplewebservice/acis.py +++ b/src/siphon/simplewebservice/acis.py @@ -54,16 +54,15 @@ def acis_request(method, params): response = session_manager.create_session().post(base_url + method, json=params, timeout=timeout) return response.json() - except requests.exceptions.Timeout: - raise AcisApiException('Connection Timeout') - except requests.exceptions.TooManyRedirects: - raise AcisApiException('Bad URL. Check your ACIS connection method string.') - except ValueError: + except requests.exceptions.Timeout as e: + raise AcisApiException('Connection Timeout') from e + except requests.exceptions.TooManyRedirects as e: + raise AcisApiException('Bad URL. Check your ACIS connection method string.') from e + except ValueError as e: raise AcisApiException('No data returned! 
The ACIS parameter dictionary' - 'may be incorrectly formatted') + 'may be incorrectly formatted') from e -class AcisApiException(Exception): +class AcisApiException(Exception): # noqa: N818 """Handle exceptions raised by the acis_request function.""" - pass diff --git a/src/siphon/simplewebservice/igra2.py b/src/siphon/simplewebservice/igra2.py index 673ecae82..deafbe2f8 100644 --- a/src/siphon/simplewebservice/igra2.py +++ b/src/siphon/simplewebservice/igra2.py @@ -4,8 +4,7 @@ """Read upper air data from the Integrated Global Radiosonde Archive version 2.""" import datetime -from io import BytesIO -from io import StringIO +from io import BytesIO, StringIO import itertools import sys import warnings @@ -60,7 +59,7 @@ def request_data(cls, time, site_id, derived=False): igra2.folder = 'data/data-por/' igra2.suffix = igra2.suffix + '-data.txt' - if type(time) == datetime.datetime: + if isinstance(time, datetime.datetime): igra2.begin_date = time igra2.end_date = time else: @@ -109,19 +108,16 @@ def _get_data_raw(self): # Get the data and handle if there is none matching what was requested try: resp = self.get_path(path) - except HTTPError: - raise ValueError('No data available for {time:%Y-%m-%d %HZ} ' - 'for station {stid}.'.format(time=self.begin_date, - stid=self.site_id)) + except HTTPError as e: + raise ValueError(f'No data available for {self.begin_date:%Y-%m-%d %HZ} ' + f'for station {self.site_id}.') from e file_info = ZipFile(BytesIO(resp.content)).infolist()[0] - f = ZipFile(BytesIO(resp.content)).open(file_info) - - lines = [line.decode('utf-8') for line in f.readlines()] - - body, header, dates_long, dates = self._select_date_range(lines) - - return body, header, dates_long, dates + with ZipFile(BytesIO(resp.content)) as zf: + f = zf.open(file_info) + lines = [line.decode('utf-8') for line in f.readlines()] + body, header, dates_long, dates = self._select_date_range(lines) + return body, header, dates_long, dates def _select_date_range(self, lines): """Identify lines containing headers within the range begin_date to end_date. 
@@ -206,9 +202,7 @@ def _ctime(strformat='MMMSS'): def _ctime_strformat(val): time = val.strip().zfill(5) - if int(time) < 0: - return np.nan - elif int(time) == 9999: + if int(time) < 0 or int(time) == 9999: return np.nan else: if strformat == 'MMMSS': diff --git a/src/siphon/simplewebservice/ndbc.py b/src/siphon/simplewebservice/ndbc.py index 9e19476ae..fcf6331dd 100644 --- a/src/siphon/simplewebservice/ndbc.py +++ b/src/siphon/simplewebservice/ndbc.py @@ -12,7 +12,7 @@ from ..http_util import HTTPEndPoint -warnings.filterwarnings('ignore', "Pandas doesn\'t allow columns to be created", UserWarning) +warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created", UserWarning) class NDBC(HTTPEndPoint): @@ -472,11 +472,8 @@ def _check_if_url_valid(url): bool if url is valid """ - r = requests.head(url) - if r.status_code == 200: - return True - else: - return False + r = requests.head(url, timeout=300) + return r.status_code == 200 @classmethod def buoy_data_types(cls, buoy): diff --git a/src/siphon/simplewebservice/wyoming.py b/src/siphon/simplewebservice/wyoming.py index 305ba8e0c..d8f0e8f89 100644 --- a/src/siphon/simplewebservice/wyoming.py +++ b/src/siphon/simplewebservice/wyoming.py @@ -14,7 +14,6 @@ from .._tools import get_wind_components from ..http_util import HTTPEndPoint - warnings.filterwarnings('ignore', "Pandas doesn't allow columns to be created", UserWarning) @@ -140,14 +139,14 @@ def _get_data_raw(self, time, site_id): """ path = ('?region=naconf&TYPE=TEXT%3ALIST' - '&YEAR={time:%Y}&MONTH={time:%m}&FROM={time:%d%H}&TO={time:%d%H}' - '&STNM={stid}').format(time=time, stid=site_id) + f'&YEAR={time:%Y}&MONTH={time:%m}&FROM={time:%d%H}&TO={time:%d%H}' + f'&STNM={site_id}') resp = self.get_path(path) # See if the return is valid, but has no data if resp.text.find("Can't") != -1: raise ValueError( - 'No data available for {time:%Y-%m-%d %HZ} ' - 'for station {stid}.'.format(time=time, stid=site_id)) + f'No data available for {time:%Y-%m-%d %HZ} ' + f'for station {site_id}.') return resp.text diff --git a/tests/cdmr/test_cdmremote.py b/tests/cdmr/test_cdmremote.py index 72019ca0b..1cd45b29f 100644 --- a/tests/cdmr/test_cdmremote.py +++ b/tests/cdmr/test_cdmremote.py @@ -9,7 +9,7 @@ recorder = get_recorder(__file__) -class TestCDMRmote(object): +class TestCDMRmote: """Test the CDMRemote HTTP interface.""" def setup(self): diff --git a/tests/cdmr/test_cdmremotefeature.py b/tests/cdmr/test_cdmremotefeature.py index 91f7d818e..dda791cf4 100644 --- a/tests/cdmr/test_cdmremotefeature.py +++ b/tests/cdmr/test_cdmremotefeature.py @@ -11,7 +11,7 @@ recorder = get_recorder(__file__) -class TestCDMRemoteFeature(object): +class TestCDMRemoteFeature: """Test the CDMRemoteFeature HTTP interface.""" @recorder.use_cassette('cdmrf_header') diff --git a/tests/cdmr/test_dataset.py b/tests/cdmr/test_dataset.py index 0f71e5e15..120e841f7 100644 --- a/tests/cdmr/test_dataset.py +++ b/tests/cdmr/test_dataset.py @@ -19,7 +19,7 @@ def get_fixed_url(): 'grib/NCEP/RAP/CONUS_13km/RR_CONUS_13km_20150518_1200.grib2/GC') -class TestDataset(object): +class TestDataset: """Test basic Dataset functionality.""" @classmethod @@ -387,7 +387,7 @@ def test_var_print(): assert s == truth -class TestIndexing(object): +class TestIndexing: """Test indexing on a variable makes the correct request.""" @classmethod @@ -445,7 +445,7 @@ def test_all_indices(self): def test_slice_to_end(self): """Test slicing to the end of a dimension.""" subset = self.var[0, 0, :3, :] - assert subset.shape, (3 == 
self.var.shape[-1]) + assert subset.shape, (self.var.shape[-1] == 3) @recorder.use_cassette('rap_ncstream_slice_beyond_end') def test_slices_long(self): diff --git a/tests/test_catalog_access.py b/tests/test_catalog_access.py index f2b117e1d..1e1b93c1d 100644 --- a/tests/test_catalog_access.py +++ b/tests/test_catalog_access.py @@ -174,12 +174,12 @@ def test_case_insensitive_access(caplog): cat = TDSCatalog(url) access_name = list(cat.datasets[0].access_urls.keys())[0] assert access_name == 'HTTPSERVER' # test __eq__ - assert not access_name != 'HTTPSERVER' # test __eq__ + assert access_name == 'HTTPSERVER' # test __eq__ assert access_name > 'a' # test __gt__ assert access_name >= 'a' # test __ge__ assert access_name < 'Z' # test __lt__ assert access_name <= 'Z' # test __le__ - assert not access_name == 1 # test fail on _try_lower + assert access_name != 1 # test fail on _try_lower assert 'Could not convert 1 to lowercase.' in caplog.text diff --git a/tests/test_http_util.py b/tests/test_http_util.py index 188301f03..f3b4b4754 100644 --- a/tests/test_http_util.py +++ b/tests/test_http_util.py @@ -7,8 +7,8 @@ import pytest -from siphon.http_util import (DataQuery, HTTPEndPoint, HTTPError, - parse_iso_date, session_manager, utc) +from siphon.http_util import (DataQuery, HTTPEndPoint, HTTPError, parse_iso_date, + session_manager, utc) import siphon.testing recorder = siphon.testing.get_recorder(__file__) diff --git a/tests/test_iastate.py b/tests/test_iastate.py index c8e252c8e..b8335cdd7 100644 --- a/tests/test_iastate.py +++ b/tests/test_iastate.py @@ -11,7 +11,6 @@ from siphon.simplewebservice.iastate import IAStateUpperAir from siphon.testing import get_recorder - recorder = get_recorder(__file__) diff --git a/tests/test_igra2.py b/tests/test_igra2.py index 789686c1c..58052ae1f 100644 --- a/tests/test_igra2.py +++ b/tests/test_igra2.py @@ -11,7 +11,6 @@ from siphon.simplewebservice.igra2 import IGRAUpperAir from siphon.testing import get_recorder - recorder = get_recorder(__file__) @@ -61,7 +60,7 @@ def subsetter(response): before_record_response=subset_date(datetime(2010, 6, 1))) def test_igra2(): """Test that we are properly parsing data from the IGRA2 archive.""" - df, header = IGRAUpperAir.request_data(datetime(2010, 6, 1, 12), 'USM00070026') + df, _header = IGRAUpperAir.request_data(datetime(2010, 6, 1, 12), 'USM00070026') assert_almost_equal(df['lvltyp1'][5], 1, 1) assert_almost_equal(df['lvltyp2'][5], 0, 1) @@ -94,7 +93,7 @@ def test_igra2(): before_record_response=subset_date(datetime(2014, 9, 10))) def test_igra2_drvd(): """Test that we are properly parsing data from the IGRA2 archive.""" - df, header = IGRAUpperAir.request_data(datetime(2014, 9, 10, 0), + df, _header = IGRAUpperAir.request_data(datetime(2014, 9, 10, 0), 'USM00070026', derived=True) assert_almost_equal(df['pressure'][5], 947.43, 2) diff --git a/tests/test_ncss_dataset.py b/tests/test_ncss_dataset.py index f35a8a41f..592621441 100644 --- a/tests/test_ncss_dataset.py +++ b/tests/test_ncss_dataset.py @@ -119,7 +119,7 @@ def test_attribute_float(self): """Test parsing a float value attribute.""" xml = '' element = ET.fromstring(xml) - expected = {'missing_value': [float(-999.0)]} + expected = {'missing_value': [-999.0]} actual = self.types.handle_attribute(element) assert expected == actual @@ -210,7 +210,7 @@ def test_attribute_boolean_invalid(self, caplog): element = ET.fromstring(xml) expected = {'missing_value': ['a']} actual = self.types.handle_attribute(element) - assert "Cannot convert values [\'a\'] to 
boolean. Keeping type as str." in caplog.text + assert "Cannot convert values ['a'] to boolean. Keeping type as str." in caplog.text assert expected == actual def test_value_1(self): diff --git a/tests/test_ndbc.py b/tests/test_ndbc.py index c0f3b3703..f42d7384b 100644 --- a/tests/test_ndbc.py +++ b/tests/test_ndbc.py @@ -13,7 +13,6 @@ from siphon.simplewebservice.ndbc import NDBC from siphon.testing import get_recorder - recorder = get_recorder(__file__) diff --git a/tests/test_radarsever.py b/tests/test_radarsever.py index 58d4e5d90..1a2f64476 100644 --- a/tests/test_radarsever.py +++ b/tests/test_radarsever.py @@ -8,8 +8,7 @@ import pytest from requests import HTTPError -from siphon.radarserver import (BadQueryError, get_radarserver_datasets, RadarQuery, - RadarServer) +from siphon.radarserver import BadQueryError, get_radarserver_datasets, RadarQuery, RadarServer import siphon.testing recorder = siphon.testing.get_recorder(__file__) diff --git a/tests/test_wyoming.py b/tests/test_wyoming.py index 44b58a7d0..3a648cca7 100644 --- a/tests/test_wyoming.py +++ b/tests/test_wyoming.py @@ -11,7 +11,6 @@ from siphon.simplewebservice.wyoming import WyomingUpperAir from siphon.testing import get_recorder - recorder = get_recorder(__file__) From 7649560043431653be251de43df4b29d67271934 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Mon, 4 Nov 2024 14:12:45 -0700 Subject: [PATCH 5/8] MNT: Fix doc8 lint error --- docs/api/simplewebservice.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api/simplewebservice.rst b/docs/api/simplewebservice.rst index e4196272a..de82aa905 100644 --- a/docs/api/simplewebservice.rst +++ b/docs/api/simplewebservice.rst @@ -39,4 +39,4 @@ =================================== .. automodule:: siphon.simplewebservice.ndbc :members: - :special-members: __init__ \ No newline at end of file + :special-members: __init__ From 45a730c77b69d68901ba41dccca567dfb70cc739 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Fri, 1 Nov 2024 16:32:15 -0600 Subject: [PATCH 6/8] MNT: Add codespell configuration and infrastructure Adds codespell to our collection of linting tools. 
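The [tool.codespell] table added to pyproject.toml carries the skip list and points at the new .codespellexclude/.codespellignore files, so a bare 'codespell' run from the repository root picks up the same settings as the CI check. The single entry in .codespellignore exempts 'NAM' (the North American Mesoscale model, which appears in Siphon's docs and examples) from spell checking.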
--- .codespellexclude | 0 .codespellignore | 1 + .github/workflows/linting.yml | 11 ++++++++++- ci/linting_requirements.txt | 4 +++- pyproject.toml | 5 +++++ 5 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 .codespellexclude create mode 100644 .codespellignore diff --git a/.codespellexclude b/.codespellexclude new file mode 100644 index 000000000..e69de29bb diff --git a/.codespellignore b/.codespellignore new file mode 100644 index 000000000..e6280f217 --- /dev/null +++ b/.codespellignore @@ -0,0 +1 @@ +NAM \ No newline at end of file diff --git a/.github/workflows/linting.yml b/.github/workflows/linting.yml index 3e3727922..c17369f44 100644 --- a/.github/workflows/linting.yml +++ b/.github/workflows/linting.yml @@ -46,4 +46,13 @@ jobs: REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | set -o pipefail - doc8 docs | reviewdog -efm='%f:%l: %m' -name=doc8 -reporter=github-check -filter-mode=nofilter \ No newline at end of file + doc8 docs | reviewdog -efm='%f:%l: %m' -name=doc8 -reporter=github-check -filter-mode=nofilter + + - name: Run codespell + # Don't skip codespell if any other steps fail + if: always() + env: + REVIEWDOG_GITHUB_API_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + set -o pipefail + codespell | reviewdog -efm='%f:%l: %m' -name=codespell -reporter=github-check -filter-mode=nofilter \ No newline at end of file diff --git a/ci/linting_requirements.txt b/ci/linting_requirements.txt index 8dfa68b07..72b7325fd 100644 --- a/ci/linting_requirements.txt +++ b/ci/linting_requirements.txt @@ -12,4 +12,6 @@ flake8-requirements==2.2.1 flake8-rst-docstrings==0.2.7 doc8==1.0.0 -restructuredtext_lint==1.4.0 \ No newline at end of file +restructuredtext_lint==1.4.0 + +codespell==2.3.0 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 662c958db..601b92b14 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -62,6 +62,11 @@ extras = [ "Bug Tracker" = "https://github.com/Unidata/siphon/issues" "Source Code" = "https://github.com/Unidata/siphon" +[tool.codespell] +skip = "*.ipynb,*.pdf,.git,./docs/build,./docs/examples,./tests/fixtures,./tests/cdmr/fixtures,AUTHORS.txt" +exclude-file = ".codespellexclude" +ignore-words = ".codespellignore" + [tool.doc8] ignore-path = ["docs/build", "docs/api/generated", "docs/_templates", "docs/examples"] file-encoding = "utf8" From 7957836d82bf1ffed76d7e5a3a3880a7c95e464f Mon Sep 17 00:00:00 2001 From: Ryan May Date: Mon, 4 Nov 2024 14:23:19 -0700 Subject: [PATCH 7/8] MNT: Fix typos found by codespell --- examples/acis/Basic_Overview.py | 2 +- src/siphon/cdmr/dataset.py | 2 +- src/siphon/ncss_dataset.py | 2 +- src/siphon/simplewebservice/acis.py | 2 +- src/siphon/simplewebservice/igra2.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/acis/Basic_Overview.py b/examples/acis/Basic_Overview.py index 281476d59..109d08f45 100644 --- a/examples/acis/Basic_Overview.py +++ b/examples/acis/Basic_Overview.py @@ -57,7 +57,7 @@ ########################################### # Now that we have our request information ready, we can call the acis_request -# function and recieve our data! +# function and receive our data! 
my_data = acis_request(method, parameters) diff --git a/src/siphon/cdmr/dataset.py b/src/siphon/cdmr/dataset.py index c62e8e66e..d34f539a9 100644 --- a/src/siphon/cdmr/dataset.py +++ b/src/siphon/cdmr/dataset.py @@ -336,7 +336,7 @@ def group(self): return self._group def isunlimited(self): - """Return whether the dimesion is unlimited.""" + """Return whether the dimension is unlimited.""" return self.unlimited def load_from_stream(self, dim): diff --git a/src/siphon/ncss_dataset.py b/src/siphon/ncss_dataset.py index 9a2d7e1a3..cbfc30211 100644 --- a/src/siphon/ncss_dataset.py +++ b/src/siphon/ncss_dataset.py @@ -52,7 +52,7 @@ def handle_typed_values(val, type_name, value_type): applying int, float to the values will work in most cases (i.e. the TDS encodes them as string values properly). - Examle XML element: + Example XML element: diff --git a/src/siphon/simplewebservice/acis.py b/src/siphon/simplewebservice/acis.py index a7b8cbe07..5e0f54a0f 100644 --- a/src/siphon/simplewebservice/acis.py +++ b/src/siphon/simplewebservice/acis.py @@ -21,7 +21,7 @@ def acis_request(method, params): ACIS Web Services is a distributed system! A call to the main URL can be delivered to any climate center running a public instance of the service. - This makes the calls efficient, but also occasionaly results in failed + This makes the calls efficient, but also occasionally results in failed calls when a server you are directed to is having problems. Generally, reconnecting after waiting a few seconds will resolve a problem. If problems are persistent, contact ACIS developers at the High Plains Regional Climate diff --git a/src/siphon/simplewebservice/igra2.py b/src/siphon/simplewebservice/igra2.py index deafbe2f8..66fe07c03 100644 --- a/src/siphon/simplewebservice/igra2.py +++ b/src/siphon/simplewebservice/igra2.py @@ -33,7 +33,7 @@ def __init__(self): @classmethod def request_data(cls, time, site_id, derived=False): - """Retreive IGRA version 2 data for one station. + """Retrieve IGRA version 2 data for one station. Parameters ---------- From fefcd30442d4582dba2ed2cdf3ae04f4acb61f91 Mon Sep 17 00:00:00 2001 From: Ryan May Date: Mon, 4 Nov 2024 14:36:30 -0700 Subject: [PATCH 8/8] MNT: Fix a couple things found by pyupgrade Changes for >= 3.10. --- docs/conf.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 458fa871e..1c772726c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# -*- coding: utf-8 -*- # # This file is execfile()d with the current directory set to its # containing dir. @@ -47,7 +46,7 @@ }, 'examples_dirs': [os.path.join('..', 'examples')], 'gallery_dirs': ['examples'], - 'filename_pattern': '\.py', + 'filename_pattern': r'\.py', 'backreferences_dir': 'api/generated', 'default_thumb_file': os.path.join('_static', 'siphon_150x150_white_bg.png'), 'abort_on_example_error': True
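
For context on the filename_pattern change above: '\.' is not a recognized
Python escape sequence, so the ordinary string literal draws a
DeprecationWarning on Python 3.6-3.11 and a SyntaxWarning on 3.12+, while the
raw string states the intent directly. A minimal sketch of the difference
(illustrative only, not part of the patch):

    import re

    # Ordinary literal: '\.' is an invalid escape sequence. The resulting
    # string still contains a literal backslash and dot, but Python 3.12+
    # emits SyntaxWarning at compile time (DeprecationWarning before that).
    legacy = '\.py'

    # Raw string: the backslash is explicit and warning-free.
    pattern = r'\.py'

    assert legacy == pattern
    assert re.search(pattern, 'docs/conf.py') is not None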