Skip to content

Commit

Permalink
Python 3.12 support (#118)
Browse files Browse the repository at this point in the history
* Move cython to dev deps

* Python 3.12 pickle fix

* Add py 3.12 test

* Update changelog

* Use context with 'spawn' method for child processes

* Update pyproject.toml

* Removed unused _BaseHandler

* Added context attribute to base model class

* Update codecov upload

* Update changelog

* Add build badge
  • Loading branch information
dalmijn authored Apr 9, 2024
1 parent 89b9a4d commit e9eef57
Show file tree
Hide file tree
Showing 9 changed files with 76 additions and 91 deletions.
10 changes: 7 additions & 3 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ jobs:
strategy:
fail-fast: false
matrix:
os: ["ubuntu-latest" ] #, "macos-latest", "windows-latest"]
python-version: ["3.10", "3.11"] # fix tests to support older versions
os: ["ubuntu-latest"]
python-version: ["3.10", "3.11", "3.12"]
include:
- os: ubuntu-latest
label: linux-64
Expand Down Expand Up @@ -83,4 +83,8 @@ jobs:
path: coverage.xml

- name: Upload coverage to codecov.io
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
with:
files: ./coverage.xml
token: ${{ secrets.CODECOV_TOKEN }}
verbose: true
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# Delft-FIAT: Fast Impact Assessment Tool
[![PyPI Latest Release](https://img.shields.io/pypi/v/delft_fiat.svg)](https://pypi.org/project/Delft-FIAT/) [![Conda Latest Release](https://anaconda.org/conda-forge/delft_fiat/badges/version.svg)](https://anaconda.org/conda-forge/delft_fiat) [![Coverage](https://codecov.io/github/Deltares/Delft-FIAT/coverage.svg?branch=master)](https://codecov.io/github/Deltares/Delft-FIAT)
[![PyPI Latest Release](https://img.shields.io/pypi/v/delft_fiat.svg)](https://pypi.org/project/Delft-FIAT/) [![Conda Latest Release](https://anaconda.org/conda-forge/delft_fiat/badges/version.svg)](https://anaconda.org/conda-forge/delft_fiat) [![Coverage](https://codecov.io/github/Deltares/Delft-FIAT/coverage.svg?branch=master)](https://codecov.io/github/Deltares/Delft-FIAT) ![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/Deltares/Delft-FIAT/build.yml)

Combined assessment for spatial (raster) and object (vector).

Expand Down
4 changes: 4 additions & 0 deletions docs/changelog.qmd
Original file line number Diff line number Diff line change
Expand Up @@ -13,18 +13,22 @@ This contains the unreleased changes to Delft-FIAT.
- Method `set` of `ConfigReader` object
- Method `size` of `GeomSource` object for in situ size return; `count` becomes private
- Method `size` of `GridSource` object for in situ size return; `count` becomes private
- Python 3.12 support
- Setting return period as a variable in hazard map bands (risk)

### Changed
- Better version of `BufferHandler`
- Made read methods of `BaseModel`, `GeomModel` and `GridModel` public (removed underscore)
- Parallelization now depends solely on `multiprocessing` instead of on `concurrent.futures`
- Testing of workers (not properly caught due to using `multiprocessing`)
- Testing only based on integers

### Deprecated
- Base object `_BaseHandler`; incompatible with Python 3.12
- Methods `_create_output_dir`, `_create_tmp_dir` and `_create_risk_dir` of the `ConfigReader` object
- Setting return period via the name of the hazard band (risk)
- Support of `Python` versions under `3.9.0`
- `TextHandler` object; unused

### Documentation

Expand Down
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ classifiers = [
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
]

[project.optional-dependencies] # Optional
Expand Down
136 changes: 52 additions & 84 deletions src/fiat/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import os
import weakref
from abc import ABCMeta, abstractmethod
from io import BufferedReader, BytesIO, FileIO, TextIOWrapper
from io import BufferedReader, BytesIO, FileIO
from math import floor, log10
from multiprocessing.synchronize import Lock
from pathlib import Path
Expand Down Expand Up @@ -128,37 +128,6 @@ def flush(self):
raise NotImplementedError(NEED_IMPLEMENTED)


class _BaseHandler(metaclass=ABCMeta):
    """Mixin base for file-backed handlers.

    NOTE(review): this class calls ``read``, ``seek``, ``flush`` and
    ``close`` without defining them -- it relies on the concrete subclass
    also inheriting from an ``io`` class (e.g. ``BufferedReader``) that
    provides them. It cannot be used stand-alone.
    """

    def __init__(
        self,
        file: str,
        skip: int = 0,
    ) -> None:
        """Record the path, count the file's lines, and seek past any header.

        Parameters
        ----------
        file : str
            Path to the file on disk.
        skip : int, optional
            Stream position to seek to after setup (e.g. just past a
            header); defaults to 0.
        """
        self.path = Path(file)

        self.skip = skip
        # Line count of the whole file; `read` comes from the io sibling
        # class, so this consumes the stream once before seeking back.
        self.size = self.read().count(os.linesep.encode())

        self.seek(self.skip)

    def __del__(self):
        # Best-effort cleanup when the handler is garbage collected.
        self.flush()
        self.close()

    @abstractmethod
    def __repr__(self):
        raise NotImplementedError(DD_NEED_IMPLEMENTED)

    def __enter__(self):
        # Delegate to the io sibling class via the MRO.
        return super().__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        # On leaving a `with` block, reset the stream to just past the
        # header instead of closing, so the handler remains usable.
        self.flush()
        self.seek(self.skip)
        return False


class _BaseStruct(metaclass=ABCMeta):
"""A struct container."""

Expand Down Expand Up @@ -187,34 +156,59 @@ def _update_kwargs(


## Handlers
class BufferHandler:
    """Handler for buffers.

    Wraps a ``BufferedReader`` over a file on disk while keeping the
    constructor arguments (``path``, ``skip``) as plain attributes, so the
    handler can be pickled and re-opened in a child process (required for
    Python 3.12 / 'spawn'-based multiprocessing: raw streams themselves
    are not picklable).

    Parameters
    ----------
    path : Path
        Path to the file on disk.
    skip : int, optional
        Stream position to seek to after (re)opening, e.g. just past a
        header; defaults to 0.
    """

    def __init__(
        self,
        path: Path,
        skip: int = 0,
    ):
        self.path = path
        self.size = None
        self.skip = skip
        self.stream = None

        # Open the underlying stream immediately on construction.
        if self.stream is None:
            self.setup_stream()

    def __repr__(self):
        return f"<{self.__class__.__name__} file='{self.path}' encoding=''>"

    def __getstate__(self):
        # Drop the unpicklable stream before pickling; only plain
        # attributes (path, skip, ...) travel to the child process.
        if self.stream is not None:
            self.close_stream()
        return self.__dict__

    def __setstate__(self, d):
        # Restore attributes, then re-open the stream in the new process.
        self.__dict__ = d
        self.setup_stream()

    def __enter__(self):
        return self.stream.__enter__()

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Reset to just past the header instead of closing, so the
        # handler remains usable after a `with` block.
        self.stream.flush()
        self.stream.seek(self.skip)
        return False

    def close(self):
        """Flush and close the underlying stream, if open."""
        if self.stream is not None:
            self.stream.flush()
            self.close_stream()

    def close_stream(self):
        """Close the stream and invalidate the cached line count."""
        self.stream.close()
        self.stream = None
        self.size = None

    def setup_stream(self):
        """Open the file, cache its line count, and seek past the header."""
        self.stream = BufferedReader(FileIO(self.path))
        # Count line separators once; the stream is consumed, then reset.
        self.size = self.stream.read().count(os.linesep.encode())
        self.stream.seek(self.skip)


class BufferedGeomWriter:
Expand Down Expand Up @@ -427,32 +421,6 @@ def write(self, b):
BytesIO.write(self, b)


class TextHandler(_BaseHandler, TextIOWrapper):
    """Text-mode file handler.

    NOTE(review): ``_BaseHandler.__init__()`` below is called with no
    arguments at all (not even ``self``), so instantiating this class
    would raise ``TypeError`` -- confirm before relying on it; it appears
    to be unused.
    """

    def __init__(
        self,
        file: str,
    ) -> "TextHandler":
        """Wrap *file* in a ``TextIOWrapper``.

        Parameters
        ----------
        file : str
            Path to the file on disk.
        """
        TextIOWrapper.__init__(self, FileIO(file))
        _BaseHandler.__init__()

    def __repr__(self):
        return f"<{self.__class__.__name__} file='{self.path}'>"


## Parsing
class CSVParser:
"""_summary_."""
Expand Down Expand Up @@ -536,12 +504,12 @@ def _parse_meta(
):
"""_summary_."""
_pat = regex_pattern(self.delim)
self.data.seek(0)
self.data.stream.seek(0)

while True:
self._nrow -= 1
cur_pos = self.data.tell()
line = self.data.readline().decode("utf-8-sig")
cur_pos = self.data.stream.tell()
line = self.data.stream.readline().decode("utf-8-sig")

if line.startswith("#"):
t = line.strip().split("=")
Expand All @@ -566,7 +534,7 @@ def _parse_meta(
if not header:
self.columns = None
self._ncol = len(_pat.split(line.encode("utf-8-sig")))
self.data.seek(cur_pos)
self.data.stream.seek(cur_pos)
self._nrow += 1
break

Expand All @@ -575,7 +543,7 @@ def _parse_meta(
self._ncol = len(self.columns)
break

self.data.skip = self.data.tell()
self.data.skip = self.data.stream.tell()
self.meta["ncol"] = self._ncol
self.meta["nrow"] = self._nrow

Expand Down Expand Up @@ -1963,9 +1931,9 @@ def __getitem__(
except Exception:
return None

self.data.seek(idx)
self.data.stream.seek(idx)

return self.data.readline().strip()
return self.data.stream.readline().strip()

def _build_lazy(self):
    # Lazy construction is not supported for this table type.
    raise NotImplementedError(NOT_IMPLEMENTED)
Expand Down
3 changes: 2 additions & 1 deletion src/fiat/models/base.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""Base model of FIAT."""

from abc import ABCMeta, abstractmethod
from multiprocessing import Manager
from multiprocessing import Manager, get_context
from os import cpu_count

from osgeo import osr
Expand Down Expand Up @@ -51,6 +51,7 @@ def __init__(
# Temporary files
self._keep_temp = False
# Threading stuff
self._mp_ctx = get_context("spawn")
self._mp_manager = Manager()
self.max_threads = 1
self.nthreads = 1
Expand Down
2 changes: 2 additions & 0 deletions src/fiat/models/geom.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,6 +260,7 @@ def resolve(
# Execute the jobs
logger.info("Busy...")
execute_pool(
ctx=self._mp_ctx,
func=geom_resolve,
jobs=jobs,
threads=self.nthreads,
Expand Down Expand Up @@ -311,6 +312,7 @@ def run(
_s = time.time()
logger.info("Busy...")
execute_pool(
ctx=self._mp_ctx,
func=geom_worker,
jobs=jobs,
threads=self.nthreads,
Expand Down
4 changes: 4 additions & 0 deletions src/fiat/models/grid.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,10 +110,14 @@ def run(self):
}
)

logger.info(f"Using number of threads: {self.nthreads}")

# Execute the jobs
_s = time.time()
logger.info("Busy...")
pcount = min(self.max_threads, self.hazard_grid.size)
execute_pool(
ctx=self._mp_ctx,
func=grid_worker_exact,
jobs=jobs,
threads=pcount,
Expand Down
5 changes: 3 additions & 2 deletions src/fiat/models/util.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""The FIAT model workers."""

from itertools import product
from multiprocessing import Pool
from multiprocessing.context import SpawnContext
from pathlib import Path
from typing import Callable, Generator

Expand Down Expand Up @@ -91,6 +91,7 @@ def generate_jobs(


def execute_pool(
ctx: SpawnContext,
func: Callable,
jobs: Generator,
threads: int,
Expand All @@ -105,7 +106,7 @@ def execute_pool(
# If there are more threads needed however
processes = []
# Setup the multiprocessing pool
pool = Pool(processes=threads)
pool = ctx.Pool(processes=threads)

# Go through all the jobs
for job in jobs:
Expand Down

0 comments on commit e9eef57

Please sign in to comment.