Skip to content

Commit

Permalink
Added lakes and added a test
Browse files Browse the repository at this point in the history
  • Loading branch information
rosepearson committed Nov 7, 2024
1 parent 3c4cffd commit 6abd7d0
Show file tree
Hide file tree
Showing 7 changed files with 120 additions and 19 deletions.
25 changes: 13 additions & 12 deletions src/geofabrics/dem.py
Original file line number Diff line number Diff line change
Expand Up @@ -300,6 +300,7 @@ class DemBase(abc.ABC):
"patch": 6,
"stopbanks": 7,
"masked feature": 8,
"lakes": 9,
"interpolated": 0,
"no data": -1,
}
Expand Down Expand Up @@ -1108,7 +1109,7 @@ def clip_within_polygon(self, polygon_paths: list, label: str):

def interpolate_elevations_within_polygon(
self,
elevations: geometry.EstimatedElevationPoints,
elevations: geometry.ElevationPoints,
method: str,
cache_path: pathlib.Path,
label: str,
Expand Down Expand Up @@ -1171,7 +1172,7 @@ def interpolate_elevations_within_polygon(
point_cloud = numpy.concatenate([edge_points, point_cloud])

# Save river points in a temporary laz file
lidar_file = cache_path / "waterways_points.laz"
lidar_file = cache_path / f"{label}_points.laz"
pdal_pipeline_instructions = [
{
"type": "writers.las",
Expand All @@ -1196,7 +1197,7 @@ def interpolate_elevations_within_polygon(
self.logger.info(f"Preparing {[len(chunked_dim_x), len(chunked_dim_y)]} chunks")

# cycle through index chunks - and collect in a delayed array
self.logger.info("Running over ocean chunked")
self.logger.info(f"Running over {label} chunked")
delayed_chunked_matrix = []
for i, dim_y in enumerate(chunked_dim_y):
delayed_chunked_x = []
Expand Down Expand Up @@ -1248,9 +1249,10 @@ def interpolate_elevations_within_polygon(

def interpolate_rivers(
self,
elevations: geometry.EstimatedElevationPoints,
elevations: geometry.ElevationPoints,
method: str,
cache_path: pathlib.Path,
label: str,
k_nearest_neighbours: int = 100,
) -> xarray.Dataset:
"""Performs interpolation from estimated bathymetry points within a polygon
Expand All @@ -1275,7 +1277,7 @@ def interpolate_rivers(

# Extract and save river elevations
river_points = elevations.points_array
river_points_file = cache_path / "river_points.laz"
river_points_file = cache_path / f"{label}_points.laz"
pdal_pipeline_instructions = [
{
"type": "writers.las",
Expand Down Expand Up @@ -1355,7 +1357,7 @@ def interpolate_rivers(
edge_points["Y"] = flat_y[mask_z]
edge_points["Z"] = flat_z[mask_z]

river_edge_file = cache_path / "river_edge_points.laz"
river_edge_file = cache_path / f"{label}_edge_points.laz"
pdal_pipeline_instructions = [
{
"type": "writers.las",
Expand Down Expand Up @@ -1396,7 +1398,7 @@ def interpolate_rivers(
self.logger.info(f"Preparing {[len(chunked_dim_x), len(chunked_dim_y)]} chunks")

# cycle through index chunks - and collect in a delayed array
self.logger.info("Running over ocean chunked")
self.logger.info("Running over river chunked")
delayed_chunked_matrix = []
for i, dim_y in enumerate(chunked_dim_y):
delayed_chunked_x = []
Expand Down Expand Up @@ -1447,7 +1449,7 @@ def interpolate_rivers(
mask = ~(rivers_mask & self._dem.z.notnull())
self._dem["data_source"] = self._dem.data_source.where(
mask,
self.SOURCE_CLASSIFICATION["rivers and fans"],
self.SOURCE_CLASSIFICATION[label],
)
self._dem["lidar_source"] = self._dem.lidar_source.where(
mask, self.SOURCE_CLASSIFICATION["no data"]
Expand Down Expand Up @@ -3216,11 +3218,10 @@ def elevation_from_nearest_points(
if k > len(point_cloud) or (options["use_edge"] and k > len(edge_point_cloud)):
logger.warning(
f"Fewer points than the nearest k to search for provided: k = {k} "
f"> points {len(point_cloud)} or edge points "
f"{len(edge_point_cloud)}. Returning NaN array."
f"> points {len(point_cloud)} or edge points {len(edge_point_cloud)}."
" Updating k to the avaliable number of points."
)
z_out = numpy.ones(len(xy_out), dtype=options["raster_type"]) * numpy.nan
return z_out
k = min(len(point_cloud), len(edge_point_cloud)) if options["use_edge"] else len(point_cloud)
xy_in = numpy.empty((len(point_cloud), 2))
xy_in[:, 0] = point_cloud["X"]
xy_in[:, 1] = point_cloud["Y"]
Expand Down
41 changes: 39 additions & 2 deletions src/geofabrics/geometry.py
Original file line number Diff line number Diff line change
Expand Up @@ -553,7 +553,7 @@ def sample(self) -> numpy.ndarray:
return points


class EstimatedElevationPoints:
class ElevationPoints:
"""A class for accessing estimated or measured river, mouth and waterway
elevations as points. Paired elevation and polygon files are expected.
The elevations are used to interpolate elevations within the polygons.
Expand Down Expand Up @@ -630,7 +630,7 @@ def _set_up(
polygon_list.append(polygon_i)
# Set CRS, clip to size and reset index
if len(points_list) == 0:
self.logger.warning("No waterways elevations. Ignoring.")
self.logger.warning("No elevations. Ignoring.")
self._points = []
self._polygon = []
return
Expand Down Expand Up @@ -757,6 +757,43 @@ def z(self):
self._z = self._points.apply(lambda row: row.geometry.z, axis=1).to_list()
return self._z

class ElevationContours(ElevationPoints):
    """Elevation contours resampled into points at the catchment resolution.

    Loads contour geometries (via the ElevationPoints base class) and
    immediately converts each contour line into points sampled along its
    length at the catchment geometry's resolution, so the contours can be
    used like any other elevation point set.
    """

    def __init__(
        self,
        points_files: list,
        polygon_files: list,
        catchment_geometry: CatchmentGeometry,
        z_labels: list = None,
    ):
        # No OSM id filtering applies to contours — pass an empty filter list
        super(ElevationContours, self).__init__(
            points_files=points_files,
            polygon_files=polygon_files,
            catchment_geometry=catchment_geometry,
            filter_osm_ids=[],
            z_labels=z_labels,
        )
        self.logger = logging.getLogger(f"{__name__}.{self.__class__.__name__}")

        # Convert contours to sampled points at the catchment resolution
        self.sample_contours(self.catchment_geometry.resolution)

    def sample_contours(self, resolution: float) -> None:
        """Replace each contour geometry in self._points with points sampled
        along the contour at the specified resolution. Mutates self._points
        in place and returns nothing.

        Parameters
        ----------
        resolution
            The spacing (in CRS units) between sampled points along each
            contour.
        """

        # Convert each contour line to a MultiPoint sampled every
        # `resolution` along its length.
        # NOTE(review): range(ceil(length / resolution)) samples from the
        # start of each contour but may omit its end point — confirm this
        # is intended.
        self._points.loc[:, "geometry"] = self._points.geometry.apply(
            lambda row: shapely.geometry.MultiPoint(
                [
                    row.interpolate(i * resolution)
                    for i in range(int(numpy.ceil(row.length / resolution)))
                ]
            )
        )
        # Expand each MultiPoint so there is one row per sampled point
        self._points = self._points.explode(index_parts=True, ignore_index=True)



class TileInfo:
"""A class for working with tiling information."""
Expand Down
68 changes: 65 additions & 3 deletions src/geofabrics/processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,13 +290,15 @@ def get_instruction_general(self, key: str, subkey: str = None):
"interpolation": {
"rivers": "rbf",
"waterways": "cubic",
"lakes": "linear",
"stopbanks": "nearest",
"ocean": "rbf",
"lidar": "idw",
"no_data": None,
},
"z_labels": {
"waterways": "z",
"lakes": "z",
"stopbanks": "z",
"rivers": "z",
"ocean": None,
Expand Down Expand Up @@ -1233,7 +1235,7 @@ def add_hydrological_features(
self.logger.info(f"Incorporating waterways: {elevations}")

# Load in bathymetry
estimated_elevations = geometry.EstimatedElevationPoints(
estimated_elevations = geometry.ElevationPoints(
points_files=elevations,
polygon_files=polygons,
filter_osm_ids=self.get_instruction_general(
Expand Down Expand Up @@ -1265,6 +1267,65 @@ def add_hydrological_features(
if cached_file is not None:
self.clean_cached_file(cached_file)
cached_file = temp_file
# Check for lakes
if "lakes" in self.instructions["data_paths"]:
# Loop through each lake in turn adding individually
subfolder = self.get_instruction_path(key="subfolder")
z_labels = self.get_instruction_general(key="z_labels", subkey="lakes")
lakes = self.instructions["data_paths"]["lakes"]
if isinstance(z_labels, str):
z_labels = [z_labels for i in range(len(lakes))]
elif not isinstance(z_labels, list) or len(z_labels) != len(lakes):
raise ValueError(
"There is a mismatch in length between the provided z_labels "
f"and the lakes: {z_labels} {lakes}"
)
for index, lake_dict in enumerate(lakes):
elevation = pathlib.Path(lake_dict["elevations"])
polygon = pathlib.Path(lake_dict["extents"])
if not elevation.is_absolute():
elevation = subfolder / elevation
if not polygon.is_absolute():
polygon = subfolder / polygon

self.logger.info(f"Incorporating lake: {elevation}")
# Load in elevations
elevations = geometry.ElevationContours(
points_files=[elevation],
polygon_files=[polygon],
catchment_geometry=self.catchment_geometry,
z_labels=z_labels[index],
)

if (
len(elevations.points_array) == 0
or elevations.polygons.area.sum()
< self.catchment_geometry.resolution**2
):
self.logger.warning(
"No points or an area less than one grid cell in "
f"lake {elevation}. Ignoring."
)
continue

# Add lake to DEM
hydrologic_dem.interpolate_rivers(
elevations=elevations,
method=self.get_instruction_general(
key="interpolation", subkey="lakes"
),
cache_path=temp_folder,
label="lakes",
)
temp_file = temp_folder / f"dem_added_{index + 1}_lake.nc"
self.logger.info(
f"Save temp DEM with lake {index + 1} added to netCDF: {temp_file}"
)
hydrologic_dem.save_and_load_dem(temp_file)
# Remove previous cached file and replace with new one
if cached_file is not None:
self.clean_cached_file(cached_file)
cached_file = temp_file
# Load in river bathymetry and incorporate where discernable at the resolution
if "rivers" in self.instructions["data_paths"]:
# Loop through each river in turn adding individually
Expand All @@ -1289,7 +1350,7 @@ def add_hydrological_features(
self.logger.info(f"Incorporating river: {elevation}")

# Load in bathymetry
estimated_elevations = geometry.EstimatedElevationPoints(
estimated_elevations = geometry.ElevationPoints(
points_files=[elevation],
polygon_files=[polygon],
catchment_geometry=self.catchment_geometry,
Expand All @@ -1315,6 +1376,7 @@ def add_hydrological_features(
key="interpolation", subkey="rivers"
),
cache_path=temp_folder,
label="rivers and fans",
)
temp_file = temp_folder / f"dem_added_{index + 1}_rivers.nc"
self.logger.info(
Expand Down Expand Up @@ -1346,7 +1408,7 @@ def add_hydrological_features(
self.logger.info(f"Incorporating stopbanks: {elevations}")

# Load in bathymetry
estimated_elevations = geometry.EstimatedElevationPoints(
estimated_elevations = geometry.ElevationPoints(
points_files=elevations,
polygon_files=polygons,
catchment_geometry=self.catchment_geometry,
Expand Down
4 changes: 2 additions & 2 deletions tests/test_many_stages_waikanae/data/benchmark.nc
Git LFS file not shown
Binary file not shown.
Binary file not shown.
1 change: 1 addition & 0 deletions tests/test_many_stages_waikanae/instruction.json
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,7 @@
"rivers": [{"extents": "rivers/river_polygon.geojson", "elevations": "rivers/river_bathymetry.geojson"}],
"waterways": [{"extents": "waterways/closed_waterways_polygon.geojson", "elevations": "waterways/closed_waterways_elevation.geojson"},
{"extents": "waterways/open_waterways_polygon.geojson", "elevations": "waterways/open_waterways_elevation.geojson"}],
"lakes": [{"extents": "../lake_outline.gpkg", "elevations": "../lake_contours.gpkg"}],
"result_dem": "test_dem.nc"
},
"datasets": {
Expand Down

0 comments on commit 6abd7d0

Please sign in to comment.