Update dsm/dtm marking script
- select output dimensions
- make dsm/dtm output optional
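As a rough illustration of the two changes above (the script path, file names, and dimension names here are placeholders, not taken from this commit), the updated script might be invoked like this:

```python
# Hypothetical invocation sketch: custom marker dimension names plus optional rasters.
# "scripts/mark_points_for_dsm_dtm.py" and all file names are placeholders.
import subprocess

subprocess.run(
    [
        "python", "scripts/mark_points_for_dsm_dtm.py",
        "--input", "tile.las",
        "--output_las", "tile_marked.las",
        "--dsm_dimension", "dsm_marker",  # new: choose the DSM marker dimension name
        "--dtm_dimension", "dtm_marker",  # new: choose the DTM marker dimension name
        "--output_dsm", "tile_dsm.tif",   # new: raster outputs are optional
        # --output_dtm omitted: no DTM raster is written
    ],
    check=True,
)
```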
leavauchier committed Jun 6, 2024
1 parent 59bdfdc commit 46dbce0
Showing 5 changed files with 72 additions and 39 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/cicd_docker.yml
@@ -10,8 +10,8 @@ env:
DOCKER_IMAGE_NAME: pdal_ign_plugin

jobs:

build_docker_image_and_run_tests:

runs-on: ubuntu-latest

steps:
@@ -24,3 +24,4 @@ jobs:
- name: Run tests in docker image
run: docker run ${{ env.DOCKER_IMAGE_NAME }}:test python -m pytest


3 changes: 1 addition & 2 deletions Dockerfile
@@ -9,7 +9,6 @@ RUN apt-get update && apt-get install --no-install-recommends -y cmake make buil

COPY src src
COPY CMakeLists.txt CMakeLists.txt

RUN cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release
RUN make -j4 install

@@ -31,4 +30,4 @@ RUN pip install .

# Add example scripts + test data (to be able to test inside the docker image)
COPY scripts /pdal_ign_plugin/scripts
COPY test /pdal_ign_plugin/test
COPY test /pdal_ign_plugin/test
4 changes: 2 additions & 2 deletions README.md
@@ -74,7 +74,7 @@ The name of the folder informs of the plugIN nature (reader, writer, filter).
The code should follow the documentation provided by pdal: [build a pdal plugin](https://pdal.io/en/2.6.0/development/plugins.html).
Be careful to adjust it depending on whether the plugIn is a reader, a writer or a filter.

The CMakeList should contains:
The CMakeList should contain:

```
file( GLOB_RECURSE GD_SRCS ${CMAKE_SOURCE_DIR} *)
@@ -90,14 +90,14 @@ install(TARGETS pdal_plugin_filter_my_new_PI)
```

You should complete the main CMakeList by adding the new plugIN:

```
add_subdirectory(src/filter_my_new_PI)
```

Each plugIN has its own md file in the doc directory, structured like the [model](./doc/_doc_model_plugIN.md).

Don't forget to update [the list](#list-of-filters) with a link to the documentation.

## `macro` python module usage

2 changes: 2 additions & 0 deletions environment_docker.yml
@@ -7,7 +7,9 @@ dependencies:
- python-pdal
- gdal
- pytest

# --------- pip & pip libraries --------- #
- pip
- pip:
- ign-pdal-tools

@@ -5,34 +5,59 @@
from macro import macro

"""
This tool shows how to use functions of macro in a pdal pipeline
This tool applies a pdal pipeline to select points for DSM and DTM calculation
It adds dimensions with positive values for the selected points
"""


def parse_args():
parser = argparse.ArgumentParser(
"Tool to apply pdal pipelines for DSM and DTM calculation (with add dimensions for the concerned points)"
"Tool to apply pdal pipelines to select points for DSM and DTM calculation"
+ "(add dimensions with positive values for the selected points)"
)
parser.add_argument("--input", "-i", type=str, required=True, help="Input las file")
parser.add_argument(
"--output_las", "-o", type=str, required=True, help="Output cloud las file"
)
parser.add_argument("--output_dsm", "-s", type=str, required=True, help="Output dsm tiff file")
parser.add_argument("--output_dtm", "-t", type=str, required=True, help="Output dtm tiff file")
parser.add_argument(
"--dsm_dimension",
type=str,
required=False,
default="dsm_marker",
help="Dimension name for the output DSM marker",
)
parser.add_argument(
"--dtm_dimension",
type=str,
required=False,
default="dtm_marker",
help="Dimension name for the output DTM marker",
)
parser.add_argument(
"--output_dsm", "-s", type=str, required=False, default="", help="Output dsm tiff file"
)
parser.add_argument(
"--output_dtm", "-t", type=str, required=False, default="", help="Output dtm tiff file"
)
return parser.parse_args()
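A minimal sketch of the "optional raster" mechanic introduced above (the argument names come from the script; everything else is illustrative): because `--output_dsm` and `--output_dtm` now default to an empty string, a plain truthiness test later in the script decides whether the corresponding GDAL writer is appended.

```python
# Illustrative only: shows why an empty default makes the raster outputs optional.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--output_dsm", "-s", type=str, required=False, default="")
parser.add_argument("--output_dtm", "-t", type=str, required=False, default="")
args = parser.parse_args(["--output_dsm", "dsm.tif"])  # no --output_dtm given

if args.output_dsm:
    print("a DSM writer would be appended")  # runs
if args.output_dtm:
    print("a DTM writer would be appended")  # skipped: "" is falsy
```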


if __name__ == "__main__":
args = parse_args()

pipeline = pdal.Reader.las(args.input)
pipeline = pdal.Pipeline() | pdal.Reader.las(args.input)
dsm_dim = args.dsm_dimension
dtm_dim = args.dtm_dimension

# 0 - add temporary dimensions
pipeline |= pdal.Filter.ferry(
dimensions="=>PT_GRID_DSM, =>PT_VEG_DSM, =>PT_GRID_DTM, =>PT_ON_BRIDGE"
)
# Retrieve the dimensions of the input file
input_dimensions = pipeline.quickinfo["readers.las"]["dimensions"].split(", ")

# 0 - add temporary and output dimensions
added_dimensions = [dtm_dim, dsm_dim, "PT_VEG_DSM", "PT_ON_BRIDGE"]
pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions))
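As an aside (not part of the commit), a minimal sketch of what the ferry stage above does, assuming a small file `sample.las` and the script's default dimension names: `"=>NAME"` creates each listed dimension initialised to 0, so later stages only need to set the selected points to 1.

```python
# Minimal ferry sketch; "sample.las" is a placeholder input file.
import pdal

p = pdal.Pipeline() | pdal.Reader.las("sample.las")
p |= pdal.Filter.ferry(dimensions="=>dsm_marker, =>dtm_marker")
p.execute()
print(p.arrays[0]["dsm_marker"][:5])  # all zeros until a later filter marks points
```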

# 1 - search for the maximum vegetation points (4,5) on a regular grid, taking into account ground points (2) and low
# 1 - search for the maximum vegetation points (4,5) on a regular grid, taking into
# account ground points (2) and low
# vegetation (3) close to the vegetation
# for the DSM computation

@@ -70,38 +95,39 @@ def parse_args():

# max of the vegetation points (PT_VEG_DSM==1) on a regular grid:
pipeline |= pdal.Filter.gridDecimation(
resolution=0.75, value="PT_GRID_DSM=1", output_type="max", where="PT_VEG_DSM==1"
resolution=0.75, value=f"{dsm_dim}=1", output_type="max", where="PT_VEG_DSM==1"
)

# 2 - select points for the DTM and the DSM

# select DTM points (max) on a regular grid
pipeline |= pdal.Filter.gridDecimation(
resolution=0.5, value="PT_GRID_DTM=1", output_type="max", where="Classification==2"
resolution=0.5, value=f"{dtm_dim}=1", output_type="max", where="Classification==2"
)

# select DSM points (max) on a regular grid
pipeline |= pdal.Filter.gridDecimation(
resolution=0.5,
value="PT_GRID_DSM=1",
value=f"{dsm_dim}=1",
output_type="max",
where="("
+ macro.build_condition("Classification", [6, 9, 17, 64])
+ ") || PT_GRID_DSM==1",
where="(" + macro.build_condition("Classification", [6, 9, 17, 64]) + f") || {dsm_dim}==1",
)

# assign the selected ground points: points close to vegetation, bridges, water, class 64
pipeline = macro.add_radius_assign(
pipeline,
1.5,
False,
condition_src="PT_GRID_DTM==1",
condition_src=f"{dtm_dim}==1",
condition_ref=macro.build_condition("Classification", [4, 5, 6, 9, 17, 64]),
condition_out="PT_GRID_DSM=1",
condition_out=f"{dsm_dim}=1",
)

# 3 - bridge handling
# hole filling: filter the points (2,3,4,5,9) in the middle of the bridge by setting them to PT_ON_BRIDGE=1
# TODO: add a "half-cylinder":
# - if there is vegetation above the bridge, keep the vegetation
# - if there is vegetation below the bridge, keep the bridge

pipeline = macro.add_radius_assign(
pipeline,
@@ -119,27 +145,32 @@ def parse_args():
condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]),
condition_out="PT_ON_BRIDGE=0",
)
pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=0 WHERE PT_ON_BRIDGE==1"])
pipeline |= pdal.Filter.assign(value=[f"{dsm_dim}=0 WHERE PT_ON_BRIDGE==1"])

# 4 - points used for the DTM are also used for the DSM
pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=1 WHERE PT_GRID_DTM==1"])
pipeline |= pdal.Filter.assign(value=[f"{dsm_dim}=1 WHERE {dtm_dim}==1"])

# 5 - export the point cloud and the DSM/DTM rasters
# TODO: only add the useful output dimensions!

pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output_las)
pipeline |= pdal.Writer.gdal(
gdaldriver="GTiff",
output_type="max",
resolution=2.0,
filename=args.output_dtm,
where="PT_GRID_DTM==1",
)
pipeline |= pdal.Writer.gdal(
gdaldriver="GTiff",
output_type="max",
resolution=2.0,
filename=args.output_dsm,
where="PT_GRID_DSM==1",
)

if args.output_dtm:
pipeline |= pdal.Writer.gdal(
gdaldriver="GTiff",
output_type="max",
resolution=2.0,
filename=args.output_dtm,
where=f"{dtm_dim}==1",
)

if args.output_dsm:
pipeline |= pdal.Writer.gdal(
gdaldriver="GTiff",
output_type="max",
resolution=2.0,
filename=args.output_dsm,
where=f"{dsm_dim}==1",
)

pipeline.execute()
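A hedged verification sketch (not part of the commit; assumes the default dimension names, that the output path below matches the one passed to `--output_las`, and that the marker dimensions are preserved as extra-bytes dimensions in the written LAS): re-read the marked cloud and count how many points were selected for each model.

```python
# Illustrative check of the marker dimensions in the output cloud.
import pdal

p = pdal.Pipeline() | pdal.Reader.las("tile_marked.las")  # placeholder path
p.execute()
pts = p.arrays[0]
print("points selected for the DSM:", int(pts["dsm_marker"].sum()))
print("points selected for the DTM:", int(pts["dtm_marker"].sum()))
```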
