diff --git a/.github/workflows/cicd_docker.yml b/.github/workflows/cicd_docker.yml index a80bf05..f89233f 100644 --- a/.github/workflows/cicd_docker.yml +++ b/.github/workflows/cicd_docker.yml @@ -10,8 +10,8 @@ env: DOCKER_IMAGE_NAME: pdal_ign_plugin jobs: - build_docker_image_and_run_tests: + runs-on: ubuntu-latest steps: @@ -24,3 +24,4 @@ jobs: - name: Run tests in docker image run: docker run ${{ env.DOCKER_IMAGE_NAME }}:test python -m pytest + diff --git a/Dockerfile b/Dockerfile index 2b9dbb5..f082bce 100755 --- a/Dockerfile +++ b/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install --no-install-recommends -y cmake make buil COPY src src COPY CMakeLists.txt CMakeLists.txt - RUN cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release RUN make -j4 install @@ -31,4 +30,4 @@ RUN pip install . # Add example scripts + test data (to be able to test inside the docker image) COPY scripts /pdal_ign_plugin/scripts -COPY test /pdal_ign_plugin/test \ No newline at end of file +COPY test /pdal_ign_plugin/test diff --git a/README.md b/README.md index 20f320e..1409bce 100755 --- a/README.md +++ b/README.md @@ -74,7 +74,7 @@ The name of the folder informs of the plugIN nature (reader, writer, filter). The code should respect the documentation proposed by pdal: [build a pdal plugin](https://pdal.io/en/2.6.0/development/plugins.html). Be careful to change if the plugIn is a reader, a writer or a filter. -The CMakeList should contains: +The CMakeList should contain: ``` file( GLOB_RECURSE GD_SRCS ${CMAKE_SOURCE_DIR} *) @@ -90,7 +90,6 @@ install(TARGETS pdal_plugin_filter_my_new_PI) ``` You should complete the main CMakeList by adding the new plugIN: - ``` add_subdirectory(src/filter_my_new_PI) ``` @@ -98,6 +97,6 @@ add_subdirectory(src/filter_my_new_PI) Each plugIN has his own md file in the doc directory, structured as the [model](./doc/_doc_model_plugIN.md). 
Don't forget to update [the list](#list-of-filters) with a link to the documentation. ## `macro` python module usage diff --git a/environment_docker.yml b/environment_docker.yml index 9232812..6d00e85 100755 --- a/environment_docker.yml +++ b/environment_docker.yml @@ -7,7 +7,9 @@ dependencies: - python-pdal - gdal - pytest + # --------- pip & pip libraries --------- # - pip - pip: - ign-pdal-tools + diff --git a/scripts/ex_filtering_points_with_add_dimensions.py b/scripts/mark_points_to_use_for_digital_models_with_new_dimension.py similarity index 53% rename from scripts/ex_filtering_points_with_add_dimensions.py rename to scripts/mark_points_to_use_for_digital_models_with_new_dimension.py index 01ca09b..819c252 100755 --- a/scripts/ex_filtering_points_with_add_dimensions.py +++ b/scripts/mark_points_to_use_for_digital_models_with_new_dimension.py @@ -5,34 +5,59 @@ from macro import macro """ -This tool shows how to use functions of macro in a pdal pipeline +This tool applies a pdal pipeline to select points for DSM and DTM calculation +It adds dimensions with positive values for the selected points """ def parse_args(): parser = argparse.ArgumentParser( - "Tool to apply pdal pipelines for DSM and DTM calculation (with add dimensions for the concerned points)" + "Tool to apply pdal pipelines to select points for DSM and DTM calculation" + + "(add dimensions with positive values for the selected points)" ) parser.add_argument("--input", "-i", type=str, required=True, help="Input las file") parser.add_argument( "--output_las", "-o", type=str, required=True, help="Output cloud las file" ) - parser.add_argument("--output_dsm", "-s", type=str, required=True, help="Output dsm tiff file") - parser.add_argument("--output_dtm", "-t", type=str, required=True, help="Output dtm tiff file") + parser.add_argument( + "--dsm_dimension", + type=str, + required=False, + default="dsm_marker", + help="Dimension name for the output DSM marker", + ) + 
parser.add_argument( + "--dtm_dimension", + type=str, + required=False, + default="dtm_marker", + help="Dimension name for the output DTM marker", + ) + parser.add_argument( + "--output_dsm", "-s", type=str, required=False, default="", help="Output dsm tiff file" + ) + parser.add_argument( + "--output_dtm", "-t", type=str, required=False, default="", help="Output dtm tiff file" + ) return parser.parse_args() if __name__ == "__main__": args = parse_args() - pipeline = pdal.Reader.las(args.input) + pipeline = pdal.Pipeline() | pdal.Reader.las(args.input) + dsm_dim = args.dsm_dimension + dtm_dim = args.dtm_dimension - # 0 - ajout de dimensions temporaires - pipeline |= pdal.Filter.ferry( - dimensions="=>PT_GRID_DSM, =>PT_VEG_DSM, =>PT_GRID_DTM, =>PT_ON_BRIDGE" - ) + # Récupération des dimensions du fichier en entrée + input_dimensions = pipeline.quickinfo["readers.las"]["dimensions"].split(", ") + + # 0 - ajout de dimensions temporaires et de sortie + added_dimensions = [dtm_dim, dsm_dim, "PT_VEG_DSM", "PT_ON_BRIDGE"] + pipeline |= pdal.Filter.ferry(dimensions="=>" + ", =>".join(added_dimensions)) - # 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en compte des points sol (2) et basse + # 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en + # compte des points sol (2) et basse # vegetation (3) proche de la végétation # pour le calcul du DSM @@ -70,24 +95,22 @@ def parse_args(): # max des points de veget (PT_VEG_DSM==1) sur une grille régulière : pipeline |= pdal.Filter.gridDecimation( - resolution=0.75, value="PT_GRID_DSM=1", output_type="max", where="PT_VEG_DSM==1" + resolution=0.75, value=f"{dsm_dim}=1", output_type="max", where="PT_VEG_DSM==1" ) # 2 - sélection des points pour DTM et DSM # selection de points DTM (max) sur une grille régulière pipeline |= pdal.Filter.gridDecimation( - resolution=0.5, value="PT_GRID_DTM=1", output_type="max", where="Classification==2" + resolution=0.5, 
value=f"{dtm_dim}=1", output_type="max", where="Classification==2" ) # selection de points DSM (max) sur une grille régulière pipeline |= pdal.Filter.gridDecimation( resolution=0.5, - value="PT_GRID_DSM=1", + value=f"{dsm_dim}=1", output_type="max", - where="(" - + macro.build_condition("Classification", [6, 9, 17, 64]) - + ") || PT_GRID_DSM==1", + where="(" + macro.build_condition("Classification", [6, 9, 17, 64]) + f") || {dsm_dim}==1", ) # assigne des points sol sélectionnés : les points proches de la végétation, des ponts, de l'eau, 64 @@ -95,13 +118,16 @@ def parse_args(): pipeline, 1.5, False, - condition_src="PT_GRID_DTM==1", + condition_src=f"{dtm_dim}==1", condition_ref=macro.build_condition("Classification", [4, 5, 6, 9, 17, 64]), - condition_out="PT_GRID_DSM=1", + condition_out=f"{dsm_dim}=1", ) # 3 - gestion des ponts # bouche trou : on filtre les points (2,3,4,5,9) au milieu du pont en les mettant à PT_ON_BRIDGE=1 + # TODO: ajouter "demi-cylindre" : + # - si végétation au dessus du pont alors on choisit la végétation + # - si végétation en dessous du pont alors on choisit le pont pipeline = macro.add_radius_assign( pipeline, @@ -119,27 +145,32 @@ def parse_args(): condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]), condition_out="PT_ON_BRIDGE=0", ) - pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=0 WHERE PT_ON_BRIDGE==1"]) + pipeline |= pdal.Filter.assign(value=[f"{dsm_dim}=0 WHERE PT_ON_BRIDGE==1"]) # 4 - point pour DTM servent au DSM également - pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=1 WHERE PT_GRID_DTM==1"]) + pipeline |= pdal.Filter.assign(value=[f"{dsm_dim}=1 WHERE {dtm_dim}==1"]) # 5 - export du nuage et des DSM + # TODO: n'ajouter que les dimensions de sortie utiles ! 
pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output_las) - pipeline |= pdal.Writer.gdal( - gdaldriver="GTiff", - output_type="max", - resolution=2.0, - filename=args.output_dtm, - where="PT_GRID_DTM==1", - ) - pipeline |= pdal.Writer.gdal( - gdaldriver="GTiff", - output_type="max", - resolution=2.0, - filename=args.output_dsm, - where="PT_GRID_DSM==1", - ) + + if args.output_dtm: + pipeline |= pdal.Writer.gdal( + gdaldriver="GTiff", + output_type="max", + resolution=2.0, + filename=args.output_dtm, + where=f"{dtm_dim}==1", + ) + + if args.output_dsm: + pipeline |= pdal.Writer.gdal( + gdaldriver="GTiff", + output_type="max", + resolution=2.0, + filename=args.output_dsm, + where=f"{dsm_dim}==1", + ) pipeline.execute()