diff --git a/.github/workflows/cicd_docker.yml b/.github/workflows/cicd_docker.yml index a80bf05..f89233f 100644 --- a/.github/workflows/cicd_docker.yml +++ b/.github/workflows/cicd_docker.yml @@ -10,8 +10,8 @@ env: DOCKER_IMAGE_NAME: pdal_ign_plugin jobs: - build_docker_image_and_run_tests: + runs-on: ubuntu-latest steps: @@ -24,3 +24,4 @@ jobs: - name: Run tests in docker image run: docker run ${{ env.DOCKER_IMAGE_NAME }}:test python -m pytest + diff --git a/Dockerfile b/Dockerfile index 2b9dbb5..f082bce 100755 --- a/Dockerfile +++ b/Dockerfile @@ -9,7 +9,6 @@ RUN apt-get update && apt-get install --no-install-recommends -y cmake make buil COPY src src COPY CMakeLists.txt CMakeLists.txt - RUN cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release RUN make -j4 install @@ -31,4 +30,4 @@ RUN pip install . # Add example scripts + test data (to be able to test inside the docker image) COPY scripts /pdal_ign_plugin/scripts -COPY test /pdal_ign_plugin/test \ No newline at end of file +COPY test /pdal_ign_plugin/test diff --git a/README.md b/README.md index 20f320e..1409bce 100755 --- a/README.md +++ b/README.md @@ -74,7 +74,7 @@ The name of the folder informs of the plugIN nature (reader, writer, filter). The code should respect the documentation proposed by pdal: [build a pdal plugin](https://pdal.io/en/2.6.0/development/plugins.html). Be careful to change if the plugIn is a reader, a writer or a filter. -The CMakeList should contains: +The CMakeList should contain: ``` file( GLOB_RECURSE GD_SRCS ${CMAKE_SOURCE_DIR} *) @@ -90,7 +90,6 @@ install(TARGETS pdal_plugin_filter_my_new_PI) ``` You should complete the main CMakeList by adding the new plugIN: - ``` add_subdirectory(src/filter_my_new_PI) ``` @@ -98,6 +97,7 @@ add_subdirectory(src/filter_my_new_PI) Each plugIN has his own md file in the doc directory, structured as the [model](./doc/_doc_model_plugIN.md). 
Don't forget to update [the list](#list-of-filters) with a link to the documentation. + ## `macro` python module usage diff --git a/environment_docker.yml b/environment_docker.yml index 9232812..6d00e85 100755 --- a/environment_docker.yml +++ b/environment_docker.yml @@ -7,7 +7,9 @@ dependencies: - python-pdal - gdal - pytest + # --------- pip & pip libraries --------- # - pip - pip: - ign-pdal-tools + diff --git a/scripts/ex_filtering_points_with_add_dimensions.py b/scripts/mark_points_to_use_for_digital_models_with_new_dimension.py similarity index 76% rename from scripts/ex_filtering_points_with_add_dimensions.py rename to scripts/mark_points_to_use_for_digital_models_with_new_dimension.py index 01ca09b..636f5f2 100755 --- a/scripts/ex_filtering_points_with_add_dimensions.py +++ b/scripts/mark_points_to_use_for_digital_models_with_new_dimension.py @@ -17,22 +17,46 @@ def parse_args(): parser.add_argument( "--output_las", "-o", type=str, required=True, help="Output cloud las file" ) - parser.add_argument("--output_dsm", "-s", type=str, required=True, help="Output dsm tiff file") - parser.add_argument("--output_dtm", "-t", type=str, required=True, help="Output dtm tiff file") + parser.add_argument( + "--dsm_dimension", + type=str, + required=False, + default="dsm_marker", + help="Dimension name for the output DSM marker", + ) + parser.add_argument( + "--dtm_dimension", + type=str, + required=False, + default="dtm_marker", + help="Dimension name for the output DTM marker", + ) + parser.add_argument( + "--output_dsm", "-s", type=str, required=False, default="", help="Output dsm tiff file" + ) + parser.add_argument( + "--output_dtm", "-t", type=str, required=False, default="", help="Output dtm tiff file" + ) return parser.parse_args() if __name__ == "__main__": args = parse_args() - pipeline = pdal.Reader.las(args.input) + pipeline = pdal.Pipeline() | pdal.Reader.las(args.input) + dsm_dim = args.dsm_dimension + dtm_dim = args.dtm_dimension + + 
# Récupération des dimensions du fichier en entrée + input_dimensions = pipeline.quickinfo["readers.las"]["dimensions"].split(", ") # 0 - ajout de dimensions temporaires pipeline |= pdal.Filter.ferry( dimensions="=>PT_GRID_DSM, =>PT_VEG_DSM, =>PT_GRID_DTM, =>PT_ON_BRIDGE" ) - # 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en compte des points sol (2) et basse + # 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en + # compte des points sol (2) et basse # vegetation (3) proche de la végétation # pour le calcul du DSM @@ -125,21 +149,25 @@ def parse_args(): pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=1 WHERE PT_GRID_DTM==1"]) # 5 - export du nuage et des DSM + # Ajout des dimensions de sortie + pipeline |= pdal.Filter.ferry(dimensions=f"PT_GRID_DSM=>{dsm_dim}, PT_GRID_DTM=>{dtm_dim}") - pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output_las) + pipeline |= pdal.Writer.las( + extra_dims=input_dimensions + [dtm_dim, dsm_dim], forward="all", filename=args.output_las + ) pipeline |= pdal.Writer.gdal( gdaldriver="GTiff", output_type="max", resolution=2.0, filename=args.output_dtm, - where="PT_GRID_DTM==1", + where=f"{dtm_dim}==1", ) pipeline |= pdal.Writer.gdal( gdaldriver="GTiff", output_type="max", resolution=2.0, filename=args.output_dsm, - where="PT_GRID_DSM==1", + where=f"{dsm_dim}==1", ) pipeline.execute()