diff --git a/.github/workflows/cicd_docker.yml b/.github/workflows/cicd_docker.yml new file mode 100644 index 0000000..a80bf05 --- /dev/null +++ b/.github/workflows/cicd_docker.yml @@ -0,0 +1,26 @@ +name: cicd_docker + +on: + # Run tests for non-draft pull request on main + pull_request: + branches: + - main + +env: + DOCKER_IMAGE_NAME: pdal_ign_plugin + +jobs: + + build_docker_image_and_run_tests: + runs-on: ubuntu-latest + + steps: + - name: Checkout branch + uses: actions/checkout@v3 + + - name: Build docker image + run: docker build -t ${{ env.DOCKER_IMAGE_NAME }}:test . + + - name: Run tests in docker image + run: docker run ${{ env.DOCKER_IMAGE_NAME }}:test python -m pytest + diff --git a/.github/workflows/cicd_test.yml b/.github/workflows/cicd_test.yml index 62f2df9..4a92fc0 100644 --- a/.github/workflows/cicd_test.yml +++ b/.github/workflows/cicd_test.yml @@ -1,7 +1,7 @@ name: cicd_test on: - # Run each time some code are push on any branch + # Run each time some code is pushed on any branch push: branches: - '**' @@ -25,8 +25,8 @@ jobs: activate-environment: pdal_ign_plugin environment-file: ./environment.yml auto-activate-base: true - - - name: compil_plugins + + - name: compile_plugins run: source ./ci/build.sh - name: test diff --git a/.gitignore b/.gitignore index d8c4899..ec967c4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,7 @@ xcode +.vscode +build install -__pycache__ +*/__pycache__ test/__pycache_ test/.idea diff --git a/Dockerfile b/Dockerfile index 3a1cbfd..2b9dbb5 100755 --- a/Dockerfile +++ b/Dockerfile @@ -1,27 +1,34 @@ FROM mambaorg/micromamba:bullseye-slim as build - + COPY environment_docker.yml /environment_docker.yml - -USER root -RUN micromamba env create -f /environment_docker.yml + +USER root +RUN micromamba env create -f /environment_docker.yml SHELL ["micromamba", "run", "-n", "pdal_ign_plugin", "/bin/bash", "-c"] RUN apt-get update && apt-get install --no-install-recommends -y cmake make build-essential g++ && rm -rf /var/lib/apt/lists/* - -COPY src src + +COPY src src COPY CMakeLists.txt CMakeLists.txt -COPY macro macro - -RUN cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release + +RUN cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release RUN make -j4 install - + FROM debian:bullseye-slim - + COPY --from=build /opt/conda/envs/pdal_ign_plugin /opt/conda/envs/pdal_ign_plugin -RUN mkdir -p /pdal_ign_plugin +RUN mkdir -p /pdal_ign_plugin COPY --from=build /tmp/install/lib /pdal_ign_plugin/install/lib -COPY --from=build /tmp/macro /macro -ENV PATH=$PATH:/opt/conda/envs/pdal_ign_plugin/bin/ -ENV PROJ_LIB=/opt/conda/envs/pdal_ign_plugin/share/proj/ +ENV PATH=$PATH:/opt/conda/envs/pdal_ign_plugin/bin/ +ENV PROJ_LIB=/opt/conda/envs/pdal_ign_plugin/share/proj/ ENV PDAL_DRIVER_PATH=/pdal_ign_plugin/install/lib +# Install python macro module +COPY macro /pdal_ign_plugin/macro +COPY pyproject.toml /pdal_ign_plugin/pyproject.toml +WORKDIR /pdal_ign_plugin +RUN pip install . + +# Add example scripts + test data (to be able to test inside the docker image) +COPY scripts /pdal_ign_plugin/scripts +COPY test /pdal_ign_plugin/test \ No newline at end of file diff --git a/README.md b/README.md index c98a2f5..20f320e 100755 --- a/README.md +++ b/README.md @@ -2,40 +2,50 @@ ## Compile -You need to have conda ! +You need to have conda! + +Create the ign_pdal_tools conda environment using the `environment.yml` file +to be able to run the compilation in this environment. 
### linux/mac
 
 run ci/build.sh
 
-### Windows
+### Windows
 
 one day, maybe...
 
 ## Architecture of the code
 
-The code is structured as :
+The code is structured as:
 
 ```
 ├── src
-│ ├── plugins forlder
-│ │ ├── plufinFilter.cpp
-│ │ ├── plufinFilter.h
+│ ├── plugin folder
+│ │ ├── pluginFilter.cpp
+│ │ ├── pluginFilter.h
 │ │ ├── CMakeLists.txt
 ├── doc
 │ ├── pluginFilter.md
 ├── ci
+├── macro # Python module with ready-to-use filter combinations
+│   ├── __init__.py
+│   ├── macro.py
+│   └── version.py
+├── scripts
+│   ├── *.py # Example scripts to use the plugin filters + the filter combinations contained in `macro`
 ├── test
 ├── CMakeLists.txt
 ├── environment*.yml
-├── Dockerfile
-├── .github
+├── Dockerfile
+├── pyproject.toml # Setup file to install the `macro` python module with pip
+├── .github
 └── .gitignore
 ```
 
 ## Run the tests
 
-Each plugin should have his own test. To run test :
+Each plugin should have its own test. To run all tests:
 
 ```
 python -m pytest -s
@@ -43,27 +53,28 @@ python -m pytest -s
 
 ## List of Filters
 
-[grid decimation](./doc/grid_decimation.md)
+[grid decimation](./doc/grid_decimation.md) [Deprecated: use the gridDecimation filter from the pdal repository]
 
 [radius assign](./doc/radius_assign.md)
 
 ## Adding a filter
 
-In order to add a filter, you have to add a new folder in the src directory :
+In order to add a filter, you have to add a new folder in the src directory:
 
 ```
 ├── src
 │ ├── filter_my_new_PI
 │ │ ├── my_new_PI_Filter.cpp
 │ │ ├── my_new_PI_Filter.h
-│ │ ├── CMakeLisits.txt
+│ │ ├── CMakeLists.txt
 ```
 
 The name of the folder informs of the plugIN nature (reader, writer, filter).
 
-The code should respect the documentation purpose by pdal : [build a pdal plugin](https://pdal.io/en/2.6.0/development/plugins.html). Be careful to change if the plugIn is a reader, a writer or a filter.
+The code should follow the plugin documentation provided by pdal: [build a pdal plugin](https://pdal.io/en/2.6.0/development/plugins.html).
+Be careful to adapt it depending on whether the plugIn is a reader, a writer or a filter.
 
-The CMakeList should contains :
+The CMakeList should contain:
 
 ```
 file( GLOB_RECURSE GD_SRCS ${CMAKE_SOURCE_DIR} *)
@@ -78,15 +89,45 @@ PDAL_CREATE_PLUGIN(
 
 install(TARGETS pdal_plugin_filter_my_new_PI)
 ```
 
-You should complet the principal CMakeList by adding the new plugIN :
-
+You should complete the main CMakeList by adding the new plugIN:
+
 ```
 add_subdirectory(src/filter_my_new_PI)
 ```
 
-Each plugIN has his own md file in the doc directory, structured as the [model](./doc/_doc_model_plugIN.md).
+Each plugIN has its own md file in the doc directory, structured as the [model](./doc/_doc_model_plugIN.md).
+
+Don't forget to update [the list](#list-of-filters) with a link to the documentation.
+
+## `macro` python module usage
+
+The `macro` python module is installed in the project docker image so that it can be imported from anywhere in the
+docker image.
+
+
+### Syntax to use it in a python script
-D'ont forget to update [the list](#list-of-filters) with a link with the documentation.
+```python
+from macro import macro
+macro.my_macro(...)
+```
+
+See the `scripts` folder for examples of how to use this module.
+
+### Usage from outside the docker image
+
+If you have a python script on your computer, you can mount its containing folder as a volume in order to
+run it in the docker image.
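+
+For instance, a minimal `my_script.py` could look like the following sketch (the file name, the options and
+the classes used are purely illustrative; it only relies on the `macro` module installed in the image and on
+the pdal python bindings):
+
+```python
+# Hypothetical user script: read a LAS file, apply one of the `macro` helpers, write the result.
+import argparse
+
+import pdal
+
+from macro import macro
+
+
+def parse_args():
+    parser = argparse.ArgumentParser("Minimal example using the macro module")
+    parser.add_argument("--input", "-i", type=str, required=True, help="Input las file")
+    parser.add_argument("--output", "-o", type=str, required=True, help="Output las file")
+    return parser.parse_args()
+
+
+if __name__ == "__main__":
+    args = parse_args()
+    pipeline = pdal.Reader.las(args.input)
+    # Reassign ground points (2) found close to vegetation (4, 5) to the temporary class 102
+    pipeline = macro.add_radius_assign(
+        pipeline,
+        1,
+        False,
+        condition_src="Classification==2",
+        condition_ref=macro.build_condition("Classification", [4, 5]),
+        condition_out="Classification=102",
+    )
+    pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output)
+    pipeline.execute()
+```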
+ +Example: + +```bash +docker run \ + -v /my/data/folder:/data \ + -v /my/output/folder:/output \ + -v /my/script/folder:/scripts \ + pdal_ign_plugin \ + python /scripts/my_script.py --input /data/my_data_file.las -o /output/my_output.las +``` - diff --git a/ci/build.sh b/ci/build.sh index b226faf..a870025 100755 --- a/ci/build.sh +++ b/ci/build.sh @@ -1,7 +1,9 @@ #!/bin/sh +set -e + FILE=~/anaconda3/etc/profile.d/conda.sh -if [ -e ~/anaconda3/etc/profile.d/conda.sh ] +if [ -e ~/anaconda3/etc/profile.d/conda.sh ] then source ~/anaconda3/etc/profile.d/conda.sh elif [ -e ~/miniconda3/etc/profile.d/conda.sh ] @@ -10,8 +12,8 @@ then elif [ -e /usr/share/miniconda/etc/profile.d/conda.sh ] then source /usr/share/miniconda/etc/profile.d/conda.sh -elif [ -e ~/miniforge3/etc/profile.d/conda.sh ] -then +elif [ -e ~/miniforge3/etc/profile.d/conda.sh ] +then source ~/miniforge3/etc/profile.d/conda.sh elif [[ -z "${CONDASH}" ]]; then echo ERROR: Failed to load conda.sh : ~/anaconda3/etc/profile.d/conda.sh or ~/miniforge3/etc/profile.d/conda.sh or env CONDASH @@ -28,10 +30,10 @@ echo conda is $CONDA_PREFIX mkdir build cd build -cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release ../ +cmake -G"Unix Makefiles" -DCONDA_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=Release ../ make install conda deactivate cd .. -rm -rf build \ No newline at end of file +rm -rf build \ No newline at end of file diff --git a/environment.yml b/environment.yml index 695737c..9182325 100755 --- a/environment.yml +++ b/environment.yml @@ -13,7 +13,7 @@ dependencies: - isort # import sorting - flake8 # code analysis - pytest -# --------- pip & pip librairies --------- # +# --------- pip & pip libraries --------- # - pip - pip: - ign-pdal-tools diff --git a/environment_docker.yml b/environment_docker.yml index 7e23ef6..9232812 100755 --- a/environment_docker.yml +++ b/environment_docker.yml @@ -6,8 +6,8 @@ dependencies: - pdal - python-pdal - gdal -# --------- pip & pip librairies --------- # + - pytest + # --------- pip & pip libraries --------- # - pip - pip: - ign-pdal-tools - diff --git a/macro/ex_filtering_points.py b/macro/ex_filtering_points.py deleted file mode 100755 index f79af6d..0000000 --- a/macro/ex_filtering_points.py +++ /dev/null @@ -1,94 +0,0 @@ -import argparse -import pdal -import macro - -""" -This tool shows how to use functions of macro in a pdal pipeline -""" - -def parse_args(): - parser = argparse.ArgumentParser("Tool to apply pdal pipelines for DSM and DTM calculation") - parser.add_argument("--input", "-i", type=str, required=True, help="Input las file") - parser.add_argument("--output_las", "-o", type=str, required=True, help="Output cloud las file") - parser.add_argument("--output_dsm", "-s", type=str, required=True, help="Output dsm tiff file") - parser.add_argument("--output_dtm", "-t", type=str, required=True, help="Output dtm tiff file") - return parser.parse_args() - - -if __name__ == "__main__": - args = parse_args() - - pipeline = pdal.Reader.las(args.input) - - ## 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en compte des points sol (2) et basse - ## vegetation (3) proche de la végétation : on les affecte en 100 - - # bouche trou : assigne les points sol en 102 à l'intérieur de la veget (4,5) - pipeline = macro.add_radius_assign(pipeline, 1, False, condition_src="Classification==2", condition_ref=macro.build_condition("Classification", [4,5]), condition_out="Classification=102") - pipeline = macro.add_radius_assign(pipeline, 1, 
False, condition_src="Classification==102", condition_ref="Classification==2", condition_out="Classification=2") - - # selection des points de veget basse proche de la veget haute : assigne 103 - pipeline = macro.add_radius_assign(pipeline, 1, False, condition_src="Classification==3", condition_ref="Classification==5", condition_out="Classification=103") - - # max des points de veget (et surement veget - 102,103) sur une grille régulière : assigne 100 - pipeline |= pdal.Filter.gridDecimation(resolution=0.75, value="Classification=100", output_type="max", where=macro.build_condition("Classification", [4,5,102,103])) - - # remise à zero des codes 102 et 103 - pipeline |= pdal.Filter.assign(value="Classification=2", where="Classification==102") - pipeline |= pdal.Filter.assign(value="Classification=3", where="Classification==103") - - ## 2 - sélection des points pour DTM et DSM - - # selection de points sol (max) sur une grille régulière - pipeline |= pdal.Filter.gridDecimation(resolution=0.5, value="Classification=102", output_type="max", where="Classification==2") - - # selection de points DSM (max) sur une grille régulière - pipeline |= pdal.Filter.gridDecimation(resolution=0.5, value="Classification=200", output_type="max", where=macro.build_condition("Classification", [2,3,4,5,6,9,17,64,100])) - - # assigne des points sol sélectionnés (102) en 100 : les points proches de la végaétation, des ponts, de l'eau et 64 - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==102", - condition_ref=macro.build_condition("Classification", [4,5,6,9,17,64,100]), condition_out="Classification=100") - - # remise à zero du code 102 - pipeline |= pdal.Filter.assign(value="Classification=2", where="Classification==102") - - ## 3 - gestion des ponts - - - - # bouche trou : on élimine les points sol (2) au milieu du pont en les mettant à 102 - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==2", condition_ref="Classification==17", condition_out="Classification=102") - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==102", - condition_ref=macro.build_condition("Classification", [2,3,4,5]), condition_out="Classification=2") - - # bouche trou : on élimine les points basse végétation (3) au milieu du pont en les mettant à 103 - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==3", condition_ref="Classification==17", condition_out="Classification=103") - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==103", - condition_ref=macro.build_condition("Classification", [2,3,4,5]), condition_out="Classification=3") - - # bouche trou : on élimine les points moyenne végétation (4) au milieu du pont en les mettant à 104 - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==4", condition_ref="Classification==17", condition_out="Classification=104") - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==104", - condition_ref=macro.build_condition("Classification", [2,3,4,5]), condition_out="Classification=4") - - # bouche trou : on élimine les points haute végétation (5) au milieu du pont en les mettant à 105 - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==5", condition_ref="Classification==17", condition_out="Classification=105") - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==105", - 
condition_ref=macro.build_condition("Classification", [2,3,4,5]), condition_out="Classification=5") - - # bouche trou : on élimine les points eau (9) au milieu du pont en les mettant à 109 - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==9", condition_ref="Classification==17", condition_out="Classification=109") - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="Classification==109", - condition_ref="Classification==9", condition_out="Classification=9") - - # step 15 et supression des points ?? - - # 4 - export du nuage - pipeline |= pdal.Writer.las(extra_dims="all",forward="all",filename=args.output_las) - - # export des DSM/DTM - pipeline |= pdal.Writer.gdal(gdaldriver="GTiff", output_type="max", resolution=2.0, filename=args.output_dtm, where=macro.build_condition("Classification", [2,66])) - pipeline |= pdal.Writer.gdal(gdaldriver="GTiff", output_type="max", resolution=2.0, filename=args.output_dsm, where=macro.build_condition("Classification", [2,3,4,5,17,64])) - - pipeline.execute() - diff --git a/macro/ex_filtering_points_with_add_dimensions.py b/macro/ex_filtering_points_with_add_dimensions.py deleted file mode 100755 index 107a263..0000000 --- a/macro/ex_filtering_points_with_add_dimensions.py +++ /dev/null @@ -1,78 +0,0 @@ -import argparse -import pdal -import macro - -""" -This tool shows how to use functions of macro in a pdal pipeline -""" - -def parse_args(): - parser = argparse.ArgumentParser("Tool to apply pdal pipelines for DSM and DTM calculation (with add dimensions for the concerned points)") - parser.add_argument("--input", "-i", type=str, required=True, help="Input las file") - parser.add_argument("--output_las", "-o", type=str, required=True, help="Output cloud las file") - parser.add_argument("--output_dsm", "-s", type=str, required=True, help="Output dsm tiff file") - parser.add_argument("--output_dtm", "-t", type=str, required=True, help="Output dtm tiff file") - return parser.parse_args() - - -if __name__ == "__main__": - args = parse_args() - - pipeline = pdal.Reader.las(args.input) - - # 0 - ajout de dimensions temporaires - pipeline |= pdal.Filter.ferry(dimensions=f"=>PT_GRID_DSM, =>PT_VEG_DSM, =>PT_GRID_DTM, =>PT_ON_BRIDGE") - - - ## 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en compte des points sol (2) et basse - ## vegetation (3) proche de la végétation - ## pour le calcul du DSM - - pipeline |= pdal.Filter.assign(value=["PT_VEG_DSM = 1 WHERE " + macro.build_condition("Classification", [4,5])]) - - # bouche trou : assigne les points sol à l'intérieur de la veget (4,5) - pipeline = macro.add_radius_assign(pipeline, 1, False, condition_src="Classification==2", condition_ref=macro.build_condition("Classification", [4,5]), condition_out="PT_VEG_DSM=1") - pipeline = macro.add_radius_assign(pipeline, 1, False, condition_src="PT_VEG_DSM==1 && Classification==2", condition_ref="Classification==2", condition_out="PT_VEG_DSM=0") - - # selection des points de veget basse proche de la veget haute - pipeline = macro.add_radius_assign(pipeline, 1, False, condition_src="Classification==3", condition_ref="Classification==5", condition_out="PT_VEG_DSM=1") - - # max des points de veget (PT_VEG_DSM==1) sur une grille régulière : - pipeline |= pdal.Filter.gridDecimation(resolution=0.75, value="PT_GRID_DSM=1", output_type="max", where="PT_VEG_DSM==1") - - - ## 2 - sélection des points pour DTM et DSM - - # selection de points DTM (max) sur une grille régulière - pipeline 
|= pdal.Filter.gridDecimation(resolution=0.5, value="PT_GRID_DTM=1", output_type="max", where="Classification==2") - - # selection de points DSM (max) sur une grille régulière - pipeline |= pdal.Filter.gridDecimation(resolution=0.5, value="PT_GRID_DSM=1", output_type="max", - where="(" + macro.build_condition("Classification", [6,9,17,64]) + ") || PT_GRID_DSM==1") - - # assigne des points sol sélectionnés : les points proches de la végétation, des ponts, de l'eau, 64 - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="PT_GRID_DTM==1", - condition_ref=macro.build_condition("Classification", [4,5,6,9,17,64]), - condition_out="PT_GRID_DSM=1") - - - ## 3 - gestion des ponts - # bouche trou : on filtre les points (2,3,4,5,9) au milieu du pont en les mettant à PT_ON_BRIDGE=1 - - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src=macro.build_condition("Classification", [2,3,4,5,9]), condition_ref="Classification==17", condition_out="PT_ON_BRIDGE=1") - pipeline = macro.add_radius_assign(pipeline, 1.5, False, condition_src="PT_ON_BRIDGE==1", - condition_ref=macro.build_condition("Classification", [2,3,4,5]), condition_out="PT_ON_BRIDGE=0") - pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=0 WHERE PT_ON_BRIDGE==1"]) - - - ## 4 - point pour DTM servent au DSM également - pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=1 WHERE PT_GRID_DTM==1"]) - - ## 5 - export du nuage et des DSM - - pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output_las) - pipeline |= pdal.Writer.gdal(gdaldriver="GTiff", output_type="max", resolution=2.0, filename=args.output_dtm, where="PT_GRID_DTM==1") - pipeline |= pdal.Writer.gdal(gdaldriver="GTiff", output_type="max", resolution=2.0, filename=args.output_dsm, where="PT_GRID_DSM==1") - - pipeline.execute() - diff --git a/macro/macro.py b/macro/macro.py index 71d3684..4a8b04c 100755 --- a/macro/macro.py +++ b/macro/macro.py @@ -1,4 +1,3 @@ -import argparse import pdal """ @@ -6,58 +5,66 @@ """ -def add_radius_assign(pipeline, radius, search_3d, condition_src, condition_ref, condition_out ): +def add_radius_assign(pipeline, radius, search_3d, condition_src, condition_ref, condition_out): """ search points from "condition_src" closed from "condition_ref", and reassign them to "condition_out" This combination is equivalent to the CloseBy macro of TerraScan radius : the search distance - search_3d : the distance reseach is in 3d if True + search_3d : the distance research is in 3d if True condition_src, condition_ref, condition_out : a pdal condition as "Classification==2" """ pipeline |= pdal.Filter.ferry(dimensions=f"=>REF_DOMAIN, =>SRC_DOMAIN, =>radius_search") - pipeline |= pdal.Filter.assign(value=["SRS_DOMAIN = 0", f"SRC_DOMAIN = 1 WHERE {condition_src}", - "REF_DOMAIN = 0", f"REF_DOMAIN = 1 WHERE {condition_ref}", - "radius_search = 0"]) - pipeline |= pdal.Filter.radius_assign(radius=radius, src_domain="SRC_DOMAIN",reference_domain="REF_DOMAIN", - output_dimension="radius_search", is3d=search_3d) - pipeline |= pdal.Filter.assign(value=condition_out,where="radius_search==1") + pipeline |= pdal.Filter.assign( + value=[ + "SRC_DOMAIN = 0", + f"SRC_DOMAIN = 1 WHERE {condition_src}", + "REF_DOMAIN = 0", + f"REF_DOMAIN = 1 WHERE {condition_ref}", + "radius_search = 0", + ] + ) + pipeline |= pdal.Filter.radius_assign( + radius=radius, + src_domain="SRC_DOMAIN", + reference_domain="REF_DOMAIN", + output_dimension="radius_search", + is3d=search_3d, + ) + pipeline |= pdal.Filter.assign(value=condition_out, 
where="radius_search==1") return pipeline -def classify_hgt_ground(pipeline, hmin, hmax, condition, condition_out): +def classify_hgt_ground(pipeline, h_min, h_max, condition, condition_out): """ - reassign points from "condition" between "hmin" and "hmax" of the ground to "condition_out" - This combination is equivalent to the ClassifyHgtGrd macro of TerraScan - condition, condition_out : a pdal condition as "Classification==2" + reassign points from "condition" between "h_min" and "h_max" of the ground to "condition_out" + This combination is equivalent to the ClassifyHgtGrd macro of TerraScan + condition, condition_out : a pdal condition as "Classification==2" """ pipeline |= pdal.Filter.hag_delaunay(allow_extrapolation=True) - condition_h = f"HeightAboveGround>{hmin} && HeightAboveGround<={hmax}" + condition_h = f"HeightAboveGround>{h_min} && HeightAboveGround<={h_max}" condition_h += " && " + condition pipeline |= pdal.Filter.assign(value=condition_out, where=condition_h) return pipeline - def keep_non_planar_pts(pipeline, condition, condition_out): """ - reassign points from "condition" who are planar to "condition_out" - This combination is equivalent to the ClassifyModelKey macro of TerraScan - condition, condition_out : a pdal condition as "Classification==2" + reassign points from "condition" who are planar to "condition_out" + This combination is equivalent to the ClassifyModelKey macro of TerraScan + condition, condition_out : a pdal condition as "Classification==2" """ - pipeline |= pdal.Filter.approximatecoplanar(knn=8,thresh1=25,thresh2=6,where=condition) - pipeline |= pdal.Filter.assign(value=condition_out,where=f"Coplanar==0 && ({condition})") + pipeline |= pdal.Filter.approximatecoplanar(knn=8, thresh1=25, thresh2=6, where=condition) + pipeline |= pdal.Filter.assign(value=condition_out, where=f"Coplanar==0 && ({condition})") return pipeline - - def build_condition(key, values): """ - build 'key==values[0] || key==values[1] ...' + build 'key==values[0] || key==values[1] ...' 
""" condition = "" for v in values: - condition += key+"=="+str(v) - if v!=values[-1]:condition += " || " + condition += key + "==" + str(v) + if v != values[-1]: + condition += " || " return condition - diff --git a/macro/version.py b/macro/version.py new file mode 100644 index 0000000..74443dd --- /dev/null +++ b/macro/version.py @@ -0,0 +1,5 @@ +__version__ = "0.1.0" + + +if __name__ == "__main__": + print(__version__) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..165ffe3 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,27 @@ +[project] +name = "pdal_ign_macro" +dynamic = ["version"] +dependencies = [] + +[tool.setuptools.dynamic] +version = { attr = "macro.version.__version__" } + +[tool.setuptools] +packages = ["macro"] + +[tool.black] +line-length = 99 +include = '\.pyi?$' +exclude = ''' +/( + \.toml + |\.sh + |\.git + |\.ini + |\.bat + | data +)/ +''' + +[tool.isort] +profile = "black" diff --git a/scripts/ex_filtering_points.py b/scripts/ex_filtering_points.py new file mode 100755 index 0000000..b672da8 --- /dev/null +++ b/scripts/ex_filtering_points.py @@ -0,0 +1,210 @@ +import argparse +import pdal +from macro import macro + +""" +This tool shows how to use functions of macro in a pdal pipeline +""" + + +def parse_args(): + parser = argparse.ArgumentParser("Tool to apply pdal pipelines for DSM and DTM calculation") + parser.add_argument("--input", "-i", type=str, required=True, help="Input las file") + parser.add_argument( + "--output_las", "-o", type=str, required=True, help="Output cloud las file" + ) + parser.add_argument("--output_dsm", "-s", type=str, required=True, help="Output dsm tiff file") + parser.add_argument("--output_dtm", "-t", type=str, required=True, help="Output dtm tiff file") + return parser.parse_args() + + +if __name__ == "__main__": + args = parse_args() + + pipeline = pdal.Reader.las(args.input) + + ## 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en compte des points sol (2) et basse + ## vegetation (3) proche de la végétation : on les affecte en 100 + + # bouche trou : assigne les points sol en 102 à l'intérieur de la veget (4,5) + pipeline = macro.add_radius_assign( + pipeline, + 1, + False, + condition_src="Classification==2", + condition_ref=macro.build_condition("Classification", [4, 5]), + condition_out="Classification=102", + ) + pipeline = macro.add_radius_assign( + pipeline, + 1, + False, + condition_src="Classification==102", + condition_ref="Classification==2", + condition_out="Classification=2", + ) + + # selection des points de veget basse proche de la veget haute : assigne 103 + pipeline = macro.add_radius_assign( + pipeline, + 1, + False, + condition_src="Classification==3", + condition_ref="Classification==5", + condition_out="Classification=103", + ) + + # max des points de veget (et surement veget - 102,103) sur une grille régulière : assigne 100 + pipeline |= pdal.Filter.gridDecimation( + resolution=0.75, + value="Classification=100", + output_type="max", + where=macro.build_condition("Classification", [4, 5, 102, 103]), + ) + + # remise à zero des codes 102 et 103 + pipeline |= pdal.Filter.assign(value="Classification=2", where="Classification==102") + pipeline |= pdal.Filter.assign(value="Classification=3", where="Classification==103") + + ## 2 - sélection des points pour DTM et DSM + + # selection de points sol (max) sur une grille régulière + pipeline |= pdal.Filter.gridDecimation( + resolution=0.5, value="Classification=102", output_type="max", 
where="Classification==2" + ) + + # selection de points DSM (max) sur une grille régulière + pipeline |= pdal.Filter.gridDecimation( + resolution=0.5, + value="Classification=200", + output_type="max", + where=macro.build_condition("Classification", [2, 3, 4, 5, 6, 9, 17, 64, 100]), + ) + + # assigne des points sol sélectionnés (102) en 100 : les points proches de la végaétation, des ponts, de l'eau et 64 + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==102", + condition_ref=macro.build_condition("Classification", [4, 5, 6, 9, 17, 64, 100]), + condition_out="Classification=100", + ) + + # remise à zero du code 102 + pipeline |= pdal.Filter.assign(value="Classification=2", where="Classification==102") + + ## 3 - gestion des ponts + + # bouche trou : on élimine les points sol (2) au milieu du pont en les mettant à 102 + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==2", + condition_ref="Classification==17", + condition_out="Classification=102", + ) + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==102", + condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]), + condition_out="Classification=2", + ) + + # bouche trou : on élimine les points basse végétation (3) au milieu du pont en les mettant à 103 + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==3", + condition_ref="Classification==17", + condition_out="Classification=103", + ) + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==103", + condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]), + condition_out="Classification=3", + ) + + # bouche trou : on élimine les points moyenne végétation (4) au milieu du pont en les mettant à 104 + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==4", + condition_ref="Classification==17", + condition_out="Classification=104", + ) + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==104", + condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]), + condition_out="Classification=4", + ) + + # bouche trou : on élimine les points haute végétation (5) au milieu du pont en les mettant à 105 + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==5", + condition_ref="Classification==17", + condition_out="Classification=105", + ) + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==105", + condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]), + condition_out="Classification=5", + ) + + # bouche trou : on élimine les points eau (9) au milieu du pont en les mettant à 109 + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==9", + condition_ref="Classification==17", + condition_out="Classification=109", + ) + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="Classification==109", + condition_ref="Classification==9", + condition_out="Classification=9", + ) + + # step 15 et supression des points ?? 
+ + # 4 - export du nuage + pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output_las) + + # export des DSM/DTM + pipeline |= pdal.Writer.gdal( + gdaldriver="GTiff", + output_type="max", + resolution=2.0, + filename=args.output_dtm, + where=macro.build_condition("Classification", [2, 66]), + ) + pipeline |= pdal.Writer.gdal( + gdaldriver="GTiff", + output_type="max", + resolution=2.0, + filename=args.output_dsm, + where=macro.build_condition("Classification", [2, 3, 4, 5, 17, 64]), + ) + + pipeline.execute() diff --git a/scripts/ex_filtering_points_with_add_dimensions.py b/scripts/ex_filtering_points_with_add_dimensions.py new file mode 100755 index 0000000..f3f06fb --- /dev/null +++ b/scripts/ex_filtering_points_with_add_dimensions.py @@ -0,0 +1,143 @@ +import argparse +import pdal +from macro import macro + +""" +This tool shows how to use functions of macro in a pdal pipeline +""" + + +def parse_args(): + parser = argparse.ArgumentParser( + "Tool to apply pdal pipelines for DSM and DTM calculation (with add dimensions for the concerned points)" + ) + parser.add_argument("--input", "-i", type=str, required=True, help="Input las file") + parser.add_argument( + "--output_las", "-o", type=str, required=True, help="Output cloud las file" + ) + parser.add_argument("--output_dsm", "-s", type=str, required=True, help="Output dsm tiff file") + parser.add_argument("--output_dtm", "-t", type=str, required=True, help="Output dtm tiff file") + return parser.parse_args() + + +if __name__ == "__main__": + args = parse_args() + + pipeline = pdal.Reader.las(args.input) + + # 0 - ajout de dimensions temporaires + pipeline |= pdal.Filter.ferry( + dimensions=f"=>PT_GRID_DSM, =>PT_VEG_DSM, =>PT_GRID_DTM, =>PT_ON_BRIDGE" + ) + + ## 1 - recherche des points max de végétation (4,5) sur une grille régulière, avec prise en compte des points sol (2) et basse + ## vegetation (3) proche de la végétation + ## pour le calcul du DSM + + pipeline |= pdal.Filter.assign( + value=["PT_VEG_DSM = 1 WHERE " + macro.build_condition("Classification", [4, 5])] + ) + + # bouche trou : assigne les points sol à l'intérieur de la veget (4,5) + pipeline = macro.add_radius_assign( + pipeline, + 1, + False, + condition_src="Classification==2", + condition_ref=macro.build_condition("Classification", [4, 5]), + condition_out="PT_VEG_DSM=1", + ) + pipeline = macro.add_radius_assign( + pipeline, + 1, + False, + condition_src="PT_VEG_DSM==1 && Classification==2", + condition_ref="Classification==2", + condition_out="PT_VEG_DSM=0", + ) + + # selection des points de veget basse proche de la veget haute + pipeline = macro.add_radius_assign( + pipeline, + 1, + False, + condition_src="Classification==3", + condition_ref="Classification==5", + condition_out="PT_VEG_DSM=1", + ) + + # max des points de veget (PT_VEG_DSM==1) sur une grille régulière : + pipeline |= pdal.Filter.gridDecimation( + resolution=0.75, value="PT_GRID_DSM=1", output_type="max", where="PT_VEG_DSM==1" + ) + + ## 2 - sélection des points pour DTM et DSM + + # selection de points DTM (max) sur une grille régulière + pipeline |= pdal.Filter.gridDecimation( + resolution=0.5, value="PT_GRID_DTM=1", output_type="max", where="Classification==2" + ) + + # selection de points DSM (max) sur une grille régulière + pipeline |= pdal.Filter.gridDecimation( + resolution=0.5, + value="PT_GRID_DSM=1", + output_type="max", + where="(" + + macro.build_condition("Classification", [6, 9, 17, 64]) + + ") || PT_GRID_DSM==1", + ) + + # assigne des points sol sélectionnés 
: les points proches de la végétation, des ponts, de l'eau, 64 + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="PT_GRID_DTM==1", + condition_ref=macro.build_condition("Classification", [4, 5, 6, 9, 17, 64]), + condition_out="PT_GRID_DSM=1", + ) + + ## 3 - gestion des ponts + # bouche trou : on filtre les points (2,3,4,5,9) au milieu du pont en les mettant à PT_ON_BRIDGE=1 + + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src=macro.build_condition("Classification", [2, 3, 4, 5, 9]), + condition_ref="Classification==17", + condition_out="PT_ON_BRIDGE=1", + ) + pipeline = macro.add_radius_assign( + pipeline, + 1.5, + False, + condition_src="PT_ON_BRIDGE==1", + condition_ref=macro.build_condition("Classification", [2, 3, 4, 5]), + condition_out="PT_ON_BRIDGE=0", + ) + pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=0 WHERE PT_ON_BRIDGE==1"]) + + ## 4 - point pour DTM servent au DSM également + pipeline |= pdal.Filter.assign(value=["PT_GRID_DSM=1 WHERE PT_GRID_DTM==1"]) + + ## 5 - export du nuage et des DSM + + pipeline |= pdal.Writer.las(extra_dims="all", forward="all", filename=args.output_las) + pipeline |= pdal.Writer.gdal( + gdaldriver="GTiff", + output_type="max", + resolution=2.0, + filename=args.output_dtm, + where="PT_GRID_DTM==1", + ) + pipeline |= pdal.Writer.gdal( + gdaldriver="GTiff", + output_type="max", + resolution=2.0, + filename=args.output_dsm, + where="PT_GRID_DSM==1", + ) + + pipeline.execute() diff --git a/test/utils.py b/test/utils.py index 71334cc..cb042bc 100755 --- a/test/utils.py +++ b/test/utils.py @@ -7,5 +7,6 @@ def pdal_has_plugin(name_filter): print("init pdal driver : ", os.environ["PDAL_DRIVER_PATH"]) result = subprocess.run(["pdal", "--drivers"], stdout=subprocess.PIPE) + print(result.stdout.decode("utf-8")) if name_filter not in result.stdout.decode("utf-8"): - raise ValueError("le script " + name_filter + " n'est pas visible") + raise ValueError("script " + name_filter + " not found by `pdal --drivers`.")