diff --git a/.github/workflows/predict-for-lidar-prod-optimization.yml b/.github/workflows/predict-for-lidar-prod-optimization.yml
new file mode 100644
index 00000000..ff8bf15d
--- /dev/null
+++ b/.github/workflows/predict-for-lidar-prod-optimization.yml
@@ -0,0 +1,102 @@
+# Workflow name
+name: "Prediction on lidar-prod optimization dataset"
+
+on:
+  # Run workflow on user request
+  workflow_dispatch:
+    inputs:
+      user:
+        description: |
+          Username:
+          Used to build a standard path for the outputs in the
+          IA folder of the store (projet-LHD/IA/MYRIA3D-SHARED-WORKSPACE/$USER/$SAMPLING_NAME/)
+        required: true
+      sampling_name:
+        description: |
+          Sampling name:
+          Name of the dataset the model was trained on.
+          Used to build a standard path for the outputs in the
+          IA folder of the store (projet-LHD/IA/MYRIA3D-SHARED-WORKSPACE/$USER/$SAMPLING_NAME/)
+          E.g. YYYYMMDD_MonBeauDataset
+        required: true
+      model_id:
+        description: |
+          Model identifier:
+          It corresponds to the name of the checkpoint file to use for predictions (without the extension!)
+          ($MODEL_ID.ckpt must exist in projet-LHD/IA/MYRIA3D-SHARED-WORKSPACE/$USER/$SAMPLING_NAME/)
+          It is also used to build the output folder
+          (projet-LHD/IA/LIDAR-PROD-OPTIMIZATION//$SAMPLING_NAME/$MODEL_ID)
+          Example: YYYYMMDD_MonBeauSampling_epochXXX_Myria3Dx.y.z
+        required: true
+      predict_config_name:
+        description: |
+          Name of the myria3d config file (.yaml file) to use for prediction
+          (must exist in projet-LHD/IA/MYRIA3D-SHARED-WORKSPACE/$USER/$SAMPLING_NAME/)
+          Example: YYYYMMDD_MonBeauSampling_epochXXX_Myria3Dx.y.z_predict_config_Vx.y.z.yaml
+        required: true
+
+jobs:
+  predict-validation-dataset:
+    runs-on: self-hosted
+    env:
+      OUTPUT_DIR: /var/data/LIDAR-PROD-OPTIMIZATION/${{ github.event.inputs.sampling_name }}/${{ github.event.inputs.model_id }}
+      DATA: /var/data/LIDAR-PROD-OPTIMIZATION/20221018_lidar-prod-optimization-on-151-proto/Comparison/
+      LOGS_DIR: /var/data/MYRIA3D-SHARED-WORKSPACE/${{ github.event.inputs.user }}/${{ github.event.inputs.sampling_name }}/
+      BATCH_SIZE: 5
+
+    steps:
+      - name: Log configuration
+        run: |
+          echo "Run prediction on lidar-prod optimization datasets (val and test)"
+          echo "Sampling name: ${{ github.event.inputs.sampling_name }}"
+          echo "User name: ${{ github.event.inputs.user }}"
+          echo "Checkpoint name: ${{ github.event.inputs.model_id }}"
+          echo "Predict config name: ${{ github.event.inputs.predict_config_name }}"
+          echo "output_dir: ${{env.OUTPUT_DIR}}"
+          echo "data: ${{env.DATA}}"
+          echo "logs dir: ${{env.LOGS_DIR}}"
+
+      - name: Checkout branch
+        uses: actions/checkout@v4
+
+      # See https://github.com/marketplace/actions/setup-micromamba
+      - name: setup-micromamba
+        uses: mamba-org/setup-micromamba@v1.8.1
+        with:
+          environment-file: environment.yml
+          environment-name: myria3d  # activate the environment
+          cache-environment-key: environment-myria3d-predict-validation-dataset  # create cache for this pipeline only
+          # Do not restore downloads as they are already stored by micromamba
+          # cache-downloads-key: downloads-myria3d-predict-validation-dataset
+          generate-run-shell: true
+          download-micromamba: false
+          micromamba-binary-path: /var/data/.local/bin/micromamba
+          micromamba-root-path: /var/data/micromamba
+
+      - name: Run prediction on validation dataset
+        shell: micromamba-shell {0}
+        run: >
+          python run.py
+          --config-path ${{env.LOGS_DIR}}
+          --config-name ${{ github.event.inputs.predict_config_name }}
+          task.task_name=predict
+          predict.src_las=${{env.DATA}}/val/*.laz
+          predict.ckpt_path=${{env.LOGS_DIR}}${{ github.event.inputs.model_id }}.ckpt
+          predict.output_dir=${{env.OUTPUT_DIR}}/preds-valset/
+          predict.interpolator.probas_to_save=[building]
+          predict.gpus=0
+          datamodule.batch_size=${{env.BATCH_SIZE}}
+
+      - name: Run prediction on test dataset
+        shell: micromamba-shell {0}
+        run: >
+          python run.py
+          --config-path ${{env.LOGS_DIR}}
+          --config-name ${{ github.event.inputs.predict_config_name }}
+          task.task_name=predict
+          predict.src_las=${{env.DATA}}/test/*.laz
+          predict.ckpt_path=${{env.LOGS_DIR}}${{ github.event.inputs.model_id }}.ckpt
+          predict.output_dir=${{env.OUTPUT_DIR}}/preds-testset/
+          predict.interpolator.probas_to_save=[building]
+          predict.gpus=0
+          datamodule.batch_size=${{env.BATCH_SIZE}}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8e793776..701893d4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
 # CHANGELOG
 
+- Add a GitHub Actions workflow to run a trained model on the lidar-prod thresholds optimization dataset
+(in order to automate thresholds optimization)
+
 ### 3.8.4
 - fix: move IoU appropriately to fix wrong device error created by a breaking change in torch when using DDP.
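Since the new workflow is triggered by `workflow_dispatch`, it can be launched either from the repository's Actions tab or from the command line. Below is a minimal sketch using the GitHub CLI's `gh workflow run`, assuming `gh` is installed and authenticated for this repository; all input values are placeholders taken from the input descriptions above, not real dataset or model names.

```bash
# Hypothetical example of triggering the workflow above with the GitHub CLI.
# Replace the placeholder values with a real user, sampling name, checkpoint id
# (without the .ckpt extension) and predict config file name.
gh workflow run predict-for-lidar-prod-optimization.yml \
  -f user=my_user \
  -f sampling_name=YYYYMMDD_MonBeauDataset \
  -f model_id=YYYYMMDD_MonBeauSampling_epochXXX_Myria3Dx.y.z \
  -f predict_config_name=YYYYMMDD_MonBeauSampling_epochXXX_Myria3Dx.y.z_predict_config_Vx.y.z.yaml
```

As defined in the job's `env` block, the resulting predictions are written under `/var/data/LIDAR-PROD-OPTIMIZATION/$SAMPLING_NAME/$MODEL_ID/` in `preds-valset/` and `preds-testset/`.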