From 6fb59f79ec5717f63c3a9f533d5e9ec5d921b30d Mon Sep 17 00:00:00 2001
From: cdfhalle
Date: Mon, 16 Dec 2024 16:29:58 +0100
Subject: [PATCH 1/5] implement option to export proofreading as segmentation
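This threads two new parameters, includesProofreading and selectedBoundingBox,
from the MaterializeVolumeAnnotationModal through admin/api/jobs.ts and the
route into the materialize_volume_annotation command args. A minimal sketch of
how the new flags travel as query parameters on the job request (the dataset id
and bounding box values below are purely illustrative):

    // Mirrors the query-parameter handling added in startSegmentationAnnotationDependentJob.
    const requestURL = new URL(
      "/api/jobs/run/materializeVolumeAnnotation/someDatasetId",
      location.origin,
    );
    const includesProofreading = true;
    requestURL.searchParams.append("includesProofreading", includesProofreading.toString());
    // A Vector6 is [x, y, z, width, height, depth]; toString() yields "0,0,0,512,512,512".
    const selectedBoundingBox = [0, 0, 0, 512, 512, 512];
    requestURL.searchParams.append("selectedBoundingBox", selectedBoundingBox.toString());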
---
app/controllers/JobController.scala | 9 +++++++--
conf/webknossos.latest.routes | 2 +-
frontend/javascripts/admin/api/jobs.ts | 12 ++++++++++++
.../view/action-bar/starting_job_modals.tsx | 15 ++++++++++++++-
4 files changed, 34 insertions(+), 4 deletions(-)
diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala
index 4ccfdea0bd..ce76edeb74 100644
--- a/app/controllers/JobController.scala
+++ b/app/controllers/JobController.scala
@@ -379,7 +379,9 @@ class JobController @Inject()(
newDatasetName: String,
outputSegmentationLayerName: String,
mergeSegments: Boolean,
- volumeLayerName: Option[String]): Action[AnyContent] =
+ volumeLayerName: Option[String],
+ includesProofreading: Boolean,
+ selectedBoundingBox: Option[String]): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
log(Some(slackNotificationService.noticeFailedJobRequest)) {
for {
@@ -393,6 +395,7 @@ class JobController @Inject()(
command = JobCommand.materialize_volume_annotation
_ <- datasetService.assertValidDatasetName(newDatasetName)
_ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName)
+ selectedBoundingBoxParsed <- Fox.runIf(includesProofreading)(selectedBoundingBox.toFox)
commandArgs = Json.obj(
"organization_id" -> organization._id,
"dataset_name" -> dataset.name,
@@ -403,7 +406,9 @@ class JobController @Inject()(
"annotation_type" -> annotationType,
"new_dataset_name" -> newDatasetName,
"merge_segments" -> mergeSegments,
- "volume_layer_name" -> volumeLayerName
+ "volume_layer_name" -> volumeLayerName,
+ "includes_proofreading" -> includesProofreading,
+ "selected_bounding_box" -> selectedBoundingBoxParsed
)
job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunApplyMergerMode"
js <- jobService.publicWrites(job)
diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes
index 76e22722a2..117e6fdb25 100644
--- a/conf/webknossos.latest.routes
+++ b/conf/webknossos.latest.routes
@@ -268,7 +268,7 @@ POST /jobs/run/inferNuclei/:datasetId
POST /jobs/run/inferNeurons/:datasetId controllers.JobController.runInferNeuronsJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String)
POST /jobs/run/inferMitochondria/:datasetId controllers.JobController.runInferMitochondriaJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String)
POST /jobs/run/alignSections/:datasetId controllers.JobController.runAlignSectionsJob(datasetId: String, layerName: String, newDatasetName: String, annotationId: Option[String])
-POST /jobs/run/materializeVolumeAnnotation/:datasetId controllers.JobController.runMaterializeVolumeAnnotationJob(datasetId: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String])
+POST /jobs/run/materializeVolumeAnnotation/:datasetId controllers.JobController.runMaterializeVolumeAnnotationJob(datasetId: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String], includesProofreading: Boolean, selectedBoundingBox: Option[String])
POST /jobs/run/findLargestSegmentId/:datasetId controllers.JobController.runFindLargestSegmentIdJob(datasetId: String, layerName: String)
POST /jobs/run/renderAnimation/:datasetId controllers.JobController.runRenderAnimationJob(datasetId: String)
GET /jobs/:id controllers.JobController.get(id: String)
diff --git a/frontend/javascripts/admin/api/jobs.ts b/frontend/javascripts/admin/api/jobs.ts
index 3327d61884..350f74b8ed 100644
--- a/frontend/javascripts/admin/api/jobs.ts
+++ b/frontend/javascripts/admin/api/jobs.ts
@@ -208,6 +208,8 @@ function startSegmentationAnnotationDependentJob(
annotationId: string,
annotationType: APIAnnotationType,
mergeSegments?: boolean,
+ includesProofreading?: boolean,
+ selectedBoundingBox?: Vector6,
): Promise<APIJob> {
const requestURL = new URL(`/api/jobs/run/${jobURLPath}/${datasetId}`, location.origin);
if (volumeLayerName != null) {
@@ -222,6 +224,12 @@ function startSegmentationAnnotationDependentJob(
if (mergeSegments != null) {
requestURL.searchParams.append("mergeSegments", mergeSegments.toString());
}
+ if (includesProofreading != null) {
+ requestURL.searchParams.append("includesProofreading", includesProofreading.toString());
+ }
+ if (selectedBoundingBox) {
+ requestURL.searchParams.append("selectedBoundingBox", selectedBoundingBox.toString());
+ }
return Request.receiveJSON(requestURL.href, {
method: "POST",
});
@@ -235,6 +243,8 @@ export function startMaterializingVolumeAnnotationJob(
annotationId: string,
annotationType: APIAnnotationType,
mergeSegments: boolean,
+ includesProofreading: boolean,
+ selectedBoundingBox?: Vector6,
): Promise<APIJob> {
return startSegmentationAnnotationDependentJob(
"materializeVolumeAnnotation",
@@ -245,6 +255,8 @@ export function startMaterializingVolumeAnnotationJob(
annotationId,
annotationType,
mergeSegments,
+ includesProofreading,
+ selectedBoundingBox,
);
}
diff --git a/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx b/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx
index 3fa2fdb69f..6210e75831 100644
--- a/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx
+++ b/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx
@@ -891,6 +891,7 @@ export function MaterializeVolumeAnnotationModal({
}: MaterializeVolumeAnnotationModalProps) {
const dataset = useSelector((state: OxalisState) => state.dataset);
const tracing = useSelector((state: OxalisState) => state.tracing);
+ let includesProofreading = false;
const activeSegmentationTracingLayer = useSelector(getActiveSegmentationTracingLayer);
const fixedSelectedLayer = selectedVolumeLayer || activeSegmentationTracingLayer;
const readableVolumeLayerName =
@@ -925,6 +926,8 @@ export function MaterializeVolumeAnnotationModal({
output dataset and the output segmentation layer.
);
+ } else {
+ includesProofreading = tracing.volumes.some((v) => v.hasEditableMapping === true);
}
const jobImage =
jobNameToImagePath[jobName] != null ? (
@@ -954,8 +957,13 @@ export function MaterializeVolumeAnnotationModal({
jobName={"materialize_volume_annotation"}
suggestedDatasetSuffix="with_merged_segmentation"
chooseSegmentationLayer
+ isBoundingBoxConfigurable={includesProofreading}
fixedSelectedLayer={fixedSelectedLayer}
- jobApiCall={async ({ newDatasetName, selectedLayer: segmentationLayer }) => {
+ jobApiCall={async ({
+ newDatasetName,
+ selectedLayer: segmentationLayer,
+ selectedBoundingBox,
+ }) => {
// There are 3 cases for the value assignments to volumeLayerName and baseSegmentationName for the job:
// 1. There is a volume annotation with a fallback layer. volumeLayerName will reference the volume layer
// and baseSegmentationName will reference the fallback layer. The job will merge those layers.
@@ -968,6 +976,9 @@ export function MaterializeVolumeAnnotationModal({
? getReadableNameOfVolumeLayer(segmentationLayer, tracing)
: null;
const baseSegmentationName = getBaseSegmentationName(segmentationLayer);
+ const bbox = selectedBoundingBox?.boundingBox
+ ? computeArrayFromBoundingBox(selectedBoundingBox.boundingBox)
+ : undefined;
return startMaterializingVolumeAnnotationJob(
dataset.id,
baseSegmentationName,
@@ -976,6 +987,8 @@ export function MaterializeVolumeAnnotationModal({
tracing.annotationId,
tracing.annotationType,
isMergerModeEnabled,
+ includesProofreading,
+ bbox,
);
}}
description={
From 63f31b87497b266af48b463c0cb86d3ff7b16933 Mon Sep 17 00:00:00 2001
From: cdfhalle
Date: Mon, 16 Dec 2024 17:18:15 +0100
Subject: [PATCH 2/5] edit changelog
---
CHANGELOG.unreleased.md | 1 +
1 file changed, 1 insertion(+)
diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index e4e02843cd..49cc84039f 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -13,6 +13,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
### Added
- Added the total volume of a dataset to a tooltip in the dataset info tab. [#8229](https://github.com/scalableminds/webknossos/pull/8229)
- Optimized performance of data loading with “fill value“ chunks. [#8271](https://github.com/scalableminds/webknossos/pull/8271)
+- Added the option to export proofreading annotations as a segmentation layer. [#8286](https://github.com/scalableminds/webknossos/pull/8286)
### Changed
- Renamed "resolution" to "magnification" in more places within the codebase, including local variables. [#8168](https://github.com/scalableminds/webknossos/pull/8168)
From a935314454a06ca3ff165a759abcb3825b37c59c Mon Sep 17 00:00:00 2001
From: cdfhalle
Date: Mon, 16 Dec 2024 18:14:07 +0100
Subject: [PATCH 3/5] add check for bounding box limits to backend code
---
app/controllers/JobController.scala | 3 +++
1 file changed, 3 insertions(+)
diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala
index ce76edeb74..1eda57a912 100644
--- a/app/controllers/JobController.scala
+++ b/app/controllers/JobController.scala
@@ -396,6 +396,9 @@ class JobController @Inject()(
_ <- datasetService.assertValidDatasetName(newDatasetName)
_ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName)
selectedBoundingBoxParsed <- Fox.runIf(includesProofreading)(selectedBoundingBox.toFox)
+ multiUser <- multiUserDAO.findOne(request.identity._multiUser)
+ _ <- Fox.runIf(!multiUser.isSuperUser && selectedBoundingBoxParsed.isDefined)(
+ jobService.assertBoundingBoxLimits(selectedBoundingBoxParsed, None))
commandArgs = Json.obj(
"organization_id" -> organization._id,
"dataset_name" -> dataset.name,
From 70b5ef08c211a98c9951be2bb5fc2e8ce2483d65 Mon Sep 17 00:00:00 2001
From: cdfhalle
Date: Mon, 16 Dec 2024 18:28:34 +0100
Subject: [PATCH 4/5] add missing .get
---
app/controllers/JobController.scala | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala
index 1eda57a912..a62a6b33b0 100644
--- a/app/controllers/JobController.scala
+++ b/app/controllers/JobController.scala
@@ -398,7 +398,7 @@ class JobController @Inject()(
selectedBoundingBoxParsed <- Fox.runIf(includesProofreading)(selectedBoundingBox.toFox)
multiUser <- multiUserDAO.findOne(request.identity._multiUser)
_ <- Fox.runIf(!multiUser.isSuperUser && selectedBoundingBoxParsed.isDefined)(
- jobService.assertBoundingBoxLimits(selectedBoundingBoxParsed, None))
+ jobService.assertBoundingBoxLimits(selectedBoundingBoxParsed.get, None))
commandArgs = Json.obj(
"organization_id" -> organization._id,
"dataset_name" -> dataset.name,
From 721fc70d425b1b84aeb9ffa6e1bb422fd21bdf3c Mon Sep 17 00:00:00 2001
From: Florian M
Date: Tue, 17 Dec 2024 15:02:58 +0100
Subject: [PATCH 5/5] refactor backend (avoid the .get)
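Instead of parsing the bounding box eagerly and calling .get on the resulting
Option, the bounding box limit check is now wrapped in Fox.runIf (non-superuser
and includesProofreading) combined with Fox.runOptional over selectedBoundingBox,
so the check only runs when a bounding box was actually supplied and no unsafe
Option access is needed. The raw selectedBoundingBox string is passed into the
command args unchanged.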
---
app/controllers/JobController.scala | 7 +++----
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala
index a62a6b33b0..e51fabbc1e 100644
--- a/app/controllers/JobController.scala
+++ b/app/controllers/JobController.scala
@@ -395,10 +395,9 @@ class JobController @Inject()(
command = JobCommand.materialize_volume_annotation
_ <- datasetService.assertValidDatasetName(newDatasetName)
_ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName)
- selectedBoundingBoxParsed <- Fox.runIf(includesProofreading)(selectedBoundingBox.toFox)
multiUser <- multiUserDAO.findOne(request.identity._multiUser)
- _ <- Fox.runIf(!multiUser.isSuperUser && selectedBoundingBoxParsed.isDefined)(
- jobService.assertBoundingBoxLimits(selectedBoundingBoxParsed.get, None))
+ _ <- Fox.runIf(!multiUser.isSuperUser && includesProofreading)(Fox.runOptional(selectedBoundingBox)(bbox =>
+ jobService.assertBoundingBoxLimits(bbox, None)))
commandArgs = Json.obj(
"organization_id" -> organization._id,
"dataset_name" -> dataset.name,
@@ -411,7 +410,7 @@ class JobController @Inject()(
"merge_segments" -> mergeSegments,
"volume_layer_name" -> volumeLayerName,
"includes_proofreading" -> includesProofreading,
- "selected_bounding_box" -> selectedBoundingBoxParsed
+ "selected_bounding_box" -> selectedBoundingBox
)
job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunApplyMergerMode"
js <- jobService.publicWrites(job)