From a955217ad40ef7c63a00269122e8ac646dfed403 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 18 Jun 2024 15:59:46 +0200 Subject: [PATCH 001/361] WIP: Unified Annotation Versioning --- webknossos-datastore/proto/Annotation.proto | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 webknossos-datastore/proto/Annotation.proto diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto new file mode 100644 index 00000000000..74742e3cddd --- /dev/null +++ b/webknossos-datastore/proto/Annotation.proto @@ -0,0 +1,16 @@ +syntax = "proto2"; + +package com.scalableminds.webknossos.datastore; + +message AnnotationProto { + optional string name = 1; + optional string description = 2; + required int64 version = 3; + repeated AnnotationLayerProto layers = 4; +} + +message AnnotationLayerProto { + required string name = 1; + required int64 version = 2; + required string tracingId = 3; +} From a4361fe0ea337457c61100314e883227ae590a4d Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 19 Jun 2024 16:59:19 +0200 Subject: [PATCH 002/361] initialize route --- fossildb/run.sh | 2 +- .../controllers/DSAnnotationController.scala | 40 +++++++++++++++++++ .../tracings/TracingDataStore.scala | 2 + ...alableminds.webknossos.tracingstore.routes | 2 + 4 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala diff --git a/fossildb/run.sh b/fossildb/run.sh index 55853d3b302..39a622d2ee8 100755 --- a/fossildb/run.sh +++ b/fossildb/run.sh @@ -15,6 +15,6 @@ if [ ! -f "$JAR" ] || [ ! "$CURRENT_VERSION" == "$VERSION" ]; then fi # Note that the editableMappings column is no longer used by wk. Still here for backwards compatibility. 
-COLLECTIONS="skeletons,skeletonUpdates,volumes,volumeData,volumeUpdates,volumeSegmentIndex,editableMappings,editableMappingUpdates,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate" +COLLECTIONS="skeletons,skeletonUpdates,volumes,volumeData,volumeUpdates,volumeSegmentIndex,editableMappings,editableMappingUpdates,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates" exec java -jar "$JAR" -c "$COLLECTIONS" -d "$FOSSILDB_HOME/data" -b "$FOSSILDB_HOME/backup" diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala new file mode 100644 index 00000000000..a5fc4b2355c --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala @@ -0,0 +1,40 @@ +package com.scalableminds.webknossos.tracingstore.controllers + +import com.google.inject.Inject +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.controllers.Controller +import com.scalableminds.webknossos.datastore.services.{AccessTokenService, UserAccessRequest} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} +import play.api.mvc.{Action, AnyContent, PlayBodyParsers} + +import scala.concurrent.{ExecutionContext, Future} + +class DSAnnotationController @Inject()(accessTokenService: AccessTokenService, tracingDataStore: TracingDataStore)( + implicit ec: ExecutionContext, + bodyParsers: PlayBodyParsers) + extends Controller + with KeyValueStoreImplicits { + + def initialize(annotationId: String, token: Option[String]): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccess(UserAccessRequest.webknossos, 
urlOrHeaderToken(token, request)) { + for { + _ <- tracingDataStore.annotations.put(annotationId, 0L, AnnotationProto(version = 0L)) + } yield Ok + } + } + } +} + +// get version history + +// update layer + +// restore of layer + +// delete layer + +// add layer + +// skeleton + volume routes can now take annotationVersion diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala index 37df9decf09..629d79d18a9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala @@ -42,6 +42,8 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val editableMappingUpdates = new FossilDBClient("editableMappingUpdates", config, slackNotificationService) + lazy val annotations = new FossilDBClient("annotations", config, slackNotificationService) + private def shutdown(): Unit = { healthClient.shutdown() skeletons.shutdown() diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 7598dd0baa6..c876fc180e1 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -5,6 +5,8 @@ # Health endpoint GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health +POST /annotation/initialize @com.scalableminds.webknossos.tracingstore.controllers.DSAnnotationController.initialize(annotationId: String, token: Option[String]) + # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) POST 
/volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(token: Option[String], tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) From 5a60d17049416fa9184c98944f044edc1f1d6a89 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 20 Jun 2024 14:08:07 +0200 Subject: [PATCH 003/361] update actions --- webknossos-datastore/proto/Annotation.proto | 41 ++++++++++++++- .../annotation/DSAnnotationService.scala | 51 +++++++++++++++++++ .../controllers/DSAnnotationController.scala | 2 + 3 files changed, 92 insertions(+), 2 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 74742e3cddd..77e9e902a40 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -2,6 +2,11 @@ syntax = "proto2"; package com.scalableminds.webknossos.datastore; +enum AnnotationLayerTypeProto { + skeleton = 1; + volume = 2; +} + message AnnotationProto { optional string name = 1; optional string description = 2; @@ -10,7 +15,39 @@ message AnnotationProto { } message AnnotationLayerProto { + required string tracingId = 1; + required string name = 2; + required int64 version = 3; + optional int64 editableMappingVersion = 4; + required AnnotationLayerTypeProto type = 5; +} + +message AddLayerAnnotationUpdateAction { required string name = 1; - required int64 version = 2; - required string tracingId = 3; + required string tracingId = 2; + required AnnotationLayerTypeProto type = 5; +} + +message DeleteLayerAnnotationUpdateAction { + required string tracingId = 1; +} + +message UpdateLayerAnnotationUpdateAction { + required string tracingId = 1; + required int64 layerVersion = 2; +} + +message UpdateLayerEditableMappingAnnotationUpdateAction { + required string tracingId = 1; 
+ required int64 editableMappingVersion = 2; +} + +message UpdateLayerMetadataAnnotationUpdateAction { + required string tracingId = 1; + required string name = 2; +} + +message UpdateMetadataAnnotationUpdateAction { + optional string name = 1; + optional string description = 2; } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala new file mode 100644 index 00000000000..29b408c4ef8 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala @@ -0,0 +1,51 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.Annotation.{ + AddLayerAnnotationUpdateAction, + AnnotationLayerProto, + AnnotationProto, + DeleteLayerAnnotationUpdateAction, + UpdateLayerAnnotationUpdateAction, + UpdateLayerEditableMappingAnnotationUpdateAction, + UpdateLayerMetadataAnnotationUpdateAction, + UpdateMetadataAnnotationUpdateAction +} +import scalapb.GeneratedMessage + +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class DSAnnotationService @Inject()() { + def storeUpdate(updateAction: GeneratedMessage)(implicit ec: ExecutionContext): Fox[Unit] = Fox.successful(()) + + def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage): AnnotationProto = { + + val withAppliedChange = updateAction match { + case a: AddLayerAnnotationUpdateAction => + annotation.copy( + layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, + a.name, + version = 0L, + editableMappingVersion = None, + `type` = a.`type`)) + case a: DeleteLayerAnnotationUpdateAction => + annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)) + case a: UpdateLayerAnnotationUpdateAction => + annotation.copy( + layers = 
annotation.layers.map(l => if (l.tracingId == a.tracingId) l.copy(version = a.layerVersion) else l)) + case a: UpdateLayerEditableMappingAnnotationUpdateAction => + annotation.copy(layers = annotation.layers.map(l => + if (l.tracingId == a.tracingId) l.copy(editableMappingVersion = Some(a.editableMappingVersion)) else l)) + case a: UpdateLayerMetadataAnnotationUpdateAction => + annotation.copy( + layers = annotation.layers.map(l => if (l.tracingId == a.tracingId) l.copy(name = a.name) else l)) + case a: UpdateMetadataAnnotationUpdateAction => + annotation.copy(name = a.name, description = a.description) + // TODO error case + + } + withAppliedChange.copy(version = withAppliedChange.version + 1L) + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala index a5fc4b2355c..31fbcc6e408 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala @@ -38,3 +38,5 @@ class DSAnnotationController @Inject()(accessTokenService: AccessTokenService, t // add layer // skeleton + volume routes can now take annotationVersion + +// Is an editable mapping a layer? 
From a698f11e6ba5c593cc830ddb995801907c336f48 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 20 Jun 2024 16:45:59 +0200 Subject: [PATCH 004/361] error case --- webknossos-datastore/proto/Annotation.proto | 2 + .../annotation/DSAnnotationService.scala | 55 ++++++++++--------- 2 files changed, 31 insertions(+), 26 deletions(-) diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 77e9e902a40..16c38629d66 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -51,3 +51,5 @@ message UpdateMetadataAnnotationUpdateAction { optional string name = 1; optional string description = 2; } + +// TODO restoreLayer? diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala index 29b408c4ef8..ae60f6c65c4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala @@ -19,33 +19,36 @@ import scala.concurrent.ExecutionContext class DSAnnotationService @Inject()() { def storeUpdate(updateAction: GeneratedMessage)(implicit ec: ExecutionContext): Fox[Unit] = Fox.successful(()) - def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage): AnnotationProto = { + def newestMaterializableVersion(annotationId: String): Fox[Long] = ??? 
- val withAppliedChange = updateAction match { - case a: AddLayerAnnotationUpdateAction => - annotation.copy( - layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, - a.name, - version = 0L, - editableMappingVersion = None, - `type` = a.`type`)) - case a: DeleteLayerAnnotationUpdateAction => - annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)) - case a: UpdateLayerAnnotationUpdateAction => - annotation.copy( - layers = annotation.layers.map(l => if (l.tracingId == a.tracingId) l.copy(version = a.layerVersion) else l)) - case a: UpdateLayerEditableMappingAnnotationUpdateAction => - annotation.copy(layers = annotation.layers.map(l => - if (l.tracingId == a.tracingId) l.copy(editableMappingVersion = Some(a.editableMappingVersion)) else l)) - case a: UpdateLayerMetadataAnnotationUpdateAction => - annotation.copy( - layers = annotation.layers.map(l => if (l.tracingId == a.tracingId) l.copy(name = a.name) else l)) - case a: UpdateMetadataAnnotationUpdateAction => - annotation.copy(name = a.name, description = a.description) - // TODO error case + def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage)( + implicit ec: ExecutionContext): Fox[AnnotationProto] = + for { - } - withAppliedChange.copy(version = withAppliedChange.version + 1L) - } + withAppliedChange <- updateAction match { + case a: AddLayerAnnotationUpdateAction => + Fox.successful( + annotation.copy( + layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, + a.name, + version = 0L, + editableMappingVersion = None, + `type` = a.`type`))) + case a: DeleteLayerAnnotationUpdateAction => + Fox.successful(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId))) + case a: UpdateLayerAnnotationUpdateAction => + Fox.successful(annotation.copy(layers = annotation.layers.map(l => + if (l.tracingId == a.tracingId) l.copy(version = a.layerVersion) else l))) + case a: UpdateLayerEditableMappingAnnotationUpdateAction => + 
Fox.successful(annotation.copy(layers = annotation.layers.map(l => + if (l.tracingId == a.tracingId) l.copy(editableMappingVersion = Some(a.editableMappingVersion)) else l))) + case a: UpdateLayerMetadataAnnotationUpdateAction => + Fox.successful(annotation.copy(layers = annotation.layers.map(l => + if (l.tracingId == a.tracingId) l.copy(name = a.name) else l))) + case a: UpdateMetadataAnnotationUpdateAction => + Fox.successful(annotation.copy(name = a.name, description = a.description)) + case _ => Fox.failure("Received unsupported AnnotationUpdaetAction action") + } + } yield withAppliedChange.copy(version = withAppliedChange.version + 1L) } From 9113ee1f75c7cf6e5eb00b938c0d816dc387d468 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 20 Jun 2024 17:47:55 +0200 Subject: [PATCH 005/361] fix injector --- app/controllers/Application.scala | 15 +++++++++++++++ conf/webknossos.latest.routes | 1 + .../controllers/DSAnnotationController.scala | 9 +++++---- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/app/controllers/Application.scala b/app/controllers/Application.scala index 7d0ba191d18..18af7eaaef8 100755 --- a/app/controllers/Application.scala +++ b/app/controllers/Application.scala @@ -15,6 +15,11 @@ import utils.{ApiVersioning, StoreModules, WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext +import scalapb.GeneratedMessage +import com.scalableminds.webknossos.datastore.Annotation.{ + UpdateLayerAnnotationUpdateAction, + UpdateLayerMetadataAnnotationUpdateAction +} class Application @Inject()(actorSystem: ActorSystem, userService: UserService, @@ -30,6 +35,16 @@ class Application @Inject()(actorSystem: ActorSystem, private lazy val Mailer = actorSystem.actorSelection("/user/mailActor") + def test: Action[AnyContent] = Action.async { implicit request => + Fox.successful(Ok(matchThing(UpdateLayerMetadataAnnotationUpdateAction("id", "name")))) + } + + private def matchThing(m: GeneratedMessage): String = m match { + case u: 
UpdateLayerAnnotationUpdateAction => "update!" + case u: UpdateLayerMetadataAnnotationUpdateAction => "other update!" + case _ => "anything else!" + } + // Note: This route is used by external applications, keep stable def buildInfo: Action[AnyContent] = sil.UserAwareAction.async { for { diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index b79feba1742..2f47c9d57a1 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -1,6 +1,7 @@ # Routes # This file defines all application routes (Higher priority routes first) # ~~~~ +GET /test controllers.Application.test() GET /buildinfo controllers.Application.buildInfo() GET /features controllers.Application.features() GET /health controllers.Application.health() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala index 31fbcc6e408..e534925f6c0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala @@ -3,15 +3,16 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.controllers.Controller -import com.scalableminds.webknossos.datastore.services.{AccessTokenService, UserAccessRequest} +import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} +import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scala.concurrent.{ExecutionContext, Future} -class 
DSAnnotationController @Inject()(accessTokenService: AccessTokenService, tracingDataStore: TracingDataStore)( - implicit ec: ExecutionContext, - bodyParsers: PlayBodyParsers) +class DSAnnotationController @Inject()( + accessTokenService: TracingStoreAccessTokenService, + tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { From 332eb19001705e53e696ebf4627d11def7169b89 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 8 Jul 2024 11:48:44 +0200 Subject: [PATCH 006/361] sparse versions --- webknossos-datastore/proto/Annotation.proto | 14 +------------- 1 file changed, 1 insertion(+), 13 deletions(-) diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 16c38629d66..4fe56262b5c 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -17,9 +17,7 @@ message AnnotationProto { message AnnotationLayerProto { required string tracingId = 1; required string name = 2; - required int64 version = 3; - optional int64 editableMappingVersion = 4; - required AnnotationLayerTypeProto type = 5; + required AnnotationLayerTypeProto type = 4; } message AddLayerAnnotationUpdateAction { @@ -32,16 +30,6 @@ message DeleteLayerAnnotationUpdateAction { required string tracingId = 1; } -message UpdateLayerAnnotationUpdateAction { - required string tracingId = 1; - required int64 layerVersion = 2; -} - -message UpdateLayerEditableMappingAnnotationUpdateAction { - required string tracingId = 1; - required int64 editableMappingVersion = 2; -} - message UpdateLayerMetadataAnnotationUpdateAction { required string tracingId = 1; required string name = 2; From b46af6360fdcaecf44d1a6d83fe31883bb57ac33 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 8 Jul 2024 11:59:40 +0200 Subject: [PATCH 007/361] WIP: revert actions for editable mappings --- .../editablemapping/EditableMappingUpdateActions.scala | 
5 +++++ .../tracings/editablemapping/EditableMappingUpdater.scala | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 652f13c96d9..0bde8f4473b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -43,6 +43,11 @@ object MergeAgglomerateUpdateAction { implicit val jsonFormat: OFormat[MergeAgglomerateUpdateAction] = Json.format[MergeAgglomerateUpdateAction] } +case class RevertToVersionUpdateAction(sourceVersion: Long, actionTimestamp: Option[Long] = None) + extends EditableMappingUpdateAction { + override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) +} + object EditableMappingUpdateAction { implicit object editableMappingUpdateActionFormat extends Format[EditableMappingUpdateAction] { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 545c94c92cb..80a3f0151cb 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -103,6 +103,8 @@ class EditableMappingUpdater( applySplitAction(mapping, splitAction) ?~> "Failed to apply split action" case mergeAction: MergeAgglomerateUpdateAction => 
applyMergeAction(mapping, mergeAction) ?~> "Failed to apply merge action" + case revertAction: RevertToVersionUpdateAction => + revertToVersion(mapping, revertAction) ?~> "Failed to apply revert action" } private def applySplitAction(editableMappingInfo: EditableMappingInfo, update: SplitAgglomerateUpdateAction)( @@ -385,4 +387,6 @@ class EditableMappingUpdater( ) } + private def revertToVersion(mapping: EditableMappingInfo, + revertAction: RevertToVersionUpdateAction): Fox[EditableMappingInfo] = {} } From 8f1094d9377399405726660bb9f825779ed42068 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 8 Jul 2024 13:50:06 +0200 Subject: [PATCH 008/361] wip: revert editable mappings --- app/controllers/Application.scala | 15 ---- conf/webknossos.latest.routes | 1 - .../SegmentToAgglomerateProto_pb2.py | 10 +-- .../proto/SegmentToAgglomerateProto.proto | 2 +- .../annotation/DSAnnotationService.scala | 15 +--- .../EditableMappingService.scala | 7 +- .../EditableMappingStreams.scala | 71 +++++++++++++++++++ .../EditableMappingUpdater.scala | 14 ++-- 8 files changed, 91 insertions(+), 44 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala diff --git a/app/controllers/Application.scala b/app/controllers/Application.scala index 18af7eaaef8..7d0ba191d18 100755 --- a/app/controllers/Application.scala +++ b/app/controllers/Application.scala @@ -15,11 +15,6 @@ import utils.{ApiVersioning, StoreModules, WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext -import scalapb.GeneratedMessage -import com.scalableminds.webknossos.datastore.Annotation.{ - UpdateLayerAnnotationUpdateAction, - UpdateLayerMetadataAnnotationUpdateAction -} class Application @Inject()(actorSystem: ActorSystem, userService: UserService, @@ -35,16 +30,6 @@ class Application @Inject()(actorSystem: ActorSystem, private lazy val Mailer = actorSystem.actorSelection("/user/mailActor") - def 
test: Action[AnyContent] = Action.async { implicit request => - Fox.successful(Ok(matchThing(UpdateLayerMetadataAnnotationUpdateAction("id", "name")))) - } - - private def matchThing(m: GeneratedMessage): String = m match { - case u: UpdateLayerAnnotationUpdateAction => "update!" - case u: UpdateLayerMetadataAnnotationUpdateAction => "other update!" - case _ => "anything else!" - } - // Note: This route is used by external applications, keep stable def buildInfo: Action[AnyContent] = sil.UserAwareAction.async { for { diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 2f47c9d57a1..b79feba1742 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -1,7 +1,6 @@ # Routes # This file defines all application routes (Higher priority routes first) # ~~~~ -GET /test controllers.Application.test() GET /buildinfo controllers.Application.buildInfo() GET /features controllers.Application.features() GET /health controllers.Application.health() diff --git a/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py b/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py index 91d2140c7b3..d7b553b51d0 100644 --- a/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py +++ b/tools/migrate-editable-mappings/SegmentToAgglomerateProto_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: SegmentToAgglomerateProto.proto +# source: SegmentToAgglomerateChunkProto.proto """Generated protocol buffer code.""" from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor @@ -13,15 +13,15 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fSegmentToAgglomerateProto.proto\x12&com.scalableminds.webknossos.datastore\"B\n\x16SegmentAgglomeratePair\x12\x11\n\tsegmentId\x18\x01 \x02(\x03\x12\x15\n\ragglomerateId\x18\x02 \x02(\x03\"y\n\x19SegmentToAgglomerateProto\x12\\\n\x14segmentToAgglomerate\x18\x01 \x03(\x0b\x32>.com.scalableminds.webknossos.datastore.SegmentAgglomeratePair') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fSegmentToAgglomerateChunkProto.proto\x12&com.scalableminds.webknossos.datastore\"B\n\x16SegmentAgglomeratePair\x12\x11\n\tsegmentId\x18\x01 \x02(\x03\x12\x15\n\ragglomerateId\x18\x02 \x02(\x03\"y\n\x19SegmentToAgglomerateChunkProto\x12\\\n\x14segmentToAgglomerate\x18\x01 \x03(\x0b\x32>.com.scalableminds.webknossos.datastore.SegmentAgglomeratePair') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'SegmentToAgglomerateProto_pb2', globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'SegmentToAgglomerateChunkProto_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None _SEGMENTAGGLOMERATEPAIR._serialized_start=75 _SEGMENTAGGLOMERATEPAIR._serialized_end=141 - _SEGMENTTOAGGLOMERATEPROTO._serialized_start=143 - _SEGMENTTOAGGLOMERATEPROTO._serialized_end=264 + _SegmentToAgglomerateChunkProto._serialized_start=143 + _SegmentToAgglomerateChunkProto._serialized_end=264 # @@protoc_insertion_point(module_scope) diff --git a/webknossos-datastore/proto/SegmentToAgglomerateProto.proto b/webknossos-datastore/proto/SegmentToAgglomerateProto.proto index 519276323c3..6bb61fdf783 100644 --- a/webknossos-datastore/proto/SegmentToAgglomerateProto.proto +++ 
b/webknossos-datastore/proto/SegmentToAgglomerateProto.proto @@ -7,6 +7,6 @@ message SegmentAgglomeratePair { required int64 agglomerateId = 2; } -message SegmentToAgglomerateProto { +message SegmentToAgglomerateChunkProto { repeated SegmentAgglomeratePair segmentToAgglomerate = 1; } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala index ae60f6c65c4..f6dffa0c274 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala @@ -6,8 +6,6 @@ import com.scalableminds.webknossos.datastore.Annotation.{ AnnotationLayerProto, AnnotationProto, DeleteLayerAnnotationUpdateAction, - UpdateLayerAnnotationUpdateAction, - UpdateLayerEditableMappingAnnotationUpdateAction, UpdateLayerMetadataAnnotationUpdateAction, UpdateMetadataAnnotationUpdateAction } @@ -28,20 +26,9 @@ class DSAnnotationService @Inject()() { withAppliedChange <- updateAction match { case a: AddLayerAnnotationUpdateAction => Fox.successful( - annotation.copy( - layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, - a.name, - version = 0L, - editableMappingVersion = None, - `type` = a.`type`))) + annotation.copy(layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, a.name, `type` = a.`type`))) case a: DeleteLayerAnnotationUpdateAction => Fox.successful(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId))) - case a: UpdateLayerAnnotationUpdateAction => - Fox.successful(annotation.copy(layers = annotation.layers.map(l => - if (l.tracingId == a.tracingId) l.copy(version = a.layerVersion) else l))) - case a: UpdateLayerEditableMappingAnnotationUpdateAction => - Fox.successful(annotation.copy(layers = annotation.layers.map(l => 
- if (l.tracingId == a.tracingId) l.copy(editableMappingVersion = Some(a.editableMappingVersion)) else l))) case a: UpdateLayerMetadataAnnotationUpdateAction => Fox.successful(annotation.copy(layers = annotation.layers.map(l => if (l.tracingId == a.tracingId) l.copy(name = a.name) else l))) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index b6d86171fb3..8811fcf3110 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -7,7 +7,7 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo -import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateProto +import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateChunkProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{Edge, Tree, TreeTypeProto} import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto @@ -292,7 +292,6 @@ class EditableMappingService @Inject()( tracingDataStore, relyOnAgglomerateIds = pendingUpdates.length <= 1 ) - updated <- updater.applyUpdatesAndSave(closestMaterializedWithVersion.value, pendingUpdates) } yield updated } yield updatedEditableMappingInfo @@ -416,9 +415,9 @@ class EditableMappingService @Inject()( agglomerateId: Long, version: Option[Long]): Fox[Seq[(Long, Long)]] = 
for { - keyValuePair: VersionedKeyValuePair[SegmentToAgglomerateProto] <- tracingDataStore.editableMappingsSegmentToAgglomerate + keyValuePair: VersionedKeyValuePair[SegmentToAgglomerateChunkProto] <- tracingDataStore.editableMappingsSegmentToAgglomerate .get(segmentToAgglomerateKey(editableMappingId, agglomerateId), version, mayBeEmpty = Some(true))( - fromProtoBytes[SegmentToAgglomerateProto]) + fromProtoBytes[SegmentToAgglomerateChunkProto]) valueProto = keyValuePair.value asSequence = valueProto.segmentToAgglomerate.map(pair => pair.segmentId -> pair.agglomerateId) } yield asSequence diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala new file mode 100644 index 00000000000..1bedcb9101f --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala @@ -0,0 +1,71 @@ +package com.scalableminds.webknossos.tracingstore.tracings.editablemapping + +import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateChunkProto +import com.scalableminds.webknossos.tracingstore.tracings.{ + FossilDBClient, + KeyValueStoreImplicits, + VersionedKeyValuePair +} +import net.liftweb.common.Full + +import scala.annotation.tailrec + +class VersionedAgglomerateToGraphIterator(prefix: String, + segmentToAgglomerateDataStore: FossilDBClient, + version: Option[Long] = None) {} + +class VersionedSegmentToAgglomerateChunkIterator(prefix: String, + segmentToAgglomerateDataStore: FossilDBClient, + version: Option[Long] = None) + extends Iterator[(String, SegmentToAgglomerateChunkProto)] + with KeyValueStoreImplicits { + private val batchSize = 64 + + private var currentStartAfterKey: Option[String] = None + private var currentBatchIterator: 
Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext + private var nextBucket: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = None + + private def fetchNext: Iterator[VersionedKeyValuePair[Array[Byte]]] = + segmentToAgglomerateDataStore.getMultipleKeys(currentStartAfterKey, Some(prefix), version, Some(batchSize)).iterator + + private def fetchNextAndSave = { + currentBatchIterator = fetchNext + currentBatchIterator + } + + private def isRevertedChunk(chunkBytes: Array[Byte]): Boolean = + chunkBytes sameElements Array[Byte](0) + + @tailrec + private def getNextNonRevertedChunk: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = + if (currentBatchIterator.hasNext) { + val chunk = currentBatchIterator.next() + currentStartAfterKey = Some(chunk.key) + val chunkParsedBox = fromProtoBytes[SegmentToAgglomerateChunkProto](chunk.value) + chunkParsedBox match { + case _ if isRevertedChunk(chunk.value) => getNextNonRevertedChunk + case Full(chunkParsed) => Some(VersionedKeyValuePair(versionedKey = chunk.versionedKey, value = chunkParsed)) + case _ => getNextNonRevertedChunk + } + } else { + if (!fetchNextAndSave.hasNext) None + else getNextNonRevertedChunk + } + + override def hasNext: Boolean = + if (nextBucket.isDefined) true + else { + nextBucket = getNextNonRevertedChunk + nextBucket.isDefined + } + + override def next(): (String, SegmentToAgglomerateChunkProto) = { + val nextRes = nextBucket match { + case Some(bucket) => bucket + case None => getNextNonRevertedChunk.get + } + nextBucket = None + (nextRes.key, nextRes.value) + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 80a3f0151cb..8780ff4313e 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -6,7 +6,7 @@ import com.scalableminds.webknossos.datastore.AgglomerateGraph.{AgglomerateEdge, import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentAgglomeratePair, - SegmentToAgglomerateProto + SegmentToAgglomerateChunkProto } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient import com.scalableminds.webknossos.tracingstore.tracings.{ @@ -64,7 +64,7 @@ class EditableMappingUpdater( private def flushSegmentToAgglomerateChunk(key: String): Fox[Unit] = { val chunk = segmentToAgglomerateBuffer(key) - val proto = SegmentToAgglomerateProto(chunk.toVector.map { segmentAgglomerateTuple => + val proto = SegmentToAgglomerateChunkProto(chunk.toVector.map { segmentAgglomerateTuple => SegmentAgglomeratePair(segmentAgglomerateTuple._1, segmentAgglomerateTuple._2) }) tracingDataStore.editableMappingsSegmentToAgglomerate.put(key, newVersion, proto.toByteArray) @@ -387,6 +387,12 @@ class EditableMappingUpdater( ) } - private def revertToVersion(mapping: EditableMappingInfo, - revertAction: RevertToVersionUpdateAction): Fox[EditableMappingInfo] = {} + private def revertToVersion(mapping: EditableMappingInfo, revertAction: RevertToVersionUpdateAction)( + implicit ec: ExecutionContext): Fox[EditableMappingInfo] = { + val segmentToAgglomerateChunkStream = new VersionedSegmentToAgglomerateChunkIterator( + editableMappingId, + tracingDataStore.editableMappingsSegmentToAgglomerate) + Fox.failure("todo") + } + } From f300febfaa04a99bd225a7fb12e865882a124551 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 9 Jul 2024 10:27:55 +0200 Subject: [PATCH 009/361] iterate on reverting editable mappings --- 
.../EditableMappingService.scala | 1 + .../EditableMappingStreams.scala | 84 +++++++++++++++---- .../EditableMappingUpdater.scala | 60 +++++++++---- .../tracings/volume/VolumeDataZipHelper.scala | 4 +- .../volume/VolumeTracingBucketHelper.scala | 16 ++-- .../volume/VolumeTracingDownsampling.scala | 9 +- .../volume/VolumeTracingService.scala | 2 +- .../tracings/volume/WKWBucketStreamSink.scala | 4 +- .../volume/Zarr3BucketStreamSink.scala | 4 +- 9 files changed, 134 insertions(+), 50 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index e9697e23b27..c16671a9640 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -418,6 +418,7 @@ class EditableMappingService @Inject()( keyValuePair: VersionedKeyValuePair[SegmentToAgglomerateChunkProto] <- tracingDataStore.editableMappingsSegmentToAgglomerate .get(segmentToAgglomerateKey(editableMappingId, agglomerateId), version, mayBeEmpty = Some(true))( fromProtoBytes[SegmentToAgglomerateChunkProto]) + // interpret zero-byte as Fox.empty valueProto = keyValuePair.value asSequence = valueProto.segmentToAgglomerate.map(pair => pair.segmentId -> pair.agglomerateId) } yield asSequence diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala index 1bedcb9101f..cae82ad526c 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala @@ -1,6 +1,8 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping +import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.SegmentToAgglomerateChunkProto +import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ FossilDBClient, KeyValueStoreImplicits, @@ -12,18 +14,70 @@ import scala.annotation.tailrec class VersionedAgglomerateToGraphIterator(prefix: String, segmentToAgglomerateDataStore: FossilDBClient, - version: Option[Long] = None) {} + version: Option[Long] = None) + extends Iterator[(String, AgglomerateGraph, Long)] + with ReversionHelper + with KeyValueStoreImplicits { + private val batchSize = 64 + + private var currentStartAfterKey: Option[String] = None + private var currentBatchIterator: Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext + private var nextGraph: Option[VersionedKeyValuePair[AgglomerateGraph]] = None + + private def fetchNext: Iterator[VersionedKeyValuePair[Array[Byte]]] = + segmentToAgglomerateDataStore.getMultipleKeys(currentStartAfterKey, Some(prefix), version, Some(batchSize)).iterator + + private def fetchNextAndSave = { + currentBatchIterator = fetchNext + currentBatchIterator + } + + @tailrec + private def getNextNonRevertedGraph: Option[VersionedKeyValuePair[AgglomerateGraph]] = + if (currentBatchIterator.hasNext) { + val chunk = currentBatchIterator.next() + currentStartAfterKey = Some(chunk.key) + val graphParsedBox = fromProtoBytes[AgglomerateGraph](chunk.value) + graphParsedBox match { + case _ if isRevertedElement(chunk.value) => getNextNonRevertedGraph + case 
Full(graphParsed) => Some(VersionedKeyValuePair(versionedKey = chunk.versionedKey, value = graphParsed)) + case _ => getNextNonRevertedGraph + } + } else { + if (!fetchNextAndSave.hasNext) None + else getNextNonRevertedGraph + } + + override def hasNext: Boolean = + if (nextGraph.isDefined) true + else { + nextGraph = getNextNonRevertedGraph + nextGraph.isDefined + } + + override def next(): (String, AgglomerateGraph, Long) = { + val nextRes = nextGraph match { + case Some(bucket) => bucket + case None => getNextNonRevertedGraph.get + } + nextGraph = None + // TODO: parse graph key? (=agglomerate id) + (nextRes.key, nextRes.value, nextRes.version) + } + +} class VersionedSegmentToAgglomerateChunkIterator(prefix: String, segmentToAgglomerateDataStore: FossilDBClient, version: Option[Long] = None) - extends Iterator[(String, SegmentToAgglomerateChunkProto)] + extends Iterator[(String, SegmentToAgglomerateChunkProto, Long)] + with ReversionHelper with KeyValueStoreImplicits { private val batchSize = 64 private var currentStartAfterKey: Option[String] = None private var currentBatchIterator: Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext - private var nextBucket: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = None + private var nextChunk: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = None private def fetchNext: Iterator[VersionedKeyValuePair[Array[Byte]]] = segmentToAgglomerateDataStore.getMultipleKeys(currentStartAfterKey, Some(prefix), version, Some(batchSize)).iterator @@ -33,9 +87,6 @@ class VersionedSegmentToAgglomerateChunkIterator(prefix: String, currentBatchIterator } - private def isRevertedChunk(chunkBytes: Array[Byte]): Boolean = - chunkBytes sameElements Array[Byte](0) - @tailrec private def getNextNonRevertedChunk: Option[VersionedKeyValuePair[SegmentToAgglomerateChunkProto]] = if (currentBatchIterator.hasNext) { @@ -43,9 +94,9 @@ class VersionedSegmentToAgglomerateChunkIterator(prefix: String, 
currentStartAfterKey = Some(chunk.key) val chunkParsedBox = fromProtoBytes[SegmentToAgglomerateChunkProto](chunk.value) chunkParsedBox match { - case _ if isRevertedChunk(chunk.value) => getNextNonRevertedChunk - case Full(chunkParsed) => Some(VersionedKeyValuePair(versionedKey = chunk.versionedKey, value = chunkParsed)) - case _ => getNextNonRevertedChunk + case _ if isRevertedElement(chunk.value) => getNextNonRevertedChunk + case Full(chunkParsed) => Some(VersionedKeyValuePair(versionedKey = chunk.versionedKey, value = chunkParsed)) + case _ => getNextNonRevertedChunk } } else { if (!fetchNextAndSave.hasNext) None @@ -53,19 +104,20 @@ class VersionedSegmentToAgglomerateChunkIterator(prefix: String, } override def hasNext: Boolean = - if (nextBucket.isDefined) true + if (nextChunk.isDefined) true else { - nextBucket = getNextNonRevertedChunk - nextBucket.isDefined + nextChunk = getNextNonRevertedChunk + nextChunk.isDefined } - override def next(): (String, SegmentToAgglomerateChunkProto) = { - val nextRes = nextBucket match { + override def next(): (String, SegmentToAgglomerateChunkProto, Long) = { + val nextRes = nextChunk match { case Some(bucket) => bucket case None => getNextNonRevertedChunk.get } - nextBucket = None - (nextRes.key, nextRes.value) + nextChunk = None + // TODO: parse chunk key? 
+ (nextRes.key, nextRes.value, nextRes.version) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 8780ff4313e..22ad3a7bdf1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -43,10 +43,12 @@ class EditableMappingUpdater( with FoxImplicits with LazyLogging { - private val segmentToAgglomerateBuffer: mutable.Map[String, Map[Long, Long]] = - new mutable.HashMap[String, Map[Long, Long]]() - private val agglomerateToGraphBuffer: mutable.Map[String, AgglomerateGraph] = - new mutable.HashMap[String, AgglomerateGraph]() + // chunkKey → (Map[segmentId → agglomerateId], isToBeReverted) + private val segmentToAgglomerateBuffer: mutable.Map[String, (Map[Long, Long], Boolean)] = + new mutable.HashMap[String, (Map[Long, Long], Boolean)]() + // agglomerateKey → (agglomerateGraph, isToBeReverted) + private val agglomerateToGraphBuffer: mutable.Map[String, (AgglomerateGraph, Boolean)] = + new mutable.HashMap[String, (AgglomerateGraph, Boolean)]() def applyUpdatesAndSave(existingEditabeMappingInfo: EditableMappingInfo, updates: List[EditableMappingUpdateAction])( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = @@ -63,7 +65,8 @@ class EditableMappingUpdater( } yield () private def flushSegmentToAgglomerateChunk(key: String): Fox[Unit] = { - val chunk = segmentToAgglomerateBuffer(key) + val (chunk, isToBeReverted) = segmentToAgglomerateBuffer(key) + // TODO respect isToBeReverted val proto = SegmentToAgglomerateChunkProto(chunk.toVector.map { segmentAgglomerateTuple => SegmentAgglomeratePair(segmentAgglomerateTuple._1, 
segmentAgglomerateTuple._2) }) @@ -71,7 +74,8 @@ class EditableMappingUpdater( } private def flushAgglomerateGraph(key: String): Fox[Unit] = { - val graph = agglomerateToGraphBuffer(key) + val (graph, isToBeReverted) = agglomerateToGraphBuffer(key) + // TODO respect isToBeReverted tracingDataStore.editableMappingsAgglomerateToGraph.put(key, newVersion, graph) } @@ -104,7 +108,7 @@ class EditableMappingUpdater( case mergeAction: MergeAgglomerateUpdateAction => applyMergeAction(mapping, mergeAction) ?~> "Failed to apply merge action" case revertAction: RevertToVersionUpdateAction => - revertToVersion(mapping, revertAction) ?~> "Failed to apply revert action" + revertToVersion(revertAction) ?~> "Failed to apply revert action" } private def applySplitAction(editableMappingInfo: EditableMappingInfo, update: SplitAgglomerateUpdateAction)( @@ -160,6 +164,7 @@ class EditableMappingUpdater( val chunkId = segmentId / editableMappingService.defaultSegmentToAgglomerateChunkSize val chunkKey = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) val chunkFromBufferOpt = segmentToAgglomerateBuffer.get(chunkKey) + // TODO isToBeReverted → None for { chunk <- Fox.fillOption(chunkFromBufferOpt) { editableMappingService @@ -192,13 +197,14 @@ class EditableMappingUpdater( existingChunk: Map[Long, Long] <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId) ?~> "failed to get old segment to agglomerate chunk for updating it" mergedMap = existingChunk ++ segmentIdsToUpdate.map(_ -> agglomerateId).toMap _ = segmentToAgglomerateBuffer.put(editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId), - mergedMap) + (mergedMap, false)) } yield () private def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, chunkId: Long)( implicit ec: ExecutionContext): Fox[Map[Long, Long]] = { val key = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) val fromBufferOpt = 
segmentToAgglomerateBuffer.get(key) + // TODO isToBeReverted → None Fox.fillOption(fromBufferOpt) { editableMappingService .getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version = oldVersion) @@ -210,6 +216,7 @@ class EditableMappingUpdater( implicit ec: ExecutionContext): Fox[AgglomerateGraph] = { val key = editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) val fromBufferOpt = agglomerateToGraphBuffer.get(key) + // TODO isToBeReverted → None fromBufferOpt.map(Fox.successful(_)).getOrElse { editableMappingService.getAgglomerateGraphForIdWithFallback(mapping, editableMappingId, @@ -222,7 +229,7 @@ class EditableMappingUpdater( private def updateAgglomerateGraph(agglomerateId: Long, graph: AgglomerateGraph): Unit = { val key = editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) - agglomerateToGraphBuffer.put(key, graph) + agglomerateToGraphBuffer.put(key, (graph, false)) } private def splitGraph(agglomerateId: Long, @@ -387,12 +394,33 @@ class EditableMappingUpdater( ) } - private def revertToVersion(mapping: EditableMappingInfo, revertAction: RevertToVersionUpdateAction)( - implicit ec: ExecutionContext): Fox[EditableMappingInfo] = { - val segmentToAgglomerateChunkStream = new VersionedSegmentToAgglomerateChunkIterator( - editableMappingId, - tracingDataStore.editableMappingsSegmentToAgglomerate) - Fox.failure("todo") - } + private def revertToVersion(revertAction: RevertToVersionUpdateAction)( + implicit ec: ExecutionContext): Fox[EditableMappingInfo] = + for { + _ <- bool2Fox(revertAction.sourceVersion >= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" + oldInfo <- editableMappingService.getInfo(editableMappingId, + Some(revertAction.sourceVersion), + remoteFallbackLayer, + userToken) + _ = segmentToAgglomerateBuffer.clear() + _ = agglomerateToGraphBuffer.clear() + segmentToAgglomerateChunkNewestStream = new 
VersionedSegmentToAgglomerateChunkIterator( + editableMappingId, + tracingDataStore.editableMappingsSegmentToAgglomerate) + _ <- Fox.serialCombined(segmentToAgglomerateChunkNewestStream) { + case (chunkKey, chunkDataBeforeRevert, version) => + if (version > revertAction.sourceVersion) { + // TODO fetch old chunk, save to buffer. if empty, write zero-byte + case Empty => Fox.successful(()) // TODO save zero-byte + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + Fox.successful(()) + } else Fox.successful(()) + } + agglomerateToGraphNewestStream = new VersionedAgglomerateToGraphIterator( + editableMappingId, + tracingDataStore.editableMappingsAgglomerateToGraph) + // TODO do we need to iterate over old *and* new to get all additions + removals? + } yield oldInfo } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala index ce74001a669..35aacb77e98 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeDataZipHelper.scala @@ -27,7 +27,7 @@ import scala.concurrent.ExecutionContext trait VolumeDataZipHelper extends WKWDataFormatHelper - with VolumeBucketReversionHelper + with ReversionHelper with BoxImplicits with LazyLogging { @@ -61,7 +61,7 @@ trait VolumeDataZipHelper parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => if (buckets.hasNext) { val data = buckets.next() - if (!isRevertedBucket(data)) { + if (!isRevertedElement(data)) { block(bucketPosition, data) } else Fox.successful(()) } else Fox.successful(()) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 97d64b826db..3aa03f8be6d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -15,10 +15,12 @@ import scala.annotation.tailrec import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -trait VolumeBucketReversionHelper { - protected def isRevertedBucket(data: Array[Byte]): Boolean = data sameElements Array[Byte](0) +trait ReversionHelper { + val revertedValue: Array[Byte] = Array[Byte](0) - protected def isRevertedBucket(bucket: VersionedKeyValuePair[Array[Byte]]): Boolean = isRevertedBucket(bucket.value) + protected def isRevertedElement(data: Array[Byte]): Boolean = data.sameElements(revertedValue) + + protected def isRevertedElement(bucket: VersionedKeyValuePair[Array[Byte]]): Boolean = isRevertedElement(bucket.value) } trait VolumeBucketCompression extends LazyLogging { @@ -173,7 +175,7 @@ trait VolumeTracingBucketHelper with VolumeBucketCompression with DataConverter with BucketKeys - with VolumeBucketReversionHelper { + with ReversionHelper { implicit def ec: ExecutionContext @@ -196,7 +198,7 @@ trait VolumeTracingBucketHelper case None => volumeDataStore.get(key, version, mayBeEmpty = Some(true)) } val unpackedDataFox = dataFox.flatMap { versionedVolumeBucket => - if (isRevertedBucket(versionedVolumeBucket)) Fox.empty + if (isRevertedElement(versionedVolumeBucket)) Fox.empty else { val debugInfo = s"key: $key, ${versionedVolumeBucket.value.length} bytes, version ${versionedVolumeBucket.version}" @@ -304,7 +306,7 @@ class VersionedBucketIterator(prefix: String, with VolumeBucketCompression with BucketKeys with FoxImplicits - with VolumeBucketReversionHelper { + with ReversionHelper { private val batchSize = 64 
private var currentStartAfterKey: Option[String] = None @@ -324,7 +326,7 @@ class VersionedBucketIterator(prefix: String, if (currentBatchIterator.hasNext) { val bucket = currentBatchIterator.next() currentStartAfterKey = Some(bucket.key) - if (isRevertedBucket(bucket) || parseBucketKey(bucket.key, additionalAxes).isEmpty) { + if (isRevertedElement(bucket) || parseBucketKey(bucket.key, additionalAxes).isEmpty) { getNextNonRevertedBucket } else { Some(bucket) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index b5c9eec46c2..bb2dd9f18a1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -52,6 +52,7 @@ trait VolumeTracingDownsampling with ProtoGeometryImplicits with VolumeBucketCompression with KeyValueStoreImplicits + with ReversionHelper with FoxImplicits { val tracingDataStore: TracingDataStore @@ -89,7 +90,7 @@ trait VolumeTracingDownsampling sourceMag = getSourceMag(tracing) magsToCreate <- getMagsToCreate(tracing, oldTracingId) elementClass = elementClassFromProto(tracing.elementClass) - bucketDataMapMutable = new mutable.HashMap[BucketPosition, Array[Byte]]().withDefault(_ => Array[Byte](0)) + bucketDataMapMutable = new mutable.HashMap[BucketPosition, Array[Byte]]().withDefault(_ => revertedValue) _ = fillMapWithSourceBucketsInplace(bucketDataMapMutable, tracingId, dataLayer, sourceMag) originalBucketPositions = bucketDataMapMutable.keys.toList updatedBucketsMutable = new mutable.ListBuffer[BucketPosition]() @@ -167,8 +168,8 @@ trait VolumeTracingDownsampling sourceBucketPositionsFor(downsampledBucketPosition, downScaleFactor, previousMag) val sourceData: 
Seq[Array[Byte]] = sourceBuckets.map(bucketDataMapMutable(_)) val downsampledData: Array[Byte] = - if (sourceData.forall(_.sameElements(Array[Byte](0)))) - Array[Byte](0) + if (sourceData.forall(_.sameElements(revertedValue))) + revertedValue else { val sourceDataFilled = fillZeroedIfNeeded(sourceData, bucketVolume, dataLayer.bytesPerElement) val sourceDataTyped = UnsignedIntegerArray.fromByteArray(sourceDataFilled.toArray.flatten, elementClass) @@ -216,7 +217,7 @@ trait VolumeTracingDownsampling // Reverted buckets and missing buckets are represented by a single zero-byte. // For downsampling, those need to be replaced with the full bucket volume of zero-bytes. sourceData.map { sourceBucketData => - if (sourceBucketData.sameElements(Array[Byte](0))) { + if (isRevertedElement(sourceBucketData)) { Array.fill[Byte](bucketVolume * bytesPerElement)(0) } else sourceBucketData } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 5fa306a9d2c..74c46726eae 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -330,7 +330,7 @@ class VolumeTracingService @Inject()( } yield () case Empty => for { - dataAfterRevert <- Fox.successful(Array[Byte](0)) + dataAfterRevert <- Fox.successful(revertedValue) _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( updateSegmentIndex( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala index 
ec8df4f8f59..b1783fe8d9d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/WKWBucketStreamSink.scala @@ -18,7 +18,7 @@ import scala.concurrent.{ExecutionContext, Future} class WKWBucketStreamSink(val layer: DataLayer, tracingHasFallbackLayer: Boolean) extends WKWDataFormatHelper - with VolumeBucketReversionHelper + with ReversionHelper with ByteUtils { def apply(bucketStream: Iterator[(BucketPosition, Array[Byte])], mags: Seq[Vec3Int])( @@ -27,7 +27,7 @@ class WKWBucketStreamSink(val layer: DataLayer, tracingHasFallbackLayer: Boolean val header = WKWHeader(1, DataLayer.bucketLength, ChunkType.LZ4, voxelType, numChannels) bucketStream.flatMap { case (bucket, data) => - val skipBucket = if (tracingHasFallbackLayer) isRevertedBucket(data) else isAllZero(data) + val skipBucket = if (tracingHasFallbackLayer) isRevertedElement(data) else isAllZero(data) if (skipBucket) { // If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely None diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala index 2d1d024ace4..ac85fd86b5c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala @@ -27,7 +27,7 @@ import scala.concurrent.{ExecutionContext, Future} // Creates data zip from volume tracings class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLayer: Boolean) extends ProtoGeometryImplicits - with VolumeBucketReversionHelper + with ReversionHelper with ByteUtils { private lazy val 
defaultLayerName = "volumeAnnotationData" @@ -74,7 +74,7 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay ) bucketStream.flatMap { case (bucket, data) => - val skipBucket = if (tracingHasFallbackLayer) isAllZero(data) else isRevertedBucket(data) + val skipBucket = if (tracingHasFallbackLayer) isAllZero(data) else isRevertedElement(data) if (skipBucket) { // If the tracing has no fallback segmentation, all-zero buckets can be omitted entirely None From 9f99feb2edb29f8bc801f2b0eab38c46ee31e70c Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 9 Jul 2024 11:43:57 +0200 Subject: [PATCH 010/361] revert agglomerateToGraph + segmentToAgglomerate in buffers --- .../EditableMappingService.scala | 30 ++++--- .../EditableMappingUpdater.scala | 81 ++++++++++++++----- 2 files changed, 80 insertions(+), 31 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index c16671a9640..3b3ca98d6bf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -21,6 +21,7 @@ import com.scalableminds.webknossos.datastore.services.{ AdHocMeshServiceHolder, BinaryDataService } +import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, KeyValueStoreImplicits, @@ -96,6 +97,7 @@ class EditableMappingService @Inject()( extends KeyValueStoreImplicits with FallbackDataHelper with FoxImplicits + with ReversionHelper with LazyLogging with ProtoGeometryImplicits { @@ -413,13 +415,19 @@ class EditableMappingService @Inject()( private def 
getSegmentToAgglomerateChunk(editableMappingId: String, agglomerateId: Long, - version: Option[Long]): Fox[Seq[(Long, Long)]] = + version: Option[Long]): Fox[Seq[(Long, Long)]] = { + val chunkKey = segmentToAgglomerateKey(editableMappingId, agglomerateId) + getSegmentToAgglomerateChunk(editableMappingId, chunkKey, version) + } + + def getSegmentToAgglomerateChunk(editableMappingId: String, + chunkKey: String, + version: Option[Long]): Fox[Seq[(Long, Long)]] = for { - keyValuePair: VersionedKeyValuePair[SegmentToAgglomerateChunkProto] <- tracingDataStore.editableMappingsSegmentToAgglomerate - .get(segmentToAgglomerateKey(editableMappingId, agglomerateId), version, mayBeEmpty = Some(true))( - fromProtoBytes[SegmentToAgglomerateChunkProto]) - // interpret zero-byte as Fox.empty - valueProto = keyValuePair.value + keyValuePairBytes: VersionedKeyValuePair[Array[Byte]] <- tracingDataStore.editableMappingsSegmentToAgglomerate + .get(chunkKey, version, mayBeEmpty = Some(true)) + valueProto <- if (isRevertedElement(keyValuePairBytes.value)) Fox.empty + else fromProtoBytes[SegmentToAgglomerateChunkProto](keyValuePairBytes.value).toFox asSequence = valueProto.segmentToAgglomerate.map(pair => pair.segmentId -> pair.agglomerateId) } yield asSequence @@ -600,10 +608,12 @@ class EditableMappingService @Inject()( agglomerateGraph <- agglomerateToGraphCache.getOrLoad( (mappingId, agglomerateId, version), _ => - tracingDataStore.editableMappingsAgglomerateToGraph - .get(agglomerateGraphKey(mappingId, agglomerateId), Some(version), mayBeEmpty = Some(true))( - fromProtoBytes[AgglomerateGraph]) - .map(_.value) + for { + graphBytes: VersionedKeyValuePair[Array[Byte]] <- tracingDataStore.editableMappingsAgglomerateToGraph + .get(agglomerateGraphKey(mappingId, agglomerateId), Some(version), mayBeEmpty = Some(true)) + graphParsed <- if (isRevertedElement(graphBytes.value)) Fox.empty + else fromProtoBytes[AgglomerateGraph](graphBytes.value).toFox + } yield graphParsed ) } yield 
agglomerateGraph diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 22ad3a7bdf1..564b2c41df8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -9,6 +9,7 @@ import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentToAgglomerateChunkProto } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient +import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, RemoteFallbackLayer, @@ -40,6 +41,7 @@ class EditableMappingUpdater( tracingDataStore: TracingDataStore, relyOnAgglomerateIds: Boolean // False during merge and in case of multiple actions. 
Then, look up all agglomerate ids at positions ) extends KeyValueStoreImplicits + with ReversionHelper with FoxImplicits with LazyLogging { @@ -66,17 +68,21 @@ class EditableMappingUpdater( private def flushSegmentToAgglomerateChunk(key: String): Fox[Unit] = { val (chunk, isToBeReverted) = segmentToAgglomerateBuffer(key) - // TODO respect isToBeReverted - val proto = SegmentToAgglomerateChunkProto(chunk.toVector.map { segmentAgglomerateTuple => - SegmentAgglomeratePair(segmentAgglomerateTuple._1, segmentAgglomerateTuple._2) - }) - tracingDataStore.editableMappingsSegmentToAgglomerate.put(key, newVersion, proto.toByteArray) + val valueToFlush: Array[Byte] = + if (isToBeReverted) revertedValue + else { + val proto = SegmentToAgglomerateChunkProto(chunk.toVector.map { segmentAgglomerateTuple => + SegmentAgglomeratePair(segmentAgglomerateTuple._1, segmentAgglomerateTuple._2) + }) + proto.toByteArray + } + tracingDataStore.editableMappingsSegmentToAgglomerate.put(key, newVersion, valueToFlush) } private def flushAgglomerateGraph(key: String): Fox[Unit] = { val (graph, isToBeReverted) = agglomerateToGraphBuffer(key) - // TODO respect isToBeReverted - tracingDataStore.editableMappingsAgglomerateToGraph.put(key, newVersion, graph) + val valueToFlush: Array[Byte] = if (isToBeReverted) revertedValue else graph + tracingDataStore.editableMappingsAgglomerateToGraph.put(key, newVersion, valueToFlush) } private def updateIter(mappingFox: Fox[EditableMappingInfo], remainingUpdates: List[EditableMappingUpdateAction])( @@ -160,11 +166,22 @@ class EditableMappingUpdater( } yield (agglomerateId1, agglomerateId2) } + private def getFromSegmentToAgglomerateBuffer(chunkKey: String): Option[Map[Long, Long]] = + segmentToAgglomerateBuffer.get(chunkKey).flatMap { + case (chunkFromBuffer, isToBeReverted) => + if (isToBeReverted) None else Some(chunkFromBuffer) + } + + private def getFromAgglomerateToGraphBuffer(chunkKey: String): Option[AgglomerateGraph] = + 
agglomerateToGraphBuffer.get(chunkKey).flatMap { + case (graphFromBuffer, isToBeReverted) => + if (isToBeReverted) None else Some(graphFromBuffer) + } + private def agglomerateIdForSegmentId(segmentId: Long)(implicit ec: ExecutionContext): Fox[Long] = { val chunkId = segmentId / editableMappingService.defaultSegmentToAgglomerateChunkSize val chunkKey = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) - val chunkFromBufferOpt = segmentToAgglomerateBuffer.get(chunkKey) - // TODO isToBeReverted → None + val chunkFromBufferOpt = getFromSegmentToAgglomerateBuffer(chunkKey) for { chunk <- Fox.fillOption(chunkFromBufferOpt) { editableMappingService @@ -203,8 +220,7 @@ class EditableMappingUpdater( private def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, chunkId: Long)( implicit ec: ExecutionContext): Fox[Map[Long, Long]] = { val key = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) - val fromBufferOpt = segmentToAgglomerateBuffer.get(key) - // TODO isToBeReverted → None + val fromBufferOpt = getFromSegmentToAgglomerateBuffer(key) Fox.fillOption(fromBufferOpt) { editableMappingService .getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version = oldVersion) @@ -215,8 +231,7 @@ class EditableMappingUpdater( private def agglomerateGraphForIdWithFallback(mapping: EditableMappingInfo, agglomerateId: Long)( implicit ec: ExecutionContext): Fox[AgglomerateGraph] = { val key = editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) - val fromBufferOpt = agglomerateToGraphBuffer.get(key) - // TODO isToBeReverted → None + val fromBufferOpt = getFromAgglomerateToGraphBuffer(key) fromBufferOpt.map(Fox.successful(_)).getOrElse { editableMappingService.getAgglomerateGraphForIdWithFallback(mapping, editableMappingId, @@ -232,6 +247,8 @@ class EditableMappingUpdater( agglomerateToGraphBuffer.put(key, (graph, false)) } + private def emptyAgglomerateGraph = 
AgglomerateGraph(Seq(), Seq(), Seq(), Seq()) + private def splitGraph(agglomerateId: Long, agglomerateGraph: AgglomerateGraph, update: SplitAgglomerateUpdateAction, @@ -247,7 +264,7 @@ class EditableMappingUpdater( logger.warn( s"Split action for editable mapping $editableMappingId: Edge to remove ($segmentId1 at ${update.segmentPosition1} in mag ${update.mag} to $segmentId2 at ${update.segmentPosition2} in mag ${update.mag} in agglomerate $agglomerateId) already absent. This split becomes a no-op.") } - (agglomerateGraph, AgglomerateGraph(Seq(), Seq(), Seq(), Seq())) + (agglomerateGraph, emptyAgglomerateGraph) } else { val graph1Nodes: Set[Long] = computeConnectedComponent(startNode = segmentId1, @@ -408,19 +425,41 @@ class EditableMappingUpdater( editableMappingId, tracingDataStore.editableMappingsSegmentToAgglomerate) _ <- Fox.serialCombined(segmentToAgglomerateChunkNewestStream) { - case (chunkKey, chunkDataBeforeRevert, version) => + case (chunkKey, _, version) => if (version > revertAction.sourceVersion) { - // TODO fetch old chunk, save to buffer. if empty, write zero-byte - case Empty => Fox.successful(()) // TODO save zero-byte - case Failure(msg, _, chain) => - Fox.failure(msg, Empty, chain) - Fox.successful(()) + editableMappingService + .getSegmentToAgglomerateChunk(editableMappingId, chunkKey, Some(revertAction.sourceVersion)) + .futureBox + .map { + case Full(chunkData) => segmentToAgglomerateBuffer.put(chunkKey, (chunkData.toMap, false)) + case Empty => segmentToAgglomerateBuffer.put(chunkKey, (Map[Long, Long](), true)) + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + } } else Fox.successful(()) } agglomerateToGraphNewestStream = new VersionedAgglomerateToGraphIterator( editableMappingId, tracingDataStore.editableMappingsAgglomerateToGraph) - // TODO do we need to iterate over old *and* new to get all additions + removals? 
+ _ <- Fox.serialCombined(agglomerateToGraphNewestStream) { + case (graphKey, _, version) => + if (version > revertAction.sourceVersion) { + // TODO agglomerate id from graph key + editableMappingService + .getAgglomerateGraphForId(editableMappingId, + 0L, + remoteFallbackLayer, + userToken, + Some(revertAction.sourceVersion)) + .futureBox + .map { + case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) + case Empty => agglomerateToGraphBuffer.put(graphKey, (emptyAgglomerateGraph, true)) + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + } + } else Fox.successful(()) + } } yield oldInfo } From 130e1f75a2bfca399d3154fb405ba43b6148e7d3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 9 Jul 2024 11:51:22 +0200 Subject: [PATCH 011/361] use right key --- .../EditableMappingElementKeys.scala | 18 ++++++++ .../EditableMappingService.scala | 17 ++----- .../EditableMappingUpdater.scala | 44 ++++++++++--------- 3 files changed, 44 insertions(+), 35 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala new file mode 100644 index 00000000000..af9f7a2a287 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingElementKeys.scala @@ -0,0 +1,18 @@ +package com.scalableminds.webknossos.tracingstore.tracings.editablemapping + +import net.liftweb.common.Box +import net.liftweb.common.Box.tryo + +trait EditableMappingElementKeys { + + protected def agglomerateGraphKey(mappingId: String, agglomerateId: Long): String = + s"$mappingId/$agglomerateId" + + protected def 
segmentToAgglomerateKey(mappingId: String, chunkId: Long): String = + s"$mappingId/$chunkId" + + protected def chunkIdFromSegmentToAgglomerateKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) + + protected def agglomerateIdFromAgglomerateGraphKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 3b3ca98d6bf..39fe85eb066 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -98,6 +98,7 @@ class EditableMappingService @Inject()( with FallbackDataHelper with FoxImplicits with ReversionHelper + with EditableMappingElementKeys with LazyLogging with ProtoGeometryImplicits { @@ -417,12 +418,10 @@ class EditableMappingService @Inject()( agglomerateId: Long, version: Option[Long]): Fox[Seq[(Long, Long)]] = { val chunkKey = segmentToAgglomerateKey(editableMappingId, agglomerateId) - getSegmentToAgglomerateChunk(editableMappingId, chunkKey, version) + getSegmentToAgglomerateChunk(chunkKey, version) } - def getSegmentToAgglomerateChunk(editableMappingId: String, - chunkKey: String, - version: Option[Long]): Fox[Seq[(Long, Long)]] = + def getSegmentToAgglomerateChunk(chunkKey: String, version: Option[Long]): Fox[Seq[(Long, Long)]] = for { keyValuePairBytes: VersionedKeyValuePair[Array[Byte]] <- tracingDataStore.editableMappingsSegmentToAgglomerate .get(chunkKey, version, mayBeEmpty = Some(true)) @@ -587,16 +586,6 @@ class EditableMappingService @Inject()( result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield result - def agglomerateGraphKey(mappingId: 
String, agglomerateId: Long): String = - s"$mappingId/$agglomerateId" - - def segmentToAgglomerateKey(mappingId: String, chunkId: Long): String = - s"$mappingId/$chunkId" - - private def chunkIdFromSegmentToAgglomerateKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) - - private def agglomerateIdFromAgglomerateGraphKey(key: String): Box[Long] = tryo(key.split("/")(1).toLong) - def getAgglomerateGraphForId(mappingId: String, agglomerateId: Long, remoteFallbackLayer: RemoteFallbackLayer, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 564b2c41df8..55b97b96e8d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -43,6 +43,7 @@ class EditableMappingUpdater( ) extends KeyValueStoreImplicits with ReversionHelper with FoxImplicits + with EditableMappingElementKeys with LazyLogging { // chunkKey → (Map[segmentId → agglomerateId], isToBeReverted) @@ -180,7 +181,7 @@ class EditableMappingUpdater( private def agglomerateIdForSegmentId(segmentId: Long)(implicit ec: ExecutionContext): Fox[Long] = { val chunkId = segmentId / editableMappingService.defaultSegmentToAgglomerateChunkSize - val chunkKey = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) + val chunkKey = segmentToAgglomerateKey(editableMappingId, chunkId) val chunkFromBufferOpt = getFromSegmentToAgglomerateBuffer(chunkKey) for { chunk <- Fox.fillOption(chunkFromBufferOpt) { @@ -213,13 +214,12 @@ class EditableMappingUpdater( for { existingChunk: Map[Long, Long] <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId) ?~> "failed to 
get old segment to agglomerate chunk for updating it" mergedMap = existingChunk ++ segmentIdsToUpdate.map(_ -> agglomerateId).toMap - _ = segmentToAgglomerateBuffer.put(editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId), - (mergedMap, false)) + _ = segmentToAgglomerateBuffer.put(segmentToAgglomerateKey(editableMappingId, chunkId), (mergedMap, false)) } yield () private def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, chunkId: Long)( implicit ec: ExecutionContext): Fox[Map[Long, Long]] = { - val key = editableMappingService.segmentToAgglomerateKey(editableMappingId, chunkId) + val key = segmentToAgglomerateKey(editableMappingId, chunkId) val fromBufferOpt = getFromSegmentToAgglomerateBuffer(key) Fox.fillOption(fromBufferOpt) { editableMappingService @@ -230,7 +230,7 @@ class EditableMappingUpdater( private def agglomerateGraphForIdWithFallback(mapping: EditableMappingInfo, agglomerateId: Long)( implicit ec: ExecutionContext): Fox[AgglomerateGraph] = { - val key = editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) + val key = agglomerateGraphKey(editableMappingId, agglomerateId) val fromBufferOpt = getFromAgglomerateToGraphBuffer(key) fromBufferOpt.map(Fox.successful(_)).getOrElse { editableMappingService.getAgglomerateGraphForIdWithFallback(mapping, @@ -243,7 +243,7 @@ class EditableMappingUpdater( } private def updateAgglomerateGraph(agglomerateId: Long, graph: AgglomerateGraph): Unit = { - val key = editableMappingService.agglomerateGraphKey(editableMappingId, agglomerateId) + val key = agglomerateGraphKey(editableMappingId, agglomerateId) agglomerateToGraphBuffer.put(key, (graph, false)) } @@ -428,7 +428,7 @@ class EditableMappingUpdater( case (chunkKey, _, version) => if (version > revertAction.sourceVersion) { editableMappingService - .getSegmentToAgglomerateChunk(editableMappingId, chunkKey, Some(revertAction.sourceVersion)) + .getSegmentToAgglomerateChunk(chunkKey, 
Some(revertAction.sourceVersion)) .futureBox .map { case Full(chunkData) => segmentToAgglomerateBuffer.put(chunkKey, (chunkData.toMap, false)) @@ -444,20 +444,22 @@ class EditableMappingUpdater( _ <- Fox.serialCombined(agglomerateToGraphNewestStream) { case (graphKey, _, version) => if (version > revertAction.sourceVersion) { - // TODO agglomerate id from graph key - editableMappingService - .getAgglomerateGraphForId(editableMappingId, - 0L, - remoteFallbackLayer, - userToken, - Some(revertAction.sourceVersion)) - .futureBox - .map { - case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) - case Empty => agglomerateToGraphBuffer.put(graphKey, (emptyAgglomerateGraph, true)) - case Failure(msg, _, chain) => - Fox.failure(msg, Empty, chain) - } + for { + agglomerateId <- agglomerateIdFromAgglomerateGraphKey(graphKey) + _ <- editableMappingService + .getAgglomerateGraphForId(editableMappingId, + agglomerateId, + remoteFallbackLayer, + userToken, + Some(revertAction.sourceVersion)) + .futureBox + .map { + case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) + case Empty => agglomerateToGraphBuffer.put(graphKey, (emptyAgglomerateGraph, true)) + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + } + } yield () } else Fox.successful(()) } } yield oldInfo From 1325cac2b0ad03abd252422f87cc25aeaefc3f06 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 9 Jul 2024 13:47:54 +0200 Subject: [PATCH 012/361] fix version check, fix json serialization of update action --- .../tracings/editablemapping/EditableMappingService.scala | 2 +- .../editablemapping/EditableMappingUpdateActions.scala | 7 +++++++ .../tracings/editablemapping/EditableMappingUpdater.scala | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 39fe85eb066..ff66dd20ac2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -340,7 +340,7 @@ class EditableMappingService @Inject()( Some(batchFrom) )(fromJsonBytes[List[EditableMappingUpdateAction]]) } yield res - } + } ?~> "Failed to fetch editable mapping update actions from fossilDB" flat = updateActionBatches.flatten } yield flat } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 0bde8f4473b..96f01b912da 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -48,6 +48,10 @@ case class RevertToVersionUpdateAction(sourceVersion: Long, actionTimestamp: Opt override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) } +object RevertToVersionUpdateAction { + implicit val jsonFormat: OFormat[RevertToVersionUpdateAction] = Json.format[RevertToVersionUpdateAction] +} + object EditableMappingUpdateAction { implicit object editableMappingUpdateActionFormat extends Format[EditableMappingUpdateAction] { @@ -55,6 +59,7 @@ object EditableMappingUpdateAction { (json \ "name").validate[String].flatMap { case "mergeAgglomerate" => (json \ "value").validate[MergeAgglomerateUpdateAction] case "splitAgglomerate" => (json \ 
"value").validate[SplitAgglomerateUpdateAction] + case "revertToVersion" => (json \ "value").validate[RevertToVersionUpdateAction] case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") } @@ -63,6 +68,8 @@ object EditableMappingUpdateAction { Json.obj("name" -> "splitAgglomerate", "value" -> Json.toJson(s)(SplitAgglomerateUpdateAction.jsonFormat)) case s: MergeAgglomerateUpdateAction => Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) + case s: RevertToVersionUpdateAction => + Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionUpdateAction.jsonFormat)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 55b97b96e8d..89303d32040 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -414,7 +414,7 @@ class EditableMappingUpdater( private def revertToVersion(revertAction: RevertToVersionUpdateAction)( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = for { - _ <- bool2Fox(revertAction.sourceVersion >= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" + _ <- bool2Fox(revertAction.sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" oldInfo <- editableMappingService.getInfo(editableMappingId, Some(revertAction.sourceVersion), remoteFallbackLayer, From 2e62832b3a827d1aa65a4da0525740d23cbf4a3f Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 9 Jul 2024 14:05:33 +0200 Subject: [PATCH 013/361] wip update route --- 
fossildb/run.sh | 3 +- .../services/AccessTokenService.scala | 6 ++- .../controllers/DSAnnotationController.scala | 39 ++++++++++++++++++- ...alableminds.webknossos.tracingstore.routes | 1 + 4 files changed, 45 insertions(+), 4 deletions(-) diff --git a/fossildb/run.sh b/fossildb/run.sh index 39a622d2ee8..53f56832c2f 100755 --- a/fossildb/run.sh +++ b/fossildb/run.sh @@ -14,7 +14,6 @@ if [ ! -f "$JAR" ] || [ ! "$CURRENT_VERSION" == "$VERSION" ]; then wget -q --show-progress -O "$JAR" "$URL" fi -# Note that the editableMappings column is no longer used by wk. Still here for backwards compatibility. -COLLECTIONS="skeletons,skeletonUpdates,volumes,volumeData,volumeUpdates,volumeSegmentIndex,editableMappings,editableMappingUpdates,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates" +COLLECTIONS="skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates" exec java -jar "$JAR" -c "$COLLECTIONS" -d "$FOSSILDB_HOME/data" -b "$FOSSILDB_HOME/backup" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 4ba7159c4e3..b21c415b13c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -19,7 +19,7 @@ object AccessMode extends ExtendedEnumeration { object AccessResourceType extends ExtendedEnumeration { type AccessResourceType = Value - val datasource, tracing, webknossos, jobExport = Value + val datasource, tracing, annotation, webknossos, jobExport = Value } case class UserAccessAnswer(granted: Boolean, msg: Option[String] = None) @@ -42,9 +42,13 @@ object UserAccessRequest { 
def readTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) + def writeTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.write) + def writeAnnotation(annotationId: String): UserAccessRequest = + UserAccessRequest(DataSourceId(annotationId, ""), AccessResourceType.annotation, AccessMode.write) + def downloadJobExport(jobId: String): UserAccessRequest = UserAccessRequest(DataSourceId(jobId, ""), AccessResourceType.jobExport, AccessMode.read) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala index e534925f6c0..e4a5d8093ae 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala @@ -1,17 +1,26 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject +import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService +import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import play.api.libs.json.{Json, OFormat} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import 
scala.concurrent.{ExecutionContext, Future} +case class GenericUpdateActionGroup(transactionGroupCount: Int) +object GenericUpdateActionGroup { + implicit val jsonFormat: OFormat[GenericUpdateActionGroup] = Json.format[GenericUpdateActionGroup] +} + class DSAnnotationController @Inject()( accessTokenService: TracingStoreAccessTokenService, + slackNotificationService: TSSlackNotificationService, tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { @@ -26,6 +35,34 @@ class DSAnnotationController @Inject()( } } } + + def update(annotationId: String, token: Option[String]): Action[List[GenericUpdateActionGroup]] = + Action.async(validateJson[List[GenericUpdateActionGroup]]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccess(UserAccessRequest.writeAnnotation(annotationId), + urlOrHeaderToken(token, request)) { + val updateGroups = request.body + if (updateGroups.forall(_.transactionGroupCount == 1)) { + //commitUpdates(tracingId, updateGroups, urlOrHeaderToken(token, request)).map(_ => Ok) + Fox.successful(Ok) + } else { + /*updateGroups + .foldLeft(tracingService.currentVersion(tracingId)) { (currentCommittedVersionFox, updateGroup) => + handleUpdateGroupForTransaction(tracingId, + currentCommittedVersionFox, + updateGroup, + urlOrHeaderToken(token, request)) + } + .map(_ => Ok) + + */ + Fox.successful(Ok) + } + } + } + } + } } // get version history diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 566242f4c52..da0b1f93131 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -6,6 +6,7 @@ GET /health 
@com.scalableminds.webknossos.tracingstore.controllers.Application.health POST /annotation/initialize @com.scalableminds.webknossos.tracingstore.controllers.DSAnnotationController.initialize(annotationId: String, token: Option[String]) +POST /annotation/update @com.scalableminds.webknossos.tracingstore.controllers.DSAnnotationController.update(annotationId: String, token: Option[String]) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) From 950ab983ba7bbee6a4103afcb2043dc5175b6dc6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 9 Jul 2024 14:54:15 +0200 Subject: [PATCH 014/361] annotationUpdates --- .../controllers/DSAnnotationController.scala | 10 +++++++++- .../tracingstore/tracings/TracingDataStore.scala | 2 ++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala index e4a5d8093ae..a2f61ca8729 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala @@ -44,7 +44,7 @@ class DSAnnotationController @Inject()( urlOrHeaderToken(token, request)) { val updateGroups = request.body if (updateGroups.forall(_.transactionGroupCount == 1)) { - //commitUpdates(tracingId, updateGroups, urlOrHeaderToken(token, request)).map(_ => Ok) + commitUpdates(annotationId, updateGroups, urlOrHeaderToken(token, request)).map(_ => Ok) Fox.successful(Ok) } else { /*updateGroups @@ -63,6 +63,14 @@ class DSAnnotationController @Inject()( } } } + + private def commitUpdates(annotationId: String, + updateGroups: List[GenericUpdateActionGroup], + token: Option[String]): Fox[Unit] = { + val 
currentCommittedVersion: Fox[Long] = + tracingDataStore.annotationUpdates.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + Fox.successful(()) + } } // get version history diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala index 629d79d18a9..4de87d378b3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala @@ -22,6 +22,8 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val skeletons = new FossilDBClient("skeletons", config, slackNotificationService) + lazy val annotationUpdates = new FossilDBClient("annotationUpdates", config, slackNotificationService) + lazy val skeletonUpdates = new FossilDBClient("skeletonUpdates", config, slackNotificationService) lazy val volumes = new FossilDBClient("volumes", config, slackNotificationService) From 64e4e94d14f6a6728013da66f73c6bf088a13501 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 15 Jul 2024 12:01:39 +0200 Subject: [PATCH 015/361] WIP: AnnotationTransactionService --- .../tracingstore/TracingStoreModule.scala | 3 + .../AnnotationTransactionService.scala | 156 ++++++++++++++++++ .../annotation/DSAnnotationService.scala | 6 + .../controllers/DSAnnotationController.scala | 26 ++- 4 files changed, 182 insertions(+), 9 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index cd6fb91fc9d..c0d36e2265c 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -4,6 +4,7 @@ import org.apache.pekko.actor.ActorSystem import com.google.inject.AbstractModule import com.google.inject.name.Names import com.scalableminds.webknossos.datastore.services.AdHocMeshServiceHolder +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationTransactionService import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.TracingDataStore import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService @@ -25,5 +26,7 @@ class TracingStoreModule extends AbstractModule { bind(classOf[EditableMappingService]).asEagerSingleton() bind(classOf[TSSlackNotificationService]).asEagerSingleton() bind(classOf[AdHocMeshServiceHolder]).asEagerSingleton() + bind(classOf[AnnotationTransactionService]).asEagerSingleton() } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala new file mode 100644 index 00000000000..9dc54bc07e3 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -0,0 +1,156 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.tools.{Fox, JsonHelper} +import com.scalableminds.util.tools.Fox.bool2Fox +import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import com.scalableminds.webknossos.tracingstore.controllers.GenericUpdateActionGroup +import play.api.http.Status.CONFLICT +import play.api.libs.json.Json + +import javax.inject.Inject +import 
scala.concurrent.ExecutionContext +import scala.concurrent.duration._ + +class AnnotationTransactionService @Inject()( + handledGroupIdStore: TracingStoreRedisStore, // TODO: instantiate here rather than with injection, give fix namespace prefix? + uncommittedUpdatesStore: TracingStoreRedisStore, + annotationService: DSAnnotationService) { + + private val transactionGroupExpiry: FiniteDuration = 24 hours + private val handledGroupCacheExpiry: FiniteDuration = 24 hours + + private def transactionGroupKey(annotationId: String, + transactionId: String, + transactionGroupIndex: Int, + version: Long) = + s"transactionGroup___${annotationId}___${transactionId}___${transactionGroupIndex}___$version" + + private def handledGroupKey(annotationId: String, transactionId: String, version: Long, transactionGroupIndex: Int) = + s"handledGroup___${annotationId}___${transactionId}___${version}___$transactionGroupIndex" + + private def patternFor(annotationId: String, transactionId: String) = + s"transactionGroup___${annotationId}___${transactionId}___*" + + def saveUncommitted(annotationId: String, + transactionId: String, + transactionGroupIndex: Int, + version: Long, + updateGroup: GenericUpdateActionGroup, + expiry: FiniteDuration)(implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- Fox.runIf(transactionGroupIndex > 0)( + Fox.assertTrue( + uncommittedUpdatesStore.contains(transactionGroupKey( + annotationId, + transactionId, + transactionGroupIndex - 1, + version))) ?~> s"Incorrect transaction index. 
Got: $transactionGroupIndex but ${transactionGroupIndex - 1} does not exist" ~> CONFLICT) + _ <- uncommittedUpdatesStore.insert( + transactionGroupKey(annotationId, transactionId, transactionGroupIndex, version), + Json.toJson(updateGroup).toString(), + Some(expiry)) + } yield () + + def handleUpdateGroupForTransaction(annotationId: String, + previousVersionFox: Fox[Long], + updateGroup: GenericUpdateActionGroup, + userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = + for { + previousCommittedVersion: Long <- previousVersionFox + result <- if (previousCommittedVersion + 1 == updateGroup.version) { + if (updateGroup.transactionGroupCount == updateGroup.transactionGroupIndex + 1) { + // Received the last group of this transaction + commitWithPending(annotationId, updateGroup, userToken) + } else { + for { + _ <- saveUncommitted(annotationId, + updateGroup.transactionId, + updateGroup.transactionGroupIndex, + updateGroup.version, + updateGroup, + transactionGroupExpiry) + _ <- saveToHandledGroupIdStore(annotationId, + updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex) + } yield previousCommittedVersion // no updates have been committed, do not yield version increase + } + } else { + failUnlessAlreadyHandled(updateGroup, annotationId, previousCommittedVersion) + } + } yield result + + // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction + // and commit them all. + private def commitWithPending(annotationId: String, updateGroup: GenericUpdateActionGroup, userToken: Option[String])( + implicit ec: ExecutionContext): Fox[Long] = + for { + previousActionGroupsToCommit <- getAllUncommittedFor(annotationId, updateGroup.transactionId) + _ <- bool2Fox( + previousActionGroupsToCommit + .exists(_.transactionGroupIndex == 0) || updateGroup.transactionGroupCount == 1) ?~> s"Trying to commit a transaction without a group that has transactionGroupIndex 0." 
+ concatenatedGroup = concatenateUpdateGroupsOfTransaction(previousActionGroupsToCommit, updateGroup) + commitResult <- annotationService.commitUpdates(annotationId, List(concatenatedGroup), userToken) + _ <- removeAllUncommittedFor(annotationId, updateGroup.transactionId) + } yield commitResult + + private def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = + uncommittedUpdatesStore.removeAllConditional(patternFor(tracingId, transactionId)) + + private def getAllUncommittedFor(annotationId: String, transactionId: String): Fox[List[GenericUpdateActionGroup]] = + for { + raw: Seq[String] <- uncommittedUpdatesStore.findAllConditional(patternFor(annotationId, transactionId)) + parsed: Seq[GenericUpdateActionGroup] = raw.flatMap(itemAsString => + JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[GenericUpdateActionGroup])) + } yield parsed.toList.sortBy(_.transactionGroupIndex) + + private def saveToHandledGroupIdStore(annotationId: String, + transactionId: String, + version: Long, + transactionGroupIndex: Int): Fox[Unit] = { + val key = handledGroupKey(annotationId, transactionId, version, transactionGroupIndex) + handledGroupIdStore.insert(key, "()", Some(handledGroupCacheExpiry)) + } + + private def handledGroupIdStoreContains(annotationId: String, + transactionId: String, + version: Long, + transactionGroupIndex: Int): Fox[Boolean] = + handledGroupIdStore.contains(handledGroupKey(annotationId, transactionId, version, transactionGroupIndex)) + + private def concatenateUpdateGroupsOfTransaction( + previousActionGroups: List[GenericUpdateActionGroup], + lastActionGroup: GenericUpdateActionGroup): GenericUpdateActionGroup = + if (previousActionGroups.isEmpty) lastActionGroup + else { + val allActionGroups = previousActionGroups :+ lastActionGroup + GenericUpdateActionGroup( + version = lastActionGroup.version, + timestamp = lastActionGroup.timestamp, + authorId = lastActionGroup.authorId, + actions = 
allActionGroups.flatMap(_.actions), + stats = lastActionGroup.stats, // the latest stats do count + info = lastActionGroup.info, // frontend sets this identically for all groups of transaction + transactionId = f"${lastActionGroup.transactionId}-concatenated", + transactionGroupCount = 1, + transactionGroupIndex = 0, + ) + } + + /* If this update group has already been “handled” (successfully saved as either committed or uncommitted), + * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful + * despite the backend receiving it just fine. + */ + private def failUnlessAlreadyHandled(updateGroup: GenericUpdateActionGroup, tracingId: String, previousVersion: Long)( + implicit ec: ExecutionContext): Fox[Long] = { + val errorMessage = s"Incorrect version. Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" + for { + _ <- Fox.assertTrue( + handledGroupIdStoreContains(tracingId, + updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex)) ?~> errorMessage ~> CONFLICT + } yield updateGroup.version + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala index f6dffa0c274..83645cc7e3f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala @@ -9,6 +9,8 @@ import com.scalableminds.webknossos.datastore.Annotation.{ UpdateLayerMetadataAnnotationUpdateAction, UpdateMetadataAnnotationUpdateAction } +import com.scalableminds.webknossos.tracingstore.controllers.GenericUpdateActionGroup +import com.scalableminds.webknossos.tracingstore.tracings.UpdateActionGroup import scalapb.GeneratedMessage import javax.inject.Inject @@ -17,6 +19,10 @@ 
import scala.concurrent.ExecutionContext class DSAnnotationService @Inject()() { def storeUpdate(updateAction: GeneratedMessage)(implicit ec: ExecutionContext): Fox[Unit] = Fox.successful(()) + def commitUpdates(tracingId: String, + updateGroups: List[GenericUpdateActionGroup], + userToken: Option[String]): Fox[Long] = ??? + def newestMaterializableVersion(annotationId: String): Fox[Long] = ??? def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage)( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala index a2f61ca8729..c795f3ad532 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala @@ -7,13 +7,18 @@ import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, DSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import play.api.libs.json.{Json, OFormat} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} -case class GenericUpdateActionGroup(transactionGroupCount: Int) +case class GenericUpdateActionGroup(transactionGroupCount: Int, + transactionGroupIndex: Int, + version: Long, + transactionId: String) object GenericUpdateActionGroup { implicit val jsonFormat: 
OFormat[GenericUpdateActionGroup] = Json.format[GenericUpdateActionGroup] } @@ -21,6 +26,8 @@ object GenericUpdateActionGroup { class DSAnnotationController @Inject()( accessTokenService: TracingStoreAccessTokenService, slackNotificationService: TSSlackNotificationService, + annotationService: DSAnnotationService, + transactionService: AnnotationTransactionService, tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { @@ -47,16 +54,15 @@ class DSAnnotationController @Inject()( commitUpdates(annotationId, updateGroups, urlOrHeaderToken(token, request)).map(_ => Ok) Fox.successful(Ok) } else { - /*updateGroups - .foldLeft(tracingService.currentVersion(tracingId)) { (currentCommittedVersionFox, updateGroup) => - handleUpdateGroupForTransaction(tracingId, - currentCommittedVersionFox, - updateGroup, - urlOrHeaderToken(token, request)) + updateGroups + .foldLeft(annotationService.newestMaterializableVersion(annotationId)) { + (currentCommittedVersionFox, updateGroup) => + transactionService.handleUpdateGroupForTransaction(annotationId, + currentCommittedVersionFox, + updateGroup, + urlOrHeaderToken(token, request)) } .map(_ => Ok) - - */ Fox.successful(Ok) } } @@ -64,6 +70,8 @@ class DSAnnotationController @Inject()( } } + private val transactionGroupExpiry: FiniteDuration = 24 hours + private def commitUpdates(annotationId: String, updateGroups: List[GenericUpdateActionGroup], token: Option[String]): Fox[Unit] = { From 3b8e39ae38a4b87e23d640d7fb05c3d39ec62a17 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 15 Jul 2024 13:41:08 +0200 Subject: [PATCH 016/361] finalize transaction service --- .../AnnotationTransactionService.scala | 44 +++++++++++++--- .../annotation/DSAnnotationService.scala | 30 ++++++++--- .../annotation/UpdateActions.scala | 28 +++++++++++ .../controllers/DSAnnotationController.scala | 50 ++++--------------- 4 files changed, 98 insertions(+), 54 
deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 9dc54bc07e3..319cd00feab 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -3,7 +3,6 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.controllers.GenericUpdateActionGroup import play.api.http.Status.CONFLICT import play.api.libs.json.Json @@ -51,10 +50,10 @@ class AnnotationTransactionService @Inject()( Some(expiry)) } yield () - def handleUpdateGroupForTransaction(annotationId: String, - previousVersionFox: Fox[Long], - updateGroup: GenericUpdateActionGroup, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = + private def handleUpdateGroupForTransaction(annotationId: String, + previousVersionFox: Fox[Long], + updateGroup: GenericUpdateActionGroup, + userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = for { previousCommittedVersion: Long <- previousVersionFox result <- if (previousCommittedVersion + 1 == updateGroup.version) { @@ -90,7 +89,7 @@ class AnnotationTransactionService @Inject()( previousActionGroupsToCommit .exists(_.transactionGroupIndex == 0) || updateGroup.transactionGroupCount == 1) ?~> s"Trying to commit a transaction without a group that has transactionGroupIndex 0." 
concatenatedGroup = concatenateUpdateGroupsOfTransaction(previousActionGroupsToCommit, updateGroup) - commitResult <- annotationService.commitUpdates(annotationId, List(concatenatedGroup), userToken) + commitResult <- commitUpdates(annotationId, List(concatenatedGroup), userToken) _ <- removeAllUncommittedFor(annotationId, updateGroup.transactionId) } yield commitResult @@ -137,6 +136,39 @@ class AnnotationTransactionService @Inject()( ) } + def handleUpdateGroups(annotationId: String, updateGroups: List[GenericUpdateActionGroup], userToken: Option[String])( + implicit ec: ExecutionContext): Fox[Long] = + if (updateGroups.forall(_.transactionGroupCount == 1)) { + commitUpdates(annotationId, updateGroups, userToken) + } else { + updateGroups.foldLeft(annotationService.currentVersion(annotationId)) { + (currentCommittedVersionFox, updateGroup) => + handleUpdateGroupForTransaction(annotationId, currentCommittedVersionFox, updateGroup, userToken) + } + } + + // Perform version check and commit the passed updates + private def commitUpdates(annotationId: String, + updateGroups: List[GenericUpdateActionGroup], + userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = + for { + _ <- annotationService.reportUpdates(annotationId, updateGroups, userToken) + currentCommittedVersion: Fox[Long] = annotationService.currentVersion(annotationId) + newVersion <- updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => + previousVersion.flatMap { prevVersion: Long => + if (prevVersion + 1 == updateGroup.version) { + for { + _ <- annotationService.handleUpdateGroup(annotationId, updateGroup, prevVersion, userToken) + _ <- saveToHandledGroupIdStore(annotationId, + updateGroup.transactionId, + updateGroup.version, + updateGroup.transactionGroupIndex) + } yield updateGroup.version + } else failUnlessAlreadyHandled(updateGroup, annotationId, prevVersion) + } + } + } yield newVersion + /* If this update group has already been “handled” 
(successfully saved as either committed or uncommitted), * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful * despite the backend receiving it just fine. diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala index 83645cc7e3f..67f553a572c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.annotation +import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.Annotation.{ AddLayerAnnotationUpdateAction, @@ -9,21 +10,36 @@ import com.scalableminds.webknossos.datastore.Annotation.{ UpdateLayerMetadataAnnotationUpdateAction, UpdateMetadataAnnotationUpdateAction } -import com.scalableminds.webknossos.tracingstore.controllers.GenericUpdateActionGroup -import com.scalableminds.webknossos.tracingstore.tracings.UpdateActionGroup +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} import scalapb.GeneratedMessage import javax.inject.Inject import scala.concurrent.ExecutionContext -class DSAnnotationService @Inject()() { +class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient) { def storeUpdate(updateAction: GeneratedMessage)(implicit ec: ExecutionContext): Fox[Unit] = Fox.successful(()) - def commitUpdates(tracingId: String, + def reportUpdates(annotationId: String, updateGroups: List[GenericUpdateActionGroup], - userToken: Option[String]): Fox[Long] = ??? - - def newestMaterializableVersion(annotationId: String): Fox[Long] = ??? 
+ userToken: Option[String]): Fox[Unit] = + for { + _ <- remoteWebknossosClient.reportTracingUpdates( + TracingUpdatesReport( + annotationId, + timestamps = updateGroups.map(g => Instant(g.timestamp)), + statistics = updateGroups.flatMap(_.stats).lastOption, + significantChangesCount = updateGroups.map(_.significantChangesCount).sum, + viewChangesCount = updateGroups.map(_.viewChangesCount).sum, + userToken + )) + } yield () + + def currentVersion(annotationId: String): Fox[Long] = ??? + + def handleUpdateGroup(annotationId: String, + updateGroup: GenericUpdateActionGroup, + previousVersion: Long, + userToken: Option[String]): Fox[Unit] = ??? def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage)( implicit ec: ExecutionContext): Fox[AnnotationProto] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala new file mode 100644 index 00000000000..b32e68c4f50 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -0,0 +1,28 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import play.api.libs.json.{JsObject, Json, OFormat} + +case class GenericUpdateAction(actionTimestamp: Option[Long] = None) + +object GenericUpdateAction { + implicit val jsonFormat: OFormat[GenericUpdateAction] = Json.format[GenericUpdateAction] +} + +case class GenericUpdateActionGroup(version: Long, + timestamp: Long, + authorId: Option[String], + actions: List[GenericUpdateAction], + stats: Option[JsObject], + info: Option[String], + transactionId: String, + transactionGroupCount: Int, + transactionGroupIndex: Int) { + + def significantChangesCount: Int = 1 // TODO + + def viewChangesCount: Int = 1 // TODO +} + +object GenericUpdateActionGroup { + implicit val jsonFormat: OFormat[GenericUpdateActionGroup] = 
Json.format[GenericUpdateActionGroup] +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala index c795f3ad532..eff6f83c303 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala @@ -1,33 +1,21 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject -import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService -import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, DSAnnotationService} +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, GenericUpdateActionGroup} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import play.api.libs.json.{Json, OFormat} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} -import scala.concurrent.duration._ -import scala.concurrent.{ExecutionContext, Future} - -case class GenericUpdateActionGroup(transactionGroupCount: Int, - transactionGroupIndex: Int, - version: Long, - transactionId: String) -object GenericUpdateActionGroup { - implicit val jsonFormat: OFormat[GenericUpdateActionGroup] = 
Json.format[GenericUpdateActionGroup] -} +import scala.concurrent.ExecutionContext class DSAnnotationController @Inject()( accessTokenService: TracingStoreAccessTokenService, slackNotificationService: TSSlackNotificationService, - annotationService: DSAnnotationService, - transactionService: AnnotationTransactionService, + annotationTransactionService: AnnotationTransactionService, tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { @@ -49,36 +37,16 @@ class DSAnnotationController @Inject()( logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccess(UserAccessRequest.writeAnnotation(annotationId), urlOrHeaderToken(token, request)) { - val updateGroups = request.body - if (updateGroups.forall(_.transactionGroupCount == 1)) { - commitUpdates(annotationId, updateGroups, urlOrHeaderToken(token, request)).map(_ => Ok) - Fox.successful(Ok) - } else { - updateGroups - .foldLeft(annotationService.newestMaterializableVersion(annotationId)) { - (currentCommittedVersionFox, updateGroup) => - transactionService.handleUpdateGroupForTransaction(annotationId, - currentCommittedVersionFox, - updateGroup, - urlOrHeaderToken(token, request)) - } - .map(_ => Ok) - Fox.successful(Ok) - } + for { + _ <- annotationTransactionService.handleUpdateGroups(annotationId, + request.body, + urlOrHeaderToken(token, request)) + } yield Ok } } } } - private val transactionGroupExpiry: FiniteDuration = 24 hours - - private def commitUpdates(annotationId: String, - updateGroups: List[GenericUpdateActionGroup], - token: Option[String]): Fox[Unit] = { - val currentCommittedVersion: Fox[Long] = - tracingDataStore.annotationUpdates.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - Fox.successful(()) - } } // get version history From 615d78c8e6d2223cea09f54ec88cc5e70c517763 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 15 Jul 2024 17:13:03 +0200 
Subject: [PATCH 017/361] move skeleton updates to GenericUpdateAction trait --- .../annotation/UpdateActions.scala | 10 +- .../updating/SkeletonUpdateActions.scala | 275 ++++++++++-------- 2 files changed, 165 insertions(+), 120 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index b32e68c4f50..062cc51d99e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -2,7 +2,15 @@ package com.scalableminds.webknossos.tracingstore.annotation import play.api.libs.json.{JsObject, Json, OFormat} -case class GenericUpdateAction(actionTimestamp: Option[Long] = None) +trait GenericUpdateAction { + def actionTimestamp: Option[Long] + + def addTimestamp(timestamp: Long): GenericUpdateAction + + def addInfo(info: Option[String]): GenericUpdateAction + + def addAuthorId(authorId: Option[String]): GenericUpdateAction +} object GenericUpdateAction { implicit val jsonFormat: OFormat[GenericUpdateAction] = Json.format[GenericUpdateAction] diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index 1dd5f64d5d9..8a0fb0c75e6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -5,9 +5,12 @@ import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import 
com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits} import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate +import com.scalableminds.webknossos.tracingstore.annotation.GenericUpdateAction import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType.TreeType import play.api.libs.json._ +trait SkeletonUpdateAction extends GenericUpdateAction + case class CreateTreeSkeletonAction(id: Int, color: Option[com.scalableminds.util.image.Color], name: String, @@ -21,9 +24,9 @@ case class CreateTreeSkeletonAction(id: Int, info: Option[String] = None, `type`: Option[TreeType] = None, edgesAreVisible: Option[Boolean]) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { val newTree = Tree( id, Nil, @@ -39,12 +42,12 @@ case class CreateTreeSkeletonAction(id: Int, edgesAreVisible ) tracing.withTrees(newTree +: tracing.trees) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -52,14 +55,16 @@ case class DeleteTreeSkeletonAction(id: Int, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - 
tracing.withTrees(tracing.trees.filter(_.treeId != id)) + extends SkeletonUpdateAction { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + tracing.withTrees(tracing.trees.filter(_.treeId != id))*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -74,9 +79,9 @@ case class UpdateTreeSkeletonAction(id: Int, actionAuthorId: Option[String] = None, info: Option[String] = None, `type`: Option[TreeType] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy( color = colorOptToProto(color).orElse(tree.color), @@ -89,12 +94,12 @@ case class UpdateTreeSkeletonAction(id: Int, ) tracing.withTrees(mapTrees(tracing, id, treeTransform)) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = 
this.copy(actionAuthorId = authorId) } @@ -103,8 +108,9 @@ case class MergeTreeSkeletonAction(sourceId: Int, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { + /* // only nodes and edges are merged here, // other properties are managed explicitly // by the frontend with extra actions @@ -117,12 +123,14 @@ case class MergeTreeSkeletonAction(sourceId: Int, } tracing.withTrees(mapTrees(tracing, targetId, treeTransform).filter(_.treeId != sourceId)) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -132,8 +140,10 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { + + /* // this should only move a whole component, // that is disjoint from the rest of the tree override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -156,11 +166,14 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], tracing.withTrees(tracing.trees.map(selectTree)) } + */ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = 
Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -170,17 +183,19 @@ case class CreateEdgeSkeletonAction(source: Int, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.withEdges(Edge(source, target) +: tree.edges) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -190,17 +205,19 @@ case class DeleteEdgeSkeletonAction(source: Int, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + /*override def applyOn(tracing: SkeletonTracing): 
SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(edges = tree.edges.filter(_ != Edge(source, target))) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -218,10 +235,10 @@ case class CreateNodeSkeletonAction(id: Int, actionAuthorId: Option[String] = None, info: Option[String] = None, additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { val rotationOrDefault = rotation getOrElse NodeDefaults.rotation val newNode = Node( id, @@ -239,12 +256,14 @@ case class CreateNodeSkeletonAction(id: Int, def treeTransform(tree: Tree) = tree.withNodes(newNode +: tree.nodes) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = 
this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -262,10 +281,10 @@ case class UpdateNodeSkeletonAction(id: Int, actionAuthorId: Option[String] = None, info: Option[String] = None, additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + /* override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { val rotationOrDefault = rotation getOrElse NodeDefaults.rotation val newNode = Node( @@ -286,11 +305,14 @@ case class UpdateNodeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } + */ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -300,37 +322,41 @@ case class DeleteNodeSkeletonAction(nodeId: Int, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.withNodes(tree.nodes.filter(_.id != nodeId)) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - 
} + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) } case class UpdateTreeGroupsSkeletonAction(treeGroups: List[UpdateActionTreeGroup], actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - tracing.withTreeGroups(treeGroups.map(convertTreeGroup)) + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + tracing.withTreeGroups(treeGroups.map(convertTreeGroup))*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) } case class UpdateTracingSkeletonAction(activeNode: Option[Int], @@ -342,9 +368,9 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], actionAuthorId: Option[String] = None, info: Option[String] = None, editPositionAdditionalCoordinates: 
Option[Seq[AdditionalCoordinate]] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with ProtoGeometryImplicits { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.copy( editPosition = editPosition, editRotation = editRotation, @@ -352,28 +378,31 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], userBoundingBox = userBoundingBox, activeNodeId = activeNode, editPositionAdditionalCoordinates = AdditionalCoordinate.toProto(editPositionAdditionalCoordinates) - ) + )*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true } case class RevertToVersionAction(sourceVersion: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - throw new Exception("RevertToVersionAction applied on unversioned tracing") + extends SkeletonUpdateAction { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + throw new Exception("RevertToVersionAction applied on unversioned tracing")*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def 
addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -382,20 +411,21 @@ case class UpdateTreeVisibility(treeId: Int, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(isVisible = Some(isVisible)) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true } case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], @@ -403,9 +433,9 @@ case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): 
SkeletonTracing = { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateTreeGroups(treeGroups: Seq[TreeGroup]) = { def treeTransform(tree: Tree) = if (treeGroups.exists(group => tree.groupId.contains(group.groupId))) @@ -425,14 +455,15 @@ case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], .map(group => updateTreeGroups(GroupUtils.getAllChildrenTreeGroups(group))) .getOrElse(tracing) } - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true } case class UpdateTreeEdgesVisibility(treeId: Int, @@ -440,34 +471,38 @@ case class UpdateTreeEdgesVisibility(treeId: Int, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction + extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(edgesAreVisible = Some(edgesAreVisible)) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = 
info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true } case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) + extends SkeletonUpdateAction { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto))*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) } @@ -476,8 +511,8 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + extends SkeletonUpdateAction { + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateUserBoundingBoxes() = tracing.userBoundingBoxes.map { boundingBox => if (boundingBoxId.forall(_ == 
boundingBox.id)) @@ -487,29 +522,31 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], } tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) - } + }*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true } case class UpdateTdCamera(actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends UpdateAction.SkeletonUpdateAction { + extends SkeletonUpdateAction { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing + /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing*/ - override def addTimestamp(timestamp: Long): UpdateAction[SkeletonTracing] = + override def addTimestamp(timestamp: Long): GenericUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction[SkeletonTracing] = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction[SkeletonTracing] = + + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): GenericUpdateAction = this.copy(actionAuthorId = authorId) - override def isViewOnlyChange: Boolean = true } object CreateTreeSkeletonAction { @@ -568,10 +605,10 @@ object UpdateUserBoundingBoxVisibility { } object UpdateTdCamera { implicit val jsonFormat: OFormat[UpdateTdCamera] = 
Json.format[UpdateTdCamera] } -object SkeletonUpdateAction { +object GenericUpdateAction { - implicit object skeletonUpdateActionFormat extends Format[UpdateAction[SkeletonTracing]] { - override def reads(json: JsValue): JsResult[UpdateAction.SkeletonUpdateAction] = { + implicit object genericUpdateActionFormat extends Format[GenericUpdateAction] { + override def reads(json: JsValue): JsResult[GenericUpdateAction] = { val jsonValue = (json \ "value").as[JsObject] (json \ "name").as[String] match { case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) @@ -605,7 +642,7 @@ object SkeletonUpdateAction { private val positionTransform = (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) - override def writes(a: UpdateAction[SkeletonTracing]): JsObject = a match { + override def writes(a: GenericUpdateAction): JsObject = a match { case s: CreateTreeSkeletonAction => Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) case s: DeleteTreeSkeletonAction => From 313d791c5f84c41a16a0bcd2ee11ce18564ad69d Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 16 Jul 2024 11:53:43 +0200 Subject: [PATCH 018/361] further move skeleton update actions --- .../annotation/DSAnnotationService.scala | 20 +- .../annotation/UpdateActions.scala | 108 +++++++++- .../skeleton/SkeletonTracingService.scala | 2 +- .../updating/SkeletonUpdateActions.scala | 188 ++++++------------ 4 files changed, 180 insertions(+), 138 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala index 67f553a572c..4813926c08e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala @@ -10,13 +10,17 @@ import com.scalableminds.webknossos.datastore.Annotation.{ UpdateLayerMetadataAnnotationUpdateAction, UpdateMetadataAnnotationUpdateAction } +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} import scalapb.GeneratedMessage import javax.inject.Inject import scala.concurrent.ExecutionContext -class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient) { +class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, + tracingDataStore: TracingDataStore) + extends KeyValueStoreImplicits { def storeUpdate(updateAction: GeneratedMessage)(implicit ec: ExecutionContext): Fox[Unit] = Fox.successful(()) def reportUpdates(annotationId: String, @@ -37,9 +41,19 @@ class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl def currentVersion(annotationId: String): Fox[Long] = ??? def handleUpdateGroup(annotationId: String, - updateGroup: GenericUpdateActionGroup, + updateActionGroup: GenericUpdateActionGroup, previousVersion: Long, - userToken: Option[String]): Fox[Unit] = ??? + userToken: Option[String]): Fox[Unit] = + // TODO apply volume updates directly? transform to compact? 
+ tracingDataStore.annotationUpdates.put( + annotationId, + updateActionGroup.version, + updateActionGroup.actions + .map(_.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { //to the first action in the group, attach the group's info + case Nil => List[GenericUpdateAction]() + case first :: rest => first.addInfo(updateActionGroup.info) :: rest + } + ) def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage)( implicit ec: ExecutionContext): Fox[AnnotationProto] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index 062cc51d99e..b8dd3651321 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -1,6 +1,27 @@ package com.scalableminds.webknossos.tracingstore.annotation -import play.api.libs.json.{JsObject, Json, OFormat} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ + CreateEdgeSkeletonAction, + CreateNodeSkeletonAction, + CreateTreeSkeletonAction, + DeleteEdgeSkeletonAction, + DeleteNodeSkeletonAction, + DeleteTreeSkeletonAction, + MergeTreeSkeletonAction, + MoveTreeComponentSkeletonAction, + RevertToVersionSkeletonAction, + UpdateNodeSkeletonAction, + UpdateTdCameraSkeletonAction, + UpdateTracingSkeletonAction, + UpdateTreeEdgesVisibilitySkeletonAction, + UpdateTreeGroupVisibilitySkeletonAction, + UpdateTreeGroupsSkeletonAction, + UpdateTreeSkeletonAction, + UpdateTreeVisibilitySkeletonAction, + UpdateUserBoundingBoxVisibilitySkeletonAction, + UpdateUserBoundingBoxesSkeletonAction +} +import play.api.libs.json.{Format, JsObject, JsPath, JsResult, JsValue, Json, OFormat, Reads} trait GenericUpdateAction { def actionTimestamp: Option[Long] @@ 
-13,7 +34,90 @@ trait GenericUpdateAction { } object GenericUpdateAction { - implicit val jsonFormat: OFormat[GenericUpdateAction] = Json.format[GenericUpdateAction] + + implicit object genericUpdateActionFormat extends Format[GenericUpdateAction] { + override def reads(json: JsValue): JsResult[GenericUpdateAction] = { + val jsonValue = (json \ "value").as[JsObject] + (json \ "name").as[String] match { + case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) + case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) + case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) + case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) + case "moveTreeComponent" => deserialize[MoveTreeComponentSkeletonAction](jsonValue) + case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) + case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) + case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) + case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) + case "updateTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) + case "revertToVersion" => deserialize[RevertToVersionSkeletonAction](jsonValue) + case "updateTreeVisibility" => deserialize[UpdateTreeVisibilitySkeletonAction](jsonValue) + case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibilitySkeletonAction](jsonValue) + case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibilitySkeletonAction](jsonValue) + case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) + case "updateUserBoundingBoxVisibility" => + deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) + case "updateTdCamera" => 
deserialize[UpdateTdCameraSkeletonAction](jsonValue) + } + } + + private def deserialize[T](json: JsValue, shouldTransformPositions: Boolean = false)( + implicit tjs: Reads[T]): JsResult[T] = + if (shouldTransformPositions) + json.transform(positionTransform).get.validate[T] + else + json.validate[T] + + private val positionTransform = + (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) + + override def writes(a: GenericUpdateAction): JsObject = a match { + case s: CreateTreeSkeletonAction => + Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) + case s: DeleteTreeSkeletonAction => + Json.obj("name" -> "deleteTree", "value" -> Json.toJson(s)(DeleteTreeSkeletonAction.jsonFormat)) + case s: UpdateTreeSkeletonAction => + Json.obj("name" -> "updateTree", "value" -> Json.toJson(s)(UpdateTreeSkeletonAction.jsonFormat)) + case s: MergeTreeSkeletonAction => + Json.obj("name" -> "mergeTree", "value" -> Json.toJson(s)(MergeTreeSkeletonAction.jsonFormat)) + case s: MoveTreeComponentSkeletonAction => + Json.obj("name" -> "moveTreeComponent", "value" -> Json.toJson(s)(MoveTreeComponentSkeletonAction.jsonFormat)) + case s: CreateNodeSkeletonAction => + Json.obj("name" -> "createNode", "value" -> Json.toJson(s)(CreateNodeSkeletonAction.jsonFormat)) + case s: DeleteNodeSkeletonAction => + Json.obj("name" -> "deleteNode", "value" -> Json.toJson(s)(DeleteNodeSkeletonAction.jsonFormat)) + case s: UpdateNodeSkeletonAction => + Json.obj("name" -> "updateNode", "value" -> Json.toJson(s)(UpdateNodeSkeletonAction.jsonFormat)) + case s: CreateEdgeSkeletonAction => + Json.obj("name" -> "createEdge", "value" -> Json.toJson(s)(CreateEdgeSkeletonAction.jsonFormat)) + case s: DeleteEdgeSkeletonAction => + Json.obj("name" -> "deleteEdge", "value" -> Json.toJson(s)(DeleteEdgeSkeletonAction.jsonFormat)) + case s: UpdateTreeGroupsSkeletonAction => + Json.obj("name" -> "updateTreeGroups", 
"value" -> Json.toJson(s)(UpdateTreeGroupsSkeletonAction.jsonFormat)) + case s: UpdateTracingSkeletonAction => + Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) + case s: RevertToVersionSkeletonAction => + Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionSkeletonAction.jsonFormat)) + case s: UpdateTreeVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeVisibility", + "value" -> Json.toJson(s)(UpdateTreeVisibilitySkeletonAction.jsonFormat)) + case s: UpdateTreeGroupVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeGroupVisibility", + "value" -> Json.toJson(s)(UpdateTreeGroupVisibilitySkeletonAction.jsonFormat)) + case s: UpdateTreeEdgesVisibilitySkeletonAction => + Json.obj("name" -> "updateTreeEdgesVisibility", + "value" -> Json.toJson(s)(UpdateTreeEdgesVisibilitySkeletonAction.jsonFormat)) + case s: UpdateUserBoundingBoxesSkeletonAction => + Json.obj("name" -> "updateUserBoundingBoxes", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxesSkeletonAction.jsonFormat)) + case s: UpdateUserBoundingBoxVisibilitySkeletonAction => + Json.obj("name" -> "updateUserBoundingBoxVisibility", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilitySkeletonAction.jsonFormat)) + case s: UpdateTdCameraSkeletonAction => + Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraSkeletonAction.jsonFormat)) + } + } } case class GenericUpdateActionGroup(version: Long, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 66154842b1d..5be705493fa 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -118,7 +118,7 @@ class SkeletonTracingService @Inject()( case Full(tracing) => remainingUpdates match { case List() => Fox.successful(tracing) - case RevertToVersionAction(sourceVersion, _, _, _) :: tail => + case RevertToVersionSkeletonAction(sourceVersion, _, _, _) :: tail => val sourceTracing = find(tracingId, Some(sourceVersion), useCache = false, applyUpdates = true) updateIter(sourceTracing, tail) case update :: tail => updateIter(Full(update.applyOn(tracing)), tail) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index 8a0fb0c75e6..f449ddd32ec 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -1,9 +1,8 @@ package com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating -import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} -import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits} +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.tracingstore.annotation.GenericUpdateAction import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType.TreeType @@ -389,10 +388,10 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], this.copy(actionAuthorId = authorId) } -case 
class RevertToVersionAction(sourceVersion: Long, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class RevertToVersionSkeletonAction(sourceVersion: Long, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends SkeletonUpdateAction { /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = throw new Exception("RevertToVersionAction applied on unversioned tracing")*/ @@ -406,11 +405,11 @@ case class RevertToVersionAction(sourceVersion: Long, this.copy(actionAuthorId = authorId) } -case class UpdateTreeVisibility(treeId: Int, - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateTreeVisibilitySkeletonAction(treeId: Int, + isVisible: Boolean, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -428,11 +427,11 @@ case class UpdateTreeVisibility(treeId: Int, this.copy(actionAuthorId = authorId) } -case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], + isVisible: Boolean, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -466,11 +465,11 @@ case class UpdateTreeGroupVisibility(treeGroupId: Option[Int], this.copy(actionAuthorId = authorId) } -case class UpdateTreeEdgesVisibility(treeId: Int, - edgesAreVisible: Boolean, - 
actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, + edgesAreVisible: Boolean, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { @@ -489,10 +488,10 @@ case class UpdateTreeEdgesVisibility(treeId: Int, this.copy(actionAuthorId = authorId) } -case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundingBox], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends SkeletonUpdateAction { /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto))*/ @@ -506,11 +505,11 @@ case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], this.copy(actionAuthorId = authorId) } -case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[Int], + isVisible: Boolean, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends SkeletonUpdateAction { /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateUserBoundingBoxes() = @@ -533,9 +532,9 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], this.copy(actionAuthorId = authorId) } -case class UpdateTdCamera(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) 
+case class UpdateTdCameraSkeletonAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends SkeletonUpdateAction { /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing*/ @@ -585,103 +584,28 @@ object UpdateTreeGroupsSkeletonAction { object UpdateTracingSkeletonAction { implicit val jsonFormat: OFormat[UpdateTracingSkeletonAction] = Json.format[UpdateTracingSkeletonAction] } -object RevertToVersionAction { - implicit val jsonFormat: OFormat[RevertToVersionAction] = Json.format[RevertToVersionAction] -} -object UpdateTreeVisibility { - implicit val jsonFormat: OFormat[UpdateTreeVisibility] = Json.format[UpdateTreeVisibility] -} -object UpdateTreeGroupVisibility { - implicit val jsonFormat: OFormat[UpdateTreeGroupVisibility] = Json.format[UpdateTreeGroupVisibility] -} -object UpdateTreeEdgesVisibility { - implicit val jsonFormat: OFormat[UpdateTreeEdgesVisibility] = Json.format[UpdateTreeEdgesVisibility] -} -object UpdateUserBoundingBoxes { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxes] = Json.format[UpdateUserBoundingBoxes] -} -object UpdateUserBoundingBoxVisibility { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibility] = Json.format[UpdateUserBoundingBoxVisibility] -} -object UpdateTdCamera { implicit val jsonFormat: OFormat[UpdateTdCamera] = Json.format[UpdateTdCamera] } - -object GenericUpdateAction { - - implicit object genericUpdateActionFormat extends Format[GenericUpdateAction] { - override def reads(json: JsValue): JsResult[GenericUpdateAction] = { - val jsonValue = (json \ "value").as[JsObject] - (json \ "name").as[String] match { - case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) - case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) - case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) - case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) - case "moveTreeComponent" => 
deserialize[MoveTreeComponentSkeletonAction](jsonValue) - case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) - case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) - case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) - case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) - case "updateTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) - case "revertToVersion" => deserialize[RevertToVersionAction](jsonValue) - case "updateTreeVisibility" => deserialize[UpdateTreeVisibility](jsonValue) - case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibility](jsonValue) - case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibility](jsonValue) - case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxes](jsonValue) - case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibility](jsonValue) - case "updateTdCamera" => deserialize[UpdateTdCamera](jsonValue) - } - } - - def deserialize[T](json: JsValue, shouldTransformPositions: Boolean = false)(implicit tjs: Reads[T]): JsResult[T] = - if (shouldTransformPositions) - json.transform(positionTransform).get.validate[T] - else - json.validate[T] - - private val positionTransform = - (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) - - override def writes(a: GenericUpdateAction): JsObject = a match { - case s: CreateTreeSkeletonAction => - Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) - case s: DeleteTreeSkeletonAction => - Json.obj("name" -> "deleteTree", "value" -> Json.toJson(s)(DeleteTreeSkeletonAction.jsonFormat)) - case s: UpdateTreeSkeletonAction => - 
Json.obj("name" -> "updateTree", "value" -> Json.toJson(s)(UpdateTreeSkeletonAction.jsonFormat)) - case s: MergeTreeSkeletonAction => - Json.obj("name" -> "mergeTree", "value" -> Json.toJson(s)(MergeTreeSkeletonAction.jsonFormat)) - case s: MoveTreeComponentSkeletonAction => - Json.obj("name" -> "moveTreeComponent", "value" -> Json.toJson(s)(MoveTreeComponentSkeletonAction.jsonFormat)) - case s: CreateNodeSkeletonAction => - Json.obj("name" -> "createNode", "value" -> Json.toJson(s)(CreateNodeSkeletonAction.jsonFormat)) - case s: DeleteNodeSkeletonAction => - Json.obj("name" -> "deleteNode", "value" -> Json.toJson(s)(DeleteNodeSkeletonAction.jsonFormat)) - case s: UpdateNodeSkeletonAction => - Json.obj("name" -> "updateNode", "value" -> Json.toJson(s)(UpdateNodeSkeletonAction.jsonFormat)) - case s: CreateEdgeSkeletonAction => - Json.obj("name" -> "createEdge", "value" -> Json.toJson(s)(CreateEdgeSkeletonAction.jsonFormat)) - case s: DeleteEdgeSkeletonAction => - Json.obj("name" -> "deleteEdge", "value" -> Json.toJson(s)(DeleteEdgeSkeletonAction.jsonFormat)) - case s: UpdateTreeGroupsSkeletonAction => - Json.obj("name" -> "updateTreeGroups", "value" -> Json.toJson(s)(UpdateTreeGroupsSkeletonAction.jsonFormat)) - case s: UpdateTracingSkeletonAction => - Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) - case s: RevertToVersionAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionAction.jsonFormat)) - case s: UpdateTreeVisibility => - Json.obj("name" -> "updateTreeVisibility", "value" -> Json.toJson(s)(UpdateTreeVisibility.jsonFormat)) - case s: UpdateTreeGroupVisibility => - Json.obj("name" -> "updateTreeGroupVisibility", "value" -> Json.toJson(s)(UpdateTreeGroupVisibility.jsonFormat)) - case s: UpdateTreeEdgesVisibility => - Json.obj("name" -> "updateTreeEdgesVisibility", "value" -> Json.toJson(s)(UpdateTreeEdgesVisibility.jsonFormat)) - case s: UpdateUserBoundingBoxes => - 
Json.obj("name" -> "updateUserBoundingBoxes", "value" -> Json.toJson(s)(UpdateUserBoundingBoxes.jsonFormat)) - case s: UpdateUserBoundingBoxVisibility => - Json.obj("name" -> "updateUserBoundingBoxVisibility", - "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibility.jsonFormat)) - case s: UpdateTdCamera => - Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCamera.jsonFormat)) - } - } +object RevertToVersionSkeletonAction { + implicit val jsonFormat: OFormat[RevertToVersionSkeletonAction] = Json.format[RevertToVersionSkeletonAction] +} +object UpdateTreeVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateTreeVisibilitySkeletonAction] = Json.format[UpdateTreeVisibilitySkeletonAction] +} +object UpdateTreeGroupVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateTreeGroupVisibilitySkeletonAction] = + Json.format[UpdateTreeGroupVisibilitySkeletonAction] +} +object UpdateTreeEdgesVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateTreeEdgesVisibilitySkeletonAction] = + Json.format[UpdateTreeEdgesVisibilitySkeletonAction] +} +object UpdateUserBoundingBoxesSkeletonAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxesSkeletonAction] = + Json.format[UpdateUserBoundingBoxesSkeletonAction] +} +object UpdateUserBoundingBoxVisibilitySkeletonAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibilitySkeletonAction] = + Json.format[UpdateUserBoundingBoxVisibilitySkeletonAction] +} +object UpdateTdCameraSkeletonAction { + implicit val jsonFormat: OFormat[UpdateTdCameraSkeletonAction] = Json.format[UpdateTdCameraSkeletonAction] } From b6a066df37f4209d290156670b88e6051f58ad0b Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 16 Jul 2024 13:47:02 +0200 Subject: [PATCH 019/361] json for volume update actions --- .../annotation/UpdateActions.scala | 40 +++- .../controllers/VolumeTracingController.scala | 4 +- .../tracingstore/tracings/UpdateActions.scala | 5 - 
.../tracings/volume/VolumeUpdateActions.scala | 175 ++++++++---------- 4 files changed, 114 insertions(+), 110 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index b8dd3651321..212fd9c4c84 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -21,7 +21,23 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ UpdateUserBoundingBoxVisibilitySkeletonAction, UpdateUserBoundingBoxesSkeletonAction } -import play.api.libs.json.{Format, JsObject, JsPath, JsResult, JsValue, Json, OFormat, Reads} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + CreateSegmentVolumeAction, + DeleteSegmentDataVolumeAction, + DeleteSegmentVolumeAction, + ImportVolumeData, + RemoveFallbackLayer, + RevertToVersionVolumeAction, + UpdateBucketVolumeAction, + UpdateMappingNameVolumeAction, + UpdateSegmentGroupsVolumeAction, + UpdateSegmentVolumeAction, + UpdateTdCamera, + UpdateTracingVolumeAction, + UpdateUserBoundingBoxVisibility, + UpdateUserBoundingBoxes +} +import play.api.libs.json.{Format, JsError, JsObject, JsPath, JsResult, JsValue, Json, OFormat, Reads} trait GenericUpdateAction { def actionTimestamp: Option[Long] @@ -31,6 +47,8 @@ trait GenericUpdateAction { def addInfo(info: Option[String]): GenericUpdateAction def addAuthorId(authorId: Option[String]): GenericUpdateAction + + def isViewOnlyChange: Boolean = false } object GenericUpdateAction { @@ -50,16 +68,28 @@ object GenericUpdateAction { case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) case "updateTreeGroups" => 
deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) - case "updateTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) - case "revertToVersion" => deserialize[RevertToVersionSkeletonAction](jsonValue) + case "updateSkeletonTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) case "updateTreeVisibility" => deserialize[UpdateTreeVisibilitySkeletonAction](jsonValue) case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibilitySkeletonAction](jsonValue) case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibilitySkeletonAction](jsonValue) case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) - case "updateTdCamera" => deserialize[UpdateTdCameraSkeletonAction](jsonValue) - } + case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) + case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) + case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxes](jsonValue) + case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibility](jsonValue) + case "removeFallbackLayer" => deserialize[RemoveFallbackLayer](jsonValue) + case "importVolumeTracing" => deserialize[ImportVolumeData](jsonValue) + case "updateTdCamera" => deserialize[UpdateTdCamera](jsonValue) + case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) + case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) + case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) + case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) + case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) + case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) + case unknownAction: String => JsError(s"Invalid update action 
s'$unknownAction'") + } // TODO revertToVersion } private def deserialize[T](json: JsValue, shouldTransformPositions: Boolean = false)( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 8280f681dd8..dcfd07b026d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -37,7 +37,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ MergedVolumeStats, ResolutionRestrictions, TSFullMeshService, - UpdateMappingNameAction, + UpdateMappingNameVolumeAction, VolumeDataZipFormat, VolumeSegmentIndexService, VolumeSegmentStatisticsService, @@ -368,7 +368,7 @@ class VolumeTracingController @Inject()( _ <- bool2Fox(tracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" (editableMappingId, editableMappingInfo) <- editableMappingService.create( baseMappingName = tracingMappingName) - volumeUpdate = UpdateMappingNameAction(Some(editableMappingId), + volumeUpdate = UpdateMappingNameVolumeAction(Some(editableMappingId), isEditable = Some(true), isLocked = Some(true), actionTimestamp = Some(System.currentTimeMillis())) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala index aebd371ae76..878b9cef553 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala @@ -26,11 +26,6 @@ trait UpdateAction[T <: GeneratedMessage] { def isViewOnlyChange: 
Boolean = false } -object UpdateAction { - type SkeletonUpdateAction = UpdateAction[SkeletonTracing] - type VolumeUpdateAction = UpdateAction[VolumeTracing] -} - case class UpdateActionGroup[T <: GeneratedMessage]( version: Long, timestamp: Long, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 2cb0f710558..c540f85d13b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -3,11 +3,10 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import java.util.Base64 import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.VolumeTracing.{Segment, SegmentGroup, VolumeTracing} -import com.scalableminds.webknossos.datastore.geometry import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate -import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction.VolumeUpdateAction -import com.scalableminds.webknossos.tracingstore.tracings.{NamedBoundingBox, UpdateAction} +import com.scalableminds.webknossos.tracingstore.annotation.GenericUpdateAction +import com.scalableminds.webknossos.tracingstore.tracings.NamedBoundingBox import play.api.libs.json._ trait VolumeUpdateActionHelper { @@ -23,6 +22,8 @@ trait VolumeUpdateActionHelper { } +trait VolumeUpdateAction extends GenericUpdateAction + trait ApplyableVolumeAction extends VolumeUpdateAction case class UpdateBucketVolumeAction(position: Vec3Int, @@ -39,8 +40,9 @@ case class UpdateBucketVolumeAction(position: Vec3Int, override def addTimestamp(timestamp: Long): VolumeUpdateAction = 
this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: CompactVolumeUpdateAction = + def transformToCompact: CompactVolumeUpdateAction = CompactVolumeUpdateAction("updateBucket", actionTimestamp, actionAuthorId, Json.obj()) } @@ -62,9 +64,7 @@ case class UpdateTracingVolumeAction( override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateTracing", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) override def isViewOnlyChange: Boolean = true } @@ -82,11 +82,7 @@ case class RevertToVersionVolumeAction(sourceVersion: Long, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("revertToVersion", - actionTimestamp, - actionAuthorId, - Json.obj("sourceVersion" -> sourceVersion)) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) } object RevertToVersionVolumeAction { @@ -102,12 +98,10 @@ case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateUserBoundingBoxes", 
actionTimestamp, actionAuthorId, Json.obj()) - - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto))*/ } object UpdateUserBoundingBoxes { @@ -123,15 +117,11 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateUserBoundingBoxVisibility", - actionTimestamp, - actionAuthorId, - Json.obj("boundingBoxId" -> boundingBoxId, "newVisibility" -> isVisible)) override def isViewOnlyChange: Boolean = true - override def applyOn(tracing: VolumeTracing): VolumeTracing = { + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = { def updateUserBoundingBoxes(): Seq[geometry.NamedBoundingBoxProto] = tracing.userBoundingBoxes.map { boundingBox => @@ -142,7 +132,7 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], } tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) - } + }*/ } object UpdateUserBoundingBoxVisibility { @@ -156,12 +146,10 @@ case class RemoveFallbackLayer(actionTimestamp: Option[Long] = None, override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("removeFallbackLayer", 
actionTimestamp, actionAuthorId, Json.obj()) - - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.clearFallbackLayer + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.clearFallbackLayer*/ } object RemoveFallbackLayer { @@ -176,15 +164,10 @@ case class ImportVolumeData(largestSegmentId: Option[Long], override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("importVolumeTracing", - actionTimestamp, - actionAuthorId, - Json.obj("largestSegmentId" -> largestSegmentId)) - - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.copy(largestSegmentId = largestSegmentId) + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.copy(largestSegmentId = largestSegmentId)*/ } object ImportVolumeData { @@ -196,15 +179,13 @@ case class AddSegmentIndex(actionTimestamp: Option[Long] = None, info: Option[String] = None) extends ApplyableVolumeAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("addSegmentIndex", actionTimestamp, actionAuthorId, Json.obj()) + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.copy(hasSegmentIndex = Some(true))*/ - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.copy(hasSegmentIndex = Some(true)) } object AddSegmentIndex { @@ 
-220,9 +201,7 @@ case class UpdateTdCamera(actionTimestamp: Option[Long] = None, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateTdCamera", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) override def isViewOnlyChange: Boolean = true } @@ -239,7 +218,8 @@ case class CreateSegmentVolumeAction(id: Long, creationTime: Option[Long], actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + info: Option[String] = None) extends ApplyableVolumeAction with ProtoGeometryImplicits { @@ -247,11 +227,9 @@ case class CreateSegmentVolumeAction(id: Long, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("createSegment", actionTimestamp, actionAuthorId, Json.obj("id" -> id)) - - override def applyOn(tracing: VolumeTracing): VolumeTracing = { + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = { val newSegment = Segment(id, anchorPosition.map(vec3IntToProto), @@ -261,7 +239,7 @@ case class CreateSegmentVolumeAction(id: Long, groupId, AdditionalCoordinate.toProto(additionalCoordinates)) tracing.addSegments(newSegment) - } + }*/ } object CreateSegmentVolumeAction { @@ -276,7 +254,8 @@ case class UpdateSegmentVolumeAction(id: Long, groupId: Option[Int], actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - 
additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + info: Option[String] = None) extends ApplyableVolumeAction with ProtoGeometryImplicits with VolumeUpdateActionHelper { @@ -285,11 +264,9 @@ case class UpdateSegmentVolumeAction(id: Long, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("updateSegment", actionTimestamp, actionAuthorId, Json.obj("id" -> id)) - - override def applyOn(tracing: VolumeTracing): VolumeTracing = { + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = { def segmentTransform(segment: Segment): Segment = segment.copy( anchorPosition = anchorPosition.map(vec3IntToProto), @@ -300,7 +277,7 @@ case class UpdateSegmentVolumeAction(id: Long, anchorPositionAdditionalCoordinates = AdditionalCoordinate.toProto(additionalCoordinates) ) tracing.withSegments(mapSegments(tracing, id, segmentTransform)) - } + }*/ } object UpdateSegmentVolumeAction { @@ -309,19 +286,18 @@ object UpdateSegmentVolumeAction { case class DeleteSegmentVolumeAction(id: Long, actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None) - extends ApplyableVolumeAction { + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends VolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("deleteSegment", 
actionTimestamp, actionAuthorId, Json.obj("id" -> id)) - - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.withSegments(tracing.segments.filter(_.segmentId != id)) + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.withSegments(tracing.segments.filter(_.segmentId != id))*/ } @@ -331,53 +307,54 @@ object DeleteSegmentVolumeAction { case class DeleteSegmentDataVolumeAction(id: Long, actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None) + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends VolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - - override def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("deleteSegmentData", actionTimestamp, actionAuthorId, Json.obj()) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) } object DeleteSegmentDataVolumeAction { implicit val jsonFormat: OFormat[DeleteSegmentDataVolumeAction] = Json.format[DeleteSegmentDataVolumeAction] } -case class UpdateMappingNameAction(mappingName: Option[String], - isEditable: Option[Boolean], - isLocked: Option[Boolean], - actionTimestamp: Option[Long], - actionAuthorId: Option[String] = None) +case class UpdateMappingNameVolumeAction(mappingName: Option[String], + isEditable: Option[Boolean], + isLocked: Option[Boolean], + actionTimestamp: Option[Long], + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends ApplyableVolumeAction { - override def addTimestamp(timestamp: Long): VolumeUpdateAction = - this.copy(actionTimestamp = Some(timestamp)) - - override def transformToCompact: UpdateAction[VolumeTracing] = - CompactVolumeUpdateAction("updateMappingName", - actionTimestamp, - actionAuthorId, - Json.obj("mappingName" -> 
mappingName)) + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def applyOn(tracing: VolumeTracing): VolumeTracing = + /* override def applyOn(tracing: VolumeTracing): VolumeTracing = if (tracing.mappingIsLocked.getOrElse(false)) tracing // cannot change mapping name if it is locked else tracing.copy(mappingName = mappingName, mappingIsEditable = Some(isEditable.getOrElse(false)), - mappingIsLocked = Some(isLocked.getOrElse(false))) + mappingIsLocked = Some(isLocked.getOrElse(false)))*/ } -object UpdateMappingNameAction { - implicit val jsonFormat: OFormat[UpdateMappingNameAction] = Json.format[UpdateMappingNameAction] +object UpdateMappingNameVolumeAction { + implicit val jsonFormat: OFormat[UpdateMappingNameVolumeAction] = Json.format[UpdateMappingNameVolumeAction] } case class CompactVolumeUpdateAction(name: String, actionTimestamp: Option[Long], actionAuthorId: Option[String] = None, - value: JsObject) - extends VolumeUpdateAction + value: JsObject, + info: Option[String] = None) + extends VolumeUpdateAction { + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) +} object CompactVolumeUpdateAction { implicit object compactVolumeUpdateActionFormat extends Format[CompactVolumeUpdateAction] { @@ -386,8 +363,11 @@ object CompactVolumeUpdateAction { name <- (json \ "name").validate[String] actionTimestamp <- (json \ "value" \ "actionTimestamp").validateOpt[Long] actionAuthorId <- (json \ "value" \ "actionAuthorId").validateOpt[String] 
- value <- (json \ "value").validate[JsObject].map(_ - "actionTimestamp") - } yield CompactVolumeUpdateAction(name, actionTimestamp, actionAuthorId, value) + info <- (json \ "value" \ "info").validateOpt[String] + value <- (json \ "value") + .validate[JsObject] + .map(_ - "actionTimestamp") // TODO also separate out info + actionAuthorId + } yield CompactVolumeUpdateAction(name, actionTimestamp, actionAuthorId, value, info) override def writes(o: CompactVolumeUpdateAction): JsValue = Json.obj("name" -> o.name, "value" -> (Json.obj("actionTimestamp" -> o.actionTimestamp) ++ o.value)) @@ -398,16 +378,15 @@ case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegme actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction + extends VolumeUpdateAction with VolumeUpdateActionHelper { - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup)) + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup))*/ - override def addTimestamp(timestamp: Long): UpdateAction[VolumeTracing] = - this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): UpdateAction[VolumeTracing] = + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction[VolumeTracing] = this.copy(info = info) + override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) } object UpdateSegmentGroupsVolumeAction { @@ -432,7 +411,7 @@ object VolumeUpdateAction { case "updateSegmentGroups" => (json \ "value").validate[UpdateSegmentGroupsVolumeAction] case "deleteSegment" => (json \ 
"value").validate[DeleteSegmentVolumeAction] case "deleteSegmentData" => (json \ "value").validate[DeleteSegmentDataVolumeAction] - case "updateMappingName" => (json \ "value").validate[UpdateMappingNameAction] + case "updateMappingName" => (json \ "value").validate[UpdateMappingNameVolumeAction] case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") } From 398bc63030e8839f19e22173cab8ffac75a84b3e Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 16 Jul 2024 13:49:49 +0200 Subject: [PATCH 020/361] cleanup --- .../AnnotationTransactionService.scala | 24 ++-- .../annotation/DSAnnotationService.scala | 13 +- .../annotation/UpdateActions.scala | 39 +++--- .../controllers/DSAnnotationController.scala | 6 +- .../tracingstore/tracings/UpdateActions.scala | 86 ------------- .../updating/SkeletonUpdateActions.scala | 118 +++++++++--------- .../tracings/volume/VolumeUpdateActions.scala | 36 +++--- 7 files changed, 116 insertions(+), 206 deletions(-) delete mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 319cd00feab..a29f90e1f0a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -34,7 +34,7 @@ class AnnotationTransactionService @Inject()( transactionId: String, transactionGroupIndex: Int, version: Long, - updateGroup: GenericUpdateActionGroup, + updateGroup: UpdateActionGroup, expiry: FiniteDuration)(implicit ec: ExecutionContext): Fox[Unit] = for { _ <- Fox.runIf(transactionGroupIndex > 0)( @@ -52,7 +52,7 @@ class 
AnnotationTransactionService @Inject()( private def handleUpdateGroupForTransaction(annotationId: String, previousVersionFox: Fox[Long], - updateGroup: GenericUpdateActionGroup, + updateGroup: UpdateActionGroup, userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = for { previousCommittedVersion: Long <- previousVersionFox @@ -81,7 +81,7 @@ class AnnotationTransactionService @Inject()( // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction // and commit them all. - private def commitWithPending(annotationId: String, updateGroup: GenericUpdateActionGroup, userToken: Option[String])( + private def commitWithPending(annotationId: String, updateGroup: UpdateActionGroup, userToken: Option[String])( implicit ec: ExecutionContext): Fox[Long] = for { previousActionGroupsToCommit <- getAllUncommittedFor(annotationId, updateGroup.transactionId) @@ -96,11 +96,11 @@ class AnnotationTransactionService @Inject()( private def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = uncommittedUpdatesStore.removeAllConditional(patternFor(tracingId, transactionId)) - private def getAllUncommittedFor(annotationId: String, transactionId: String): Fox[List[GenericUpdateActionGroup]] = + private def getAllUncommittedFor(annotationId: String, transactionId: String): Fox[List[UpdateActionGroup]] = for { raw: Seq[String] <- uncommittedUpdatesStore.findAllConditional(patternFor(annotationId, transactionId)) - parsed: Seq[GenericUpdateActionGroup] = raw.flatMap(itemAsString => - JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[GenericUpdateActionGroup])) + parsed: Seq[UpdateActionGroup] = raw.flatMap(itemAsString => + JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[UpdateActionGroup])) } yield parsed.toList.sortBy(_.transactionGroupIndex) private def saveToHandledGroupIdStore(annotationId: String, @@ -118,12 +118,12 @@ class AnnotationTransactionService @Inject()( 
handledGroupIdStore.contains(handledGroupKey(annotationId, transactionId, version, transactionGroupIndex)) private def concatenateUpdateGroupsOfTransaction( - previousActionGroups: List[GenericUpdateActionGroup], - lastActionGroup: GenericUpdateActionGroup): GenericUpdateActionGroup = + previousActionGroups: List[UpdateActionGroup], + lastActionGroup: UpdateActionGroup): UpdateActionGroup = if (previousActionGroups.isEmpty) lastActionGroup else { val allActionGroups = previousActionGroups :+ lastActionGroup - GenericUpdateActionGroup( + UpdateActionGroup( version = lastActionGroup.version, timestamp = lastActionGroup.timestamp, authorId = lastActionGroup.authorId, @@ -136,7 +136,7 @@ class AnnotationTransactionService @Inject()( ) } - def handleUpdateGroups(annotationId: String, updateGroups: List[GenericUpdateActionGroup], userToken: Option[String])( + def handleUpdateGroups(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String])( implicit ec: ExecutionContext): Fox[Long] = if (updateGroups.forall(_.transactionGroupCount == 1)) { commitUpdates(annotationId, updateGroups, userToken) @@ -149,7 +149,7 @@ class AnnotationTransactionService @Inject()( // Perform version check and commit the passed updates private def commitUpdates(annotationId: String, - updateGroups: List[GenericUpdateActionGroup], + updateGroups: List[UpdateActionGroup], userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = for { _ <- annotationService.reportUpdates(annotationId, updateGroups, userToken) @@ -173,7 +173,7 @@ class AnnotationTransactionService @Inject()( * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful * despite the backend receiving it just fine. 
*/ - private def failUnlessAlreadyHandled(updateGroup: GenericUpdateActionGroup, tracingId: String, previousVersion: Long)( + private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup, tracingId: String, previousVersion: Long)( implicit ec: ExecutionContext): Fox[Long] = { val errorMessage = s"Incorrect version. Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala index 4813926c08e..e26c5c123d8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala @@ -10,8 +10,7 @@ import com.scalableminds.webknossos.datastore.Annotation.{ UpdateLayerMetadataAnnotationUpdateAction, UpdateMetadataAnnotationUpdateAction } -import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} import scalapb.GeneratedMessage @@ -23,9 +22,7 @@ class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl extends KeyValueStoreImplicits { def storeUpdate(updateAction: GeneratedMessage)(implicit ec: ExecutionContext): Fox[Unit] = Fox.successful(()) - def reportUpdates(annotationId: String, - updateGroups: List[GenericUpdateActionGroup], - userToken: Option[String]): Fox[Unit] = + def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String]): Fox[Unit] = for { _ <- 
remoteWebknossosClient.reportTracingUpdates( TracingUpdatesReport( @@ -41,16 +38,16 @@ class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl def currentVersion(annotationId: String): Fox[Long] = ??? def handleUpdateGroup(annotationId: String, - updateActionGroup: GenericUpdateActionGroup, + updateActionGroup: UpdateActionGroup, previousVersion: Long, userToken: Option[String]): Fox[Unit] = - // TODO apply volume updates directly? transform to compact? + // TODO apply some updates directly? transform to compact? tracingDataStore.annotationUpdates.put( annotationId, updateActionGroup.version, updateActionGroup.actions .map(_.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { //to the first action in the group, attach the group's info - case Nil => List[GenericUpdateAction]() + case Nil => List[UpdateAction]() case first :: rest => first.addInfo(updateActionGroup.info) :: rest } ) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index 212fd9c4c84..169f6e9d6f1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -27,7 +27,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ DeleteSegmentVolumeAction, ImportVolumeData, RemoveFallbackLayer, - RevertToVersionVolumeAction, UpdateBucketVolumeAction, UpdateMappingNameVolumeAction, UpdateSegmentGroupsVolumeAction, @@ -39,22 +38,22 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ } import play.api.libs.json.{Format, JsError, JsObject, JsPath, JsResult, JsValue, Json, OFormat, Reads} -trait GenericUpdateAction { +trait UpdateAction { def actionTimestamp: Option[Long] - def 
addTimestamp(timestamp: Long): GenericUpdateAction + def addTimestamp(timestamp: Long): UpdateAction - def addInfo(info: Option[String]): GenericUpdateAction + def addInfo(info: Option[String]): UpdateAction - def addAuthorId(authorId: Option[String]): GenericUpdateAction + def addAuthorId(authorId: Option[String]): UpdateAction def isViewOnlyChange: Boolean = false } -object GenericUpdateAction { +object UpdateAction { - implicit object genericUpdateActionFormat extends Format[GenericUpdateAction] { - override def reads(json: JsValue): JsResult[GenericUpdateAction] = { + implicit object updateActionFormat extends Format[UpdateAction] { + override def reads(json: JsValue): JsResult[UpdateAction] = { val jsonValue = (json \ "value").as[JsObject] (json \ "name").as[String] match { case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) @@ -102,7 +101,7 @@ object GenericUpdateAction { private val positionTransform = (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) - override def writes(a: GenericUpdateAction): JsObject = a match { + override def writes(a: UpdateAction): JsObject = a match { case s: CreateTreeSkeletonAction => Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) case s: DeleteTreeSkeletonAction => @@ -150,21 +149,21 @@ object GenericUpdateAction { } } -case class GenericUpdateActionGroup(version: Long, - timestamp: Long, - authorId: Option[String], - actions: List[GenericUpdateAction], - stats: Option[JsObject], - info: Option[String], - transactionId: String, - transactionGroupCount: Int, - transactionGroupIndex: Int) { +case class UpdateActionGroup(version: Long, + timestamp: Long, + authorId: Option[String], + actions: List[UpdateAction], + stats: Option[JsObject], + info: Option[String], + transactionId: String, + transactionGroupCount: Int, + transactionGroupIndex: Int) { def significantChangesCount: Int = 1 // TODO def 
viewChangesCount: Int = 1 // TODO } -object GenericUpdateActionGroup { - implicit val jsonFormat: OFormat[GenericUpdateActionGroup] = Json.format[GenericUpdateActionGroup] +object UpdateActionGroup { + implicit val jsonFormat: OFormat[UpdateActionGroup] = Json.format[UpdateActionGroup] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala index eff6f83c303..d03ea14c69e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala @@ -6,7 +6,7 @@ import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService -import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, GenericUpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -31,8 +31,8 @@ class DSAnnotationController @Inject()( } } - def update(annotationId: String, token: Option[String]): Action[List[GenericUpdateActionGroup]] = - Action.async(validateJson[List[GenericUpdateActionGroup]]) { implicit request => + def update(annotationId: String, token: Option[String]): Action[List[UpdateActionGroup]] = + Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { 
accessTokenService.validateAccess(UserAccessRequest.writeAnnotation(annotationId), diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala deleted file mode 100644 index 878b9cef553..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/UpdateActions.scala +++ /dev/null @@ -1,86 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.tracings - -import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import play.api.libs.json._ -import scalapb.GeneratedMessage - -trait UpdateAction[T <: GeneratedMessage] { - - def actionTimestamp: Option[Long] - - def actionAuthorId: Option[String] - - def applyOn(tracing: T): T = tracing - - def addTimestamp(timestamp: Long): UpdateAction[T] = this - - def addInfo(info: Option[String]): UpdateAction[T] = this - - def addAuthorId(authorId: Option[String]): UpdateAction[T] = this - - def transformToCompact: UpdateAction[T] = this - - // For analytics we wan to know how many changes are view only (e.g. 
move camera, toggle tree visibility) - // Overridden in subclasses - def isViewOnlyChange: Boolean = false -} - -case class UpdateActionGroup[T <: GeneratedMessage]( - version: Long, - timestamp: Long, - authorId: Option[String], - actions: List[UpdateAction[T]], - stats: Option[JsObject], - info: Option[String], - transactionId: String, - transactionGroupCount: Int, - transactionGroupIndex: Int -) { - def significantChangesCount: Int = actions.count(!_.isViewOnlyChange) - def viewChangesCount: Int = actions.count(_.isViewOnlyChange) -} - -object UpdateActionGroup { - - implicit def updateActionGroupReads[T <: GeneratedMessage]( - implicit fmt: Reads[UpdateAction[T]]): Reads[UpdateActionGroup[T]] = - (json: JsValue) => - for { - version <- json.validate((JsPath \ "version").read[Long]) - timestamp <- json.validate((JsPath \ "timestamp").read[Long]) - authorId <- json.validate((JsPath \ "authorId").readNullable[String]) - actions <- json.validate((JsPath \ "actions").read[List[UpdateAction[T]]]) - stats <- json.validate((JsPath \ "stats").readNullable[JsObject]) - info <- json.validate((JsPath \ "info").readNullable[String]) - transactionId <- json.validate((JsPath \ "transactionId").read[String]) - transactionGroupCount <- json.validate((JsPath \ "transactionGroupCount").read[Int]) - transactionGroupIndex <- json.validate((JsPath \ "transactionGroupIndex").read[Int]) - } yield { - UpdateActionGroup[T](version, - timestamp, - authorId, - actions, - stats, - info, - transactionId, - transactionGroupCount, - transactionGroupIndex) - } - - implicit def updateActionGroupWrites[T <: GeneratedMessage]( - implicit fmt: Writes[UpdateAction[T]]): Writes[UpdateActionGroup[T]] = - (value: UpdateActionGroup[T]) => - Json.obj( - "version" -> value.version, - "timestamp" -> value.timestamp, - "authorId" -> value.authorId, - "actions" -> Json.toJson(value.actions), - "stats" -> value.stats, - "info" -> value.info, - "transactionId" -> value.transactionId, - "transactionGroupCount" 
-> value.transactionGroupCount, - "transactionGroupIndex" -> value.transactionGroupIndex - ) - -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index f449ddd32ec..e24c7d59f68 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -4,11 +4,11 @@ import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate -import com.scalableminds.webknossos.tracingstore.annotation.GenericUpdateAction +import com.scalableminds.webknossos.tracingstore.annotation.UpdateAction import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType.TreeType import play.api.libs.json._ -trait SkeletonUpdateAction extends GenericUpdateAction +trait SkeletonUpdateAction extends UpdateAction case class CreateTreeSkeletonAction(id: Int, color: Option[com.scalableminds.util.image.Color], @@ -43,10 +43,10 @@ case class CreateTreeSkeletonAction(id: Int, tracing.withTrees(newTree +: tracing.trees) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def 
addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -58,12 +58,12 @@ case class DeleteTreeSkeletonAction(id: Int, /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withTrees(tracing.trees.filter(_.treeId != id))*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -95,10 +95,10 @@ case class UpdateTreeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, id, treeTransform)) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -124,12 +124,12 @@ case class MergeTreeSkeletonAction(sourceId: Int, tracing.withTrees(mapTrees(tracing, targetId, treeTransform).filter(_.treeId != sourceId)) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - 
override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -167,12 +167,12 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], } */ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -189,12 +189,12 @@ case class CreateEdgeSkeletonAction(source: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -211,12 +211,12 @@ case class DeleteEdgeSkeletonAction(source: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: 
Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -257,12 +257,12 @@ case class CreateNodeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -306,12 +306,12 @@ case class UpdateNodeSkeletonAction(id: Int, } */ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -331,12 +331,12 @@ case class DeleteNodeSkeletonAction(nodeId: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def 
addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -349,12 +349,12 @@ case class UpdateTreeGroupsSkeletonAction(treeGroups: List[UpdateActionTreeGroup /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withTreeGroups(treeGroups.map(convertTreeGroup))*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -379,12 +379,12 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], editPositionAdditionalCoordinates = AdditionalCoordinate.toProto(editPositionAdditionalCoordinates) )*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -396,12 +396,12 @@ case class RevertToVersionSkeletonAction(sourceVersion: Long, /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = throw new Exception("RevertToVersionAction applied on unversioned tracing")*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def 
addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -418,12 +418,12 @@ case class UpdateTreeVisibilitySkeletonAction(treeId: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -456,12 +456,12 @@ case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], } }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -479,12 +479,12 @@ case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def 
addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -496,12 +496,12 @@ case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundi /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto))*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -523,12 +523,12 @@ case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[I tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) }*/ - override def addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } @@ -539,12 +539,12 @@ case class UpdateTdCameraSkeletonAction(actionTimestamp: Option[Long] = None, /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing*/ - override def 
addTimestamp(timestamp: Long): GenericUpdateAction = + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): GenericUpdateAction = + override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index c540f85d13b..d8a2e36d743 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.VolumeTracing.{Segment, SegmentGroup, VolumeTracing} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate -import com.scalableminds.webknossos.tracingstore.annotation.GenericUpdateAction +import com.scalableminds.webknossos.tracingstore.annotation.UpdateAction import com.scalableminds.webknossos.tracingstore.tracings.NamedBoundingBox import play.api.libs.json._ @@ -22,7 +22,7 @@ trait VolumeUpdateActionHelper { } -trait VolumeUpdateAction extends GenericUpdateAction +trait VolumeUpdateAction extends UpdateAction trait ApplyableVolumeAction extends VolumeUpdateAction @@ -40,7 +40,7 @@ case class UpdateBucketVolumeAction(position: Vec3Int, override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = 
Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) def transformToCompact: CompactVolumeUpdateAction = CompactVolumeUpdateAction("updateBucket", actionTimestamp, actionAuthorId, Json.obj()) @@ -64,7 +64,7 @@ case class UpdateTracingVolumeAction( override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def isViewOnlyChange: Boolean = true } @@ -82,7 +82,7 @@ case class RevertToVersionVolumeAction(sourceVersion: Long, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) } object RevertToVersionVolumeAction { @@ -98,7 +98,7 @@ case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) /*override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto))*/ @@ -117,7 +117,7 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], override def 
addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def isViewOnlyChange: Boolean = true @@ -146,7 +146,7 @@ case class RemoveFallbackLayer(actionTimestamp: Option[Long] = None, override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) /*override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.clearFallbackLayer*/ @@ -164,7 +164,7 @@ case class ImportVolumeData(largestSegmentId: Option[Long], override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) /*override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.copy(largestSegmentId = largestSegmentId)*/ @@ -181,7 +181,7 @@ case class AddSegmentIndex(actionTimestamp: Option[Long] = None, override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + 
override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) /*override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.copy(hasSegmentIndex = Some(true))*/ @@ -201,7 +201,7 @@ case class UpdateTdCamera(actionTimestamp: Option[Long] = None, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def isViewOnlyChange: Boolean = true } @@ -227,7 +227,7 @@ case class CreateSegmentVolumeAction(id: Long, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) /*override def applyOn(tracing: VolumeTracing): VolumeTracing = { val newSegment = @@ -264,7 +264,7 @@ case class UpdateSegmentVolumeAction(id: Long, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) /*override def applyOn(tracing: VolumeTracing): VolumeTracing = { def segmentTransform(segment: Segment): Segment = @@ -294,7 +294,7 @@ case class DeleteSegmentVolumeAction(id: Long, this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: 
Option[String]): UpdateAction = this.copy(info = info) /*override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.withSegments(tracing.segments.filter(_.segmentId != id))*/ @@ -313,7 +313,7 @@ case class DeleteSegmentDataVolumeAction(id: Long, override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) } object DeleteSegmentDataVolumeAction { @@ -330,7 +330,7 @@ case class UpdateMappingNameVolumeAction(mappingName: Option[String], override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) /* override def applyOn(tracing: VolumeTracing): VolumeTracing = if (tracing.mappingIsLocked.getOrElse(false)) tracing // cannot change mapping name if it is locked @@ -353,7 +353,7 @@ case class CompactVolumeUpdateAction(name: String, override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) } object CompactVolumeUpdateAction { @@ -386,7 +386,7 @@ case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegme override def addTimestamp(timestamp: Long): 
VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): GenericUpdateAction = this.copy(info = info) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) } object UpdateSegmentGroupsVolumeAction { From 08b8bccf22f74069ef3a49f848580877075e360e Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 17 Jul 2024 11:24:35 +0200 Subject: [PATCH 021/361] volume action serialization, wip annotation actions --- .../annotation/AnnotationUpdateActions.scala | 75 ++++++ .../annotation/DSAnnotationService.scala | 27 ++- .../annotation/UpdateActions.scala | 72 +++++- .../EditableMappingUpdateActions.scala | 65 ++--- .../volume/VolumeTracingService.scala | 6 +- .../tracings/volume/VolumeUpdateActions.scala | 222 ++++++------------ 6 files changed, 250 insertions(+), 217 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala new file mode 100644 index 00000000000..725634f66e8 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -0,0 +1,75 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType +import play.api.libs.json.{Json, OFormat} + +trait AnnotationUpdateAction extends UpdateAction + +case class AddLayerAnnotationUpdateAction(layerName: String, + tracingId: String, + typ: AnnotationLayerType, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, 
+ info: Option[String] = None) + extends AnnotationUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class DeleteLayerAnnotationUpdateAction(tracingId: String, + layerName: String, // Just stored for nicer-looking history + typ: AnnotationLayerType, // Just stored for nicer-looking history + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateLayerMetadataAnnotationUpdateAction(tracingId: String, + layerName: String, // Just stored for nicer-looking history + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateMetadataAnnotationUpdateAction(name: Option[String], + description: Option[String], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: 
Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +object AddLayerAnnotationUpdateAction { + implicit val jsonFormat: OFormat[AddLayerAnnotationUpdateAction] = Json.format[AddLayerAnnotationUpdateAction] +} +object DeleteLayerAnnotationUpdateAction { + implicit val jsonFormat: OFormat[DeleteLayerAnnotationUpdateAction] = Json.format[DeleteLayerAnnotationUpdateAction] +} +object UpdateLayerMetadataAnnotationUpdateAction { + implicit val jsonFormat: OFormat[UpdateLayerMetadataAnnotationUpdateAction] = + Json.format[UpdateLayerMetadataAnnotationUpdateAction] +} +object UpdateMetadataAnnotationUpdateAction { + implicit val jsonFormat: OFormat[UpdateMetadataAnnotationUpdateAction] = + Json.format[UpdateMetadataAnnotationUpdateAction] +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala index e26c5c123d8..c0c634e5f74 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala @@ -10,6 +10,7 @@ import com.scalableminds.webknossos.datastore.Annotation.{ UpdateLayerMetadataAnnotationUpdateAction, UpdateMetadataAnnotationUpdateAction } +import com.scalableminds.webknossos.tracingstore.tracings.volume.UpdateBucketVolumeAction import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} import scalapb.GeneratedMessage @@ -20,7 +21,6 @@ import scala.concurrent.ExecutionContext class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, tracingDataStore: 
TracingDataStore) extends KeyValueStoreImplicits { - def storeUpdate(updateAction: GeneratedMessage)(implicit ec: ExecutionContext): Fox[Unit] = Fox.successful(()) def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String]): Fox[Unit] = for { @@ -42,15 +42,22 @@ class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl previousVersion: Long, userToken: Option[String]): Fox[Unit] = // TODO apply some updates directly? transform to compact? - tracingDataStore.annotationUpdates.put( - annotationId, - updateActionGroup.version, - updateActionGroup.actions - .map(_.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { //to the first action in the group, attach the group's info - case Nil => List[UpdateAction]() - case first :: rest => first.addInfo(updateActionGroup.info) :: rest - } - ) + tracingDataStore.annotationUpdates.put(annotationId, + updateActionGroup.version, + preprocessActionsForStorage(updateActionGroup)) + + private def preprocessActionsForStorage(updateActionGroup: UpdateActionGroup): List[UpdateAction] = { + val actionsWithInfo = updateActionGroup.actions.map( + _.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { + case Nil => List[UpdateAction]() + //to the first action in the group, attach the group's info + case first :: rest => first.addInfo(updateActionGroup.info) :: rest + } + actionsWithInfo.map { + case a: UpdateBucketVolumeAction => a.transformToCompact // TODO or not? 
+ case a => a + } + } def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage)( implicit ec: ExecutionContext): Fox[AnnotationProto] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index 169f6e9d6f1..1858251c968 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -1,5 +1,9 @@ package com.scalableminds.webknossos.tracingstore.annotation +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + MergeAgglomerateUpdateAction, + SplitAgglomerateUpdateAction +} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ CreateEdgeSkeletonAction, CreateNodeSkeletonAction, @@ -22,19 +26,20 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ UpdateUserBoundingBoxesSkeletonAction } import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + CompactVolumeUpdateAction, CreateSegmentVolumeAction, DeleteSegmentDataVolumeAction, DeleteSegmentVolumeAction, - ImportVolumeData, - RemoveFallbackLayer, + ImportVolumeDataVolumeAction, + RemoveFallbackLayerVolumeAction, UpdateBucketVolumeAction, UpdateMappingNameVolumeAction, UpdateSegmentGroupsVolumeAction, UpdateSegmentVolumeAction, - UpdateTdCamera, + UpdateTdCameraVolumeAction, UpdateTracingVolumeAction, - UpdateUserBoundingBoxVisibility, - UpdateUserBoundingBoxes + UpdateUserBoundingBoxVisibilityVolumeAction, + UpdateUserBoundingBoxesVolumeAction } import play.api.libs.json.{Format, JsError, JsObject, JsPath, JsResult, JsValue, Json, OFormat, Reads} @@ -56,6 +61,7 @@ object UpdateAction { override def reads(json: JsValue): JsResult[UpdateAction] = { val jsonValue = (json \ "value").as[JsObject] (json 
\ "name").as[String] match { + // Skeletons case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) @@ -74,21 +80,30 @@ object UpdateAction { case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) + + // Volumes case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) - case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxes](jsonValue) - case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibility](jsonValue) - case "removeFallbackLayer" => deserialize[RemoveFallbackLayer](jsonValue) - case "importVolumeTracing" => deserialize[ImportVolumeData](jsonValue) - case "updateTdCamera" => deserialize[UpdateTdCamera](jsonValue) + case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) + case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) + case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) + case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) + case "updateTdCamera" => deserialize[UpdateTdCameraVolumeAction](jsonValue) case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) case "updateMappingName" => 
deserialize[UpdateMappingNameVolumeAction](jsonValue) - case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") - } // TODO revertToVersion + + // Editable Mappings + case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) + case "splitAgglomerate" => deserialize[SplitAgglomerateUpdateAction](jsonValue) + + // TODO: Annotation, RevertToVersion + + case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") + } } private def deserialize[T](json: JsValue, shouldTransformPositions: Boolean = false)( @@ -101,7 +116,7 @@ object UpdateAction { private val positionTransform = (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) - override def writes(a: UpdateAction): JsObject = a match { + override def writes(a: UpdateAction): JsValue = a match { case s: CreateTreeSkeletonAction => Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) case s: DeleteTreeSkeletonAction => @@ -145,6 +160,37 @@ object UpdateAction { "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilitySkeletonAction.jsonFormat)) case s: UpdateTdCameraSkeletonAction => Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraSkeletonAction.jsonFormat)) + + case s: UpdateBucketVolumeAction => + Json.obj("name" -> "updateBucket", "value" -> Json.toJson(s)(UpdateBucketVolumeAction.jsonFormat)) + case s: UpdateTracingVolumeAction => + Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) + case s: UpdateUserBoundingBoxesVolumeAction => + Json.obj("name" -> "updateUserBoundingBoxes", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxesVolumeAction.jsonFormat)) + case s: UpdateUserBoundingBoxVisibilityVolumeAction => + Json.obj("name" -> "updateUserBoundingBoxVisibility", + "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilityVolumeAction.jsonFormat)) + case s: 
RemoveFallbackLayerVolumeAction => + Json.obj("name" -> "removeFallbackLayer", "value" -> Json.toJson(s)(RemoveFallbackLayerVolumeAction.jsonFormat)) + case s: ImportVolumeDataVolumeAction => + Json.obj("name" -> "importVolumeTracing", "value" -> Json.toJson(s)(ImportVolumeDataVolumeAction.jsonFormat)) + case s: UpdateTdCameraVolumeAction => + Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraVolumeAction.jsonFormat)) + case s: CreateSegmentVolumeAction => + Json.obj("name" -> "createSegment", "value" -> Json.toJson(s)(CreateSegmentVolumeAction.jsonFormat)) + case s: UpdateSegmentVolumeAction => + Json.obj("name" -> "updateSegment", "value" -> Json.toJson(s)(UpdateSegmentVolumeAction.jsonFormat)) + case s: DeleteSegmentVolumeAction => + Json.obj("name" -> "deleteSegment", "value" -> Json.toJson(s)(DeleteSegmentVolumeAction.jsonFormat)) + case s: UpdateSegmentGroupsVolumeAction => + Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) + case s: CompactVolumeUpdateAction => Json.toJson(s)(CompactVolumeUpdateAction.compactVolumeUpdateActionFormat) + + case s: SplitAgglomerateUpdateAction => + Json.obj("name" -> "splitAgglomerate", "value" -> Json.toJson(s)(SplitAgglomerateUpdateAction.jsonFormat)) + case s: MergeAgglomerateUpdateAction => + Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) } } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 96f01b912da..7f037238097 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -1,12 +1,11 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.webknossos.tracingstore.annotation.UpdateAction import play.api.libs.json.Format.GenericFormat import play.api.libs.json._ -trait EditableMappingUpdateAction { - def addTimestamp(timestamp: Long): EditableMappingUpdateAction -} +trait EditableMappingUpdateAction extends UpdateAction // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. @@ -16,9 +15,16 @@ case class SplitAgglomerateUpdateAction(agglomerateId: Long, segmentId1: Option[Long], segmentId2: Option[Long], mag: Vec3Int, - actionTimestamp: Option[Long] = None) + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends EditableMappingUpdateAction { override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) } object SplitAgglomerateUpdateAction { @@ -34,53 +40,18 @@ case class MergeAgglomerateUpdateAction(agglomerateId1: Long, segmentId1: Option[Long], segmentId2: Option[Long], mag: Vec3Int, - actionTimestamp: Option[Long] = None) + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends EditableMappingUpdateAction { override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) -} -object MergeAgglomerateUpdateAction { - implicit val jsonFormat: 
OFormat[MergeAgglomerateUpdateAction] = Json.format[MergeAgglomerateUpdateAction] -} + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) -case class RevertToVersionUpdateAction(sourceVersion: Long, actionTimestamp: Option[Long] = None) - extends EditableMappingUpdateAction { - override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) } -object RevertToVersionUpdateAction { - implicit val jsonFormat: OFormat[RevertToVersionUpdateAction] = Json.format[RevertToVersionUpdateAction] -} - -object EditableMappingUpdateAction { - - implicit object editableMappingUpdateActionFormat extends Format[EditableMappingUpdateAction] { - override def reads(json: JsValue): JsResult[EditableMappingUpdateAction] = - (json \ "name").validate[String].flatMap { - case "mergeAgglomerate" => (json \ "value").validate[MergeAgglomerateUpdateAction] - case "splitAgglomerate" => (json \ "value").validate[SplitAgglomerateUpdateAction] - case "revertToVersion" => (json \ "value").validate[RevertToVersionUpdateAction] - case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") - } - - override def writes(o: EditableMappingUpdateAction): JsValue = o match { - case s: SplitAgglomerateUpdateAction => - Json.obj("name" -> "splitAgglomerate", "value" -> Json.toJson(s)(SplitAgglomerateUpdateAction.jsonFormat)) - case s: MergeAgglomerateUpdateAction => - Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) - case s: RevertToVersionUpdateAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionUpdateAction.jsonFormat)) - } - } - -} - -case class EditableMappingUpdateActionGroup( - version: Long, - timestamp: Long, - actions: List[EditableMappingUpdateAction] -) - -object 
EditableMappingUpdateActionGroup { - implicit val jsonFormat: OFormat[EditableMappingUpdateActionGroup] = Json.format[EditableMappingUpdateActionGroup] +object MergeAgglomerateUpdateAction { + implicit val jsonFormat: OFormat[MergeAgglomerateUpdateAction] = Json.format[MergeAgglomerateUpdateAction] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 74c46726eae..7a19b438837 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -162,7 +162,7 @@ class VolumeTracingService @Inject()( Fox.failure("Cannot delete segment data for annotations without segment index.") } else deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." 
- case _: UpdateTdCamera => Fox.successful(tracing) + case _: UpdateTdCameraVolumeAction => Fox.successful(tracing) case a: ApplyableVolumeAction => Fox.successful(a.applyOn(tracing)) case _ => Fox.failure("Unknown action.") } @@ -909,7 +909,7 @@ class VolumeTracingService @Inject()( tracing.version + 1L, System.currentTimeMillis(), None, - List(AddSegmentIndex()), + List(AddSegmentIndexVolumeAction()), None, None, "dummyTransactionId", @@ -996,7 +996,7 @@ class VolumeTracingService @Inject()( tracing.version + 1, System.currentTimeMillis(), None, - List(ImportVolumeData(Some(mergedVolume.largestSegmentId.toPositiveLong))), + List(ImportVolumeDataVolumeAction(Some(mergedVolume.largestSegmentId.toPositiveLong))), None, None, "dummyTransactionId", diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index d8a2e36d743..360bd55c055 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -46,10 +46,6 @@ case class UpdateBucketVolumeAction(position: Vec3Int, CompactVolumeUpdateAction("updateBucket", actionTimestamp, actionAuthorId, Json.obj()) } -object UpdateBucketVolumeAction { - implicit val jsonFormat: OFormat[UpdateBucketVolumeAction] = Json.format[UpdateBucketVolumeAction] -} - case class UpdateTracingVolumeAction( activeSegmentId: Long, editPosition: Vec3Int, @@ -69,10 +65,6 @@ case class UpdateTracingVolumeAction( override def isViewOnlyChange: Boolean = true } -object UpdateTracingVolumeAction { - implicit val jsonFormat: OFormat[UpdateTracingVolumeAction] = Json.format[UpdateTracingVolumeAction] -} - case class RevertToVersionVolumeAction(sourceVersion: Long, actionTimestamp: Option[Long] = 
None, actionAuthorId: Option[String] = None, @@ -85,14 +77,10 @@ case class RevertToVersionVolumeAction(sourceVersion: Long, override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) } -object RevertToVersionVolumeAction { - implicit val jsonFormat: OFormat[RevertToVersionVolumeAction] = Json.format[RevertToVersionVolumeAction] -} - -case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateUserBoundingBoxesVolumeAction(boundingBoxes: List[NamedBoundingBox], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends ApplyableVolumeAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -104,15 +92,11 @@ case class UpdateUserBoundingBoxes(boundingBoxes: List[NamedBoundingBox], tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto))*/ } -object UpdateUserBoundingBoxes { - implicit val jsonFormat: OFormat[UpdateUserBoundingBoxes] = Json.format[UpdateUserBoundingBoxes] -} - -case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], - isVisible: Boolean, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int], + isVisible: Boolean, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends ApplyableVolumeAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = @@ -135,13 +119,9 @@ case class UpdateUserBoundingBoxVisibility(boundingBoxId: Option[Int], }*/ } -object UpdateUserBoundingBoxVisibility { - implicit val jsonFormat: 
OFormat[UpdateUserBoundingBoxVisibility] = Json.format[UpdateUserBoundingBoxVisibility] -} - -case class RemoveFallbackLayer(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class RemoveFallbackLayerVolumeAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends ApplyableVolumeAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = @@ -152,14 +132,10 @@ case class RemoveFallbackLayer(actionTimestamp: Option[Long] = None, tracing.clearFallbackLayer*/ } -object RemoveFallbackLayer { - implicit val jsonFormat: OFormat[RemoveFallbackLayer] = Json.format[RemoveFallbackLayer] -} - -case class ImportVolumeData(largestSegmentId: Option[Long], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class ImportVolumeDataVolumeAction(largestSegmentId: Option[Long], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends ApplyableVolumeAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = @@ -170,13 +146,9 @@ case class ImportVolumeData(largestSegmentId: Option[Long], tracing.copy(largestSegmentId = largestSegmentId)*/ } -object ImportVolumeData { - implicit val jsonFormat: OFormat[ImportVolumeData] = Json.format[ImportVolumeData] -} - -case class AddSegmentIndex(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class AddSegmentIndexVolumeAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends ApplyableVolumeAction { 
override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = @@ -188,13 +160,9 @@ case class AddSegmentIndex(actionTimestamp: Option[Long] = None, } -object AddSegmentIndex { - implicit val jsonFormat: OFormat[AddSegmentIndex] = Json.format[AddSegmentIndex] -} - -case class UpdateTdCamera(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateTdCameraVolumeAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends VolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = @@ -206,10 +174,6 @@ case class UpdateTdCamera(actionTimestamp: Option[Long] = None, override def isViewOnlyChange: Boolean = true } -object UpdateTdCamera { - implicit val jsonFormat: OFormat[UpdateTdCamera] = Json.format[UpdateTdCamera] -} - case class CreateSegmentVolumeAction(id: Long, anchorPosition: Option[Vec3Int], name: Option[String], @@ -242,10 +206,6 @@ case class CreateSegmentVolumeAction(id: Long, }*/ } -object CreateSegmentVolumeAction { - implicit val jsonFormat: OFormat[CreateSegmentVolumeAction] = Json.format[CreateSegmentVolumeAction] -} - case class UpdateSegmentVolumeAction(id: Long, anchorPosition: Option[Vec3Int], name: Option[String], @@ -280,10 +240,6 @@ case class UpdateSegmentVolumeAction(id: Long, }*/ } -object UpdateSegmentVolumeAction { - implicit val jsonFormat: OFormat[UpdateSegmentVolumeAction] = Json.format[UpdateSegmentVolumeAction] -} - case class DeleteSegmentVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, @@ -301,10 +257,6 @@ case class DeleteSegmentVolumeAction(id: Long, } -object DeleteSegmentVolumeAction { - implicit val jsonFormat: OFormat[DeleteSegmentVolumeAction] = Json.format[DeleteSegmentVolumeAction] -} - case class 
DeleteSegmentDataVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, @@ -316,10 +268,6 @@ case class DeleteSegmentDataVolumeAction(id: Long, override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) } -object DeleteSegmentDataVolumeAction { - implicit val jsonFormat: OFormat[DeleteSegmentDataVolumeAction] = Json.format[DeleteSegmentDataVolumeAction] -} - case class UpdateMappingNameVolumeAction(mappingName: Option[String], isEditable: Option[Boolean], isLocked: Option[Boolean], @@ -340,8 +288,19 @@ case class UpdateMappingNameVolumeAction(mappingName: Option[String], mappingIsLocked = Some(isLocked.getOrElse(false)))*/ } -object UpdateMappingNameVolumeAction { - implicit val jsonFormat: OFormat[UpdateMappingNameVolumeAction] = Json.format[UpdateMappingNameVolumeAction] +case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegmentGroup], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends VolumeUpdateAction + with VolumeUpdateActionHelper { + /*override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup))*/ + + override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) } case class CompactVolumeUpdateAction(name: String, @@ -374,75 +333,50 @@ object CompactVolumeUpdateAction { } } -case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegmentGroup], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends VolumeUpdateAction - with VolumeUpdateActionHelper { - /*override def applyOn(tracing: VolumeTracing): 
VolumeTracing = - tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup))*/ - - override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = - this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) +object UpdateBucketVolumeAction { + implicit val jsonFormat: OFormat[UpdateBucketVolumeAction] = Json.format[UpdateBucketVolumeAction] +} +object UpdateTracingVolumeAction { + implicit val jsonFormat: OFormat[UpdateTracingVolumeAction] = Json.format[UpdateTracingVolumeAction] +} +object RevertToVersionVolumeAction { + implicit val jsonFormat: OFormat[RevertToVersionVolumeAction] = Json.format[RevertToVersionVolumeAction] +} +object UpdateUserBoundingBoxesVolumeAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxesVolumeAction] = + Json.format[UpdateUserBoundingBoxesVolumeAction] +} +object UpdateUserBoundingBoxVisibilityVolumeAction { + implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibilityVolumeAction] = + Json.format[UpdateUserBoundingBoxVisibilityVolumeAction] +} +object RemoveFallbackLayerVolumeAction { + implicit val jsonFormat: OFormat[RemoveFallbackLayerVolumeAction] = Json.format[RemoveFallbackLayerVolumeAction] +} +object ImportVolumeDataVolumeAction { + implicit val jsonFormat: OFormat[ImportVolumeDataVolumeAction] = Json.format[ImportVolumeDataVolumeAction] +} +object AddSegmentIndexVolumeAction { + implicit val jsonFormat: OFormat[AddSegmentIndexVolumeAction] = Json.format[AddSegmentIndexVolumeAction] +} +object UpdateTdCameraVolumeAction { + implicit val jsonFormat: OFormat[UpdateTdCameraVolumeAction] = Json.format[UpdateTdCameraVolumeAction] +} +object CreateSegmentVolumeAction { + implicit val jsonFormat: OFormat[CreateSegmentVolumeAction] = Json.format[CreateSegmentVolumeAction] +} +object UpdateSegmentVolumeAction { + implicit val 
jsonFormat: OFormat[UpdateSegmentVolumeAction] = Json.format[UpdateSegmentVolumeAction] +} +object DeleteSegmentVolumeAction { + implicit val jsonFormat: OFormat[DeleteSegmentVolumeAction] = Json.format[DeleteSegmentVolumeAction] +} +object DeleteSegmentDataVolumeAction { + implicit val jsonFormat: OFormat[DeleteSegmentDataVolumeAction] = Json.format[DeleteSegmentDataVolumeAction] +} +object UpdateMappingNameVolumeAction { + implicit val jsonFormat: OFormat[UpdateMappingNameVolumeAction] = Json.format[UpdateMappingNameVolumeAction] } - object UpdateSegmentGroupsVolumeAction { implicit val jsonFormat: OFormat[UpdateSegmentGroupsVolumeAction] = Json.format[UpdateSegmentGroupsVolumeAction] } - -object VolumeUpdateAction { - - implicit object volumeUpdateActionFormat extends Format[VolumeUpdateAction] { - override def reads(json: JsValue): JsResult[VolumeUpdateAction] = - (json \ "name").validate[String].flatMap { - case "updateBucket" => (json \ "value").validate[UpdateBucketVolumeAction] - case "updateTracing" => (json \ "value").validate[UpdateTracingVolumeAction] - case "revertToVersion" => (json \ "value").validate[RevertToVersionVolumeAction] - case "updateUserBoundingBoxes" => (json \ "value").validate[UpdateUserBoundingBoxes] - case "updateUserBoundingBoxVisibility" => (json \ "value").validate[UpdateUserBoundingBoxVisibility] - case "removeFallbackLayer" => (json \ "value").validate[RemoveFallbackLayer] - case "importVolumeTracing" => (json \ "value").validate[ImportVolumeData] - case "updateTdCamera" => (json \ "value").validate[UpdateTdCamera] - case "createSegment" => (json \ "value").validate[CreateSegmentVolumeAction] - case "updateSegment" => (json \ "value").validate[UpdateSegmentVolumeAction] - case "updateSegmentGroups" => (json \ "value").validate[UpdateSegmentGroupsVolumeAction] - case "deleteSegment" => (json \ "value").validate[DeleteSegmentVolumeAction] - case "deleteSegmentData" => (json \ "value").validate[DeleteSegmentDataVolumeAction] - case 
"updateMappingName" => (json \ "value").validate[UpdateMappingNameVolumeAction] - case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") - } - - override def writes(o: VolumeUpdateAction): JsValue = o match { - case s: UpdateBucketVolumeAction => - Json.obj("name" -> "updateBucket", "value" -> Json.toJson(s)(UpdateBucketVolumeAction.jsonFormat)) - case s: UpdateTracingVolumeAction => - Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) - case s: RevertToVersionVolumeAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionVolumeAction.jsonFormat)) - case s: UpdateUserBoundingBoxes => - Json.obj("name" -> "updateUserBoundingBoxes", "value" -> Json.toJson(s)(UpdateUserBoundingBoxes.jsonFormat)) - case s: UpdateUserBoundingBoxVisibility => - Json.obj("name" -> "updateUserBoundingBoxVisibility", - "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibility.jsonFormat)) - case s: RemoveFallbackLayer => - Json.obj("name" -> "removeFallbackLayer", "value" -> Json.toJson(s)(RemoveFallbackLayer.jsonFormat)) - case s: ImportVolumeData => - Json.obj("name" -> "importVolumeTracing", "value" -> Json.toJson(s)(ImportVolumeData.jsonFormat)) - case s: UpdateTdCamera => - Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCamera.jsonFormat)) - case s: CreateSegmentVolumeAction => - Json.obj("name" -> "createSegment", "value" -> Json.toJson(s)(CreateSegmentVolumeAction.jsonFormat)) - case s: UpdateSegmentVolumeAction => - Json.obj("name" -> "updateSegment", "value" -> Json.toJson(s)(UpdateSegmentVolumeAction.jsonFormat)) - case s: DeleteSegmentVolumeAction => - Json.obj("name" -> "deleteSegment", "value" -> Json.toJson(s)(DeleteSegmentVolumeAction.jsonFormat)) - case s: UpdateSegmentGroupsVolumeAction => - Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) - case s: CompactVolumeUpdateAction => 
Json.toJson(s)(CompactVolumeUpdateAction.compactVolumeUpdateActionFormat) - } - } - -} From 21d1078b64c9e984768bfae0168466816661fd54 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 17 Jul 2024 11:46:59 +0200 Subject: [PATCH 022/361] further unify update actions, add tracingId to LayerUpdateActions --- .../annotation/UpdateActions.scala | 32 +++++++++-- .../controllers/VolumeTracingController.scala | 32 +++++------ .../EditableMappingService.scala | 5 +- .../skeleton/SkeletonTracingService.scala | 8 +-- .../updating/SkeletonUpdateActions.scala | 41 +++++++++---- .../volume/VolumeTracingService.scala | 20 +++---- .../tracings/volume/VolumeUpdateActions.scala | 57 +++++++++++++------ 7 files changed, 128 insertions(+), 67 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index 1858251c968..04c58bf3235 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -55,13 +55,17 @@ trait UpdateAction { def isViewOnlyChange: Boolean = false } +trait LayerUpdateAction extends UpdateAction { + def actionTracingId: String +} + object UpdateAction { implicit object updateActionFormat extends Format[UpdateAction] { override def reads(json: JsValue): JsResult[UpdateAction] = { val jsonValue = (json \ "value").as[JsObject] (json \ "name").as[String] match { - // Skeletons + // Skeleton case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) @@ -81,7 +85,7 @@ object UpdateAction { case "updateUserBoundingBoxVisibility" => 
deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) - // Volumes + // Volume case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) @@ -96,11 +100,15 @@ object UpdateAction { case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) - // Editable Mappings + // Editable Mapping case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) case "splitAgglomerate" => deserialize[SplitAgglomerateUpdateAction](jsonValue) - // TODO: Annotation, RevertToVersion + // Annotation + case "addLayerToAnnotation" => deserialize[AddLayerAnnotationUpdateAction](jsonValue) + case "deleteLayerFromAnnotation" => deserialize[DeleteLayerAnnotationUpdateAction](jsonValue) + case "updateLayerMetadata" => deserialize[UpdateLayerMetadataAnnotationUpdateAction](jsonValue) + case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationUpdateAction](jsonValue) case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") } @@ -117,6 +125,7 @@ object UpdateAction { (JsPath \ "position").json.update(JsPath.read[List[Float]].map(position => Json.toJson(position.map(_.toInt)))) override def writes(a: UpdateAction): JsValue = a match { + // Skeleton case s: CreateTreeSkeletonAction => Json.obj("name" -> "createTree", "value" -> Json.toJson(s)(CreateTreeSkeletonAction.jsonFormat)) case s: DeleteTreeSkeletonAction => @@ -161,6 +170,7 @@ object UpdateAction { case s: UpdateTdCameraSkeletonAction => Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraSkeletonAction.jsonFormat)) + // Volume case s: UpdateBucketVolumeAction => Json.obj("name" -> "updateBucket", "value" -> 
Json.toJson(s)(UpdateBucketVolumeAction.jsonFormat)) case s: UpdateTracingVolumeAction => @@ -187,10 +197,24 @@ object UpdateAction { Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) case s: CompactVolumeUpdateAction => Json.toJson(s)(CompactVolumeUpdateAction.compactVolumeUpdateActionFormat) + // Editable Mapping case s: SplitAgglomerateUpdateAction => Json.obj("name" -> "splitAgglomerate", "value" -> Json.toJson(s)(SplitAgglomerateUpdateAction.jsonFormat)) case s: MergeAgglomerateUpdateAction => Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) + + // Annotation + case s: AddLayerAnnotationUpdateAction => + Json.obj("name" -> "addLayerToAnnotation", "value" -> Json.toJson(s)(AddLayerAnnotationUpdateAction.jsonFormat)) + case s: DeleteLayerAnnotationUpdateAction => + Json.obj("name" -> "deleteLayerFromAnnotation", + "value" -> Json.toJson(s)(DeleteLayerAnnotationUpdateAction.jsonFormat)) + case s: UpdateLayerMetadataAnnotationUpdateAction => + Json.obj("name" -> "updateLayerMetadata", + "value" -> Json.toJson(s)(UpdateLayerMetadataAnnotationUpdateAction.jsonFormat)) + case s: UpdateMetadataAnnotationUpdateAction => + Json.obj("name" -> "updateMetadataOfAnnotation", + "value" -> Json.toJson(s)(UpdateMetadataAnnotationUpdateAction.jsonFormat)) } } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index dcfd07b026d..10acdac3bb8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -26,10 +26,10 @@ import com.scalableminds.webknossos.datastore.services.{ FullMeshRequest, 
UserAccessRequest } +import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingService, - EditableMappingUpdateActionGroup, MinCutParameters, NeighborsParameters } @@ -43,7 +43,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeSegmentStatisticsService, VolumeTracingService } -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits} import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, TSRemoteWebknossosClient, @@ -369,20 +369,20 @@ class VolumeTracingController @Inject()( (editableMappingId, editableMappingInfo) <- editableMappingService.create( baseMappingName = tracingMappingName) volumeUpdate = UpdateMappingNameVolumeAction(Some(editableMappingId), - isEditable = Some(true), - isLocked = Some(true), - actionTimestamp = Some(System.currentTimeMillis())) + isEditable = Some(true), + isLocked = Some(true), + actionTimestamp = Some(System.currentTimeMillis())) _ <- tracingService.handleUpdateGroup( tracingId, - UpdateActionGroup[VolumeTracing](tracing.version + 1, - System.currentTimeMillis(), - None, - List(volumeUpdate), - None, - None, - "dummyTransactionId", - 1, - 0), + UpdateActionGroup(tracing.version + 1, + System.currentTimeMillis(), + None, + List(volumeUpdate), + None, + None, + "dummyTransactionId", + 1, + 0), tracing.version, urlOrHeaderToken(token, request) ) @@ -428,8 +428,8 @@ class VolumeTracingController @Inject()( } } - def updateEditableMapping(token: Option[String], tracingId: String): Action[List[EditableMappingUpdateActionGroup]] = - Action.async(validateJson[List[EditableMappingUpdateActionGroup]]) { implicit request => + def updateEditableMapping(token: 
Option[String], tracingId: String): Action[List[UpdateActionGroup]] = + Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { for { tracing <- tracingService.find(tracingId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index ff66dd20ac2..672814aff9d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -21,6 +21,7 @@ import com.scalableminds.webknossos.datastore.services.{ AdHocMeshServiceHolder, BinaryDataService } +import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, @@ -264,9 +265,7 @@ class EditableMappingService @Inject()( _ => applyPendingUpdates(editableMappingId, desiredVersion, remoteFallbackLayer, userToken)) } yield (materializedInfo, desiredVersion) - def update(editableMappingId: String, - updateActionGroup: EditableMappingUpdateActionGroup, - newVersion: Long): Fox[Unit] = + def update(editableMappingId: String, updateActionGroup: UpdateActionGroup, newVersion: Long): Fox[Unit] = for { actionsWithTimestamp <- Fox.successful(updateActionGroup.actions.map(_.addTimestamp(updateActionGroup.timestamp))) _ <- tracingDataStore.editableMappingUpdates.put(editableMappingId, newVersion, actionsWithTimestamp) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 5be705493fa..5eb19317295 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -8,6 +8,7 @@ import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction.SkeletonUpdateAction import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ @@ -36,16 +37,13 @@ class SkeletonTracingService @Inject()( implicit val tracingCompanion: SkeletonTracing.type = SkeletonTracing - implicit val updateActionJsonFormat: SkeletonUpdateAction.skeletonUpdateActionFormat.type = - SkeletonUpdateAction.skeletonUpdateActionFormat - def currentVersion(tracingId: String): Fox[Long] = tracingDataStore.skeletonUpdates.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) def currentVersion(tracing: SkeletonTracing): Long = tracing.version def handleUpdateGroup(tracingId: String, - updateActionGroup: UpdateActionGroup[SkeletonTracing], + updateActionGroup: UpdateActionGroup, previousVersion: Long, userToken: Option[String]): Fox[_] = tracingDataStore.skeletonUpdates.put( @@ -118,7 +116,7 @@ class SkeletonTracingService @Inject()( case Full(tracing) => 
remainingUpdates match { case List() => Fox.successful(tracing) - case RevertToVersionSkeletonAction(sourceVersion, _, _, _) :: tail => + case RevertToVersionSkeletonAction(tracingId, sourceVersion, _, _, _) :: tail => val sourceTracing = find(tracingId, Some(sourceVersion), useCache = false, applyUpdates = true) updateIter(sourceTracing, tail) case update :: tail => updateIter(Full(update.applyOn(tracing)), tail) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index e24c7d59f68..cf43dc92e47 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -4,11 +4,11 @@ import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate -import com.scalableminds.webknossos.tracingstore.annotation.UpdateAction +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType.TreeType import play.api.libs.json._ -trait SkeletonUpdateAction extends UpdateAction +trait SkeletonUpdateAction extends LayerUpdateAction case class CreateTreeSkeletonAction(id: Int, color: Option[com.scalableminds.util.image.Color], @@ -18,11 +18,12 @@ case class CreateTreeSkeletonAction(id: Int, comments: List[UpdateActionComment], groupId: Option[Int], isVisible: Option[Boolean], + `type`: Option[TreeType] = None, + edgesAreVisible: Option[Boolean], + 
actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - `type`: Option[TreeType] = None, - edgesAreVisible: Option[Boolean]) + info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -51,6 +52,7 @@ case class CreateTreeSkeletonAction(id: Int, } case class DeleteTreeSkeletonAction(id: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -74,10 +76,11 @@ case class UpdateTreeSkeletonAction(id: Int, branchPoints: List[UpdateActionBranchPoint], comments: List[UpdateActionComment], groupId: Option[Int], + `type`: Option[TreeType] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - `type`: Option[TreeType] = None) + info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -104,6 +107,7 @@ case class UpdateTreeSkeletonAction(id: Int, case class MergeTreeSkeletonAction(sourceId: Int, targetId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -136,6 +140,7 @@ case class MergeTreeSkeletonAction(sourceId: Int, case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], sourceId: Int, targetId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -179,6 +184,7 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], case class CreateEdgeSkeletonAction(source: Int, target: Int, treeId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) 
@@ -201,6 +207,7 @@ case class CreateEdgeSkeletonAction(source: Int, case class DeleteEdgeSkeletonAction(source: Int, target: Int, treeId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -230,10 +237,11 @@ case class CreateNodeSkeletonAction(id: Int, interpolation: Option[Boolean], treeId: Int, timestamp: Long, + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) + info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { @@ -276,10 +284,11 @@ case class UpdateNodeSkeletonAction(id: Int, interpolation: Option[Boolean], treeId: Int, timestamp: Long, + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) + info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { @@ -318,6 +327,7 @@ case class UpdateNodeSkeletonAction(id: Int, case class DeleteNodeSkeletonAction(nodeId: Int, treeId: Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -341,6 +351,7 @@ case class DeleteNodeSkeletonAction(nodeId: Int, } case class UpdateTreeGroupsSkeletonAction(treeGroups: List[UpdateActionTreeGroup], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -363,6 +374,7 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], editRotation: 
com.scalableminds.util.geometry.Vec3Double, zoomLevel: Double, userBoundingBox: Option[com.scalableminds.util.geometry.BoundingBox], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None, @@ -389,6 +401,7 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], } case class RevertToVersionSkeletonAction(sourceVersion: Long, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -407,6 +420,7 @@ case class RevertToVersionSkeletonAction(sourceVersion: Long, case class UpdateTreeVisibilitySkeletonAction(treeId: Int, isVisible: Boolean, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -429,6 +443,7 @@ case class UpdateTreeVisibilitySkeletonAction(treeId: Int, case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], isVisible: Boolean, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -467,6 +482,7 @@ case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, edgesAreVisible: Boolean, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -489,6 +505,7 @@ case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, } case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundingBox], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -507,6 +524,7 @@ case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundi case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[Int], isVisible: Boolean, + 
actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -534,6 +552,7 @@ case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[I -case class UpdateTdCameraSkeletonAction(actionTimestamp: Option[Long] = None, +case class UpdateTdCameraSkeletonAction(actionTracingId: String, + actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) extends SkeletonUpdateAction { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 7a19b438837..5732d209dc4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -23,6 +23,7 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.services._ +import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService @@ -77,9 +78,6 @@ class VolumeTracingService @Inject()( implicit val tracingCompanion: VolumeTracing.type = VolumeTracing - implicit val updateActionJsonFormat: VolumeUpdateAction.volumeUpdateActionFormat.type = - VolumeUpdateAction.volumeUpdateActionFormat - val tracingType: TracingType = TracingType.volume val tracingStore: FossilDBClient = tracingDataStore.volumes @@ -117,7 +115,7 @@ class VolumeTracingService @Inject()( editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" def handleUpdateGroup(tracingId: String, - updateGroup: 
UpdateActionGroup[VolumeTracing], + updateGroup: UpdateActionGroup, previousVersion: Long, userToken: Option[String]): Fox[Unit] = for { @@ -162,9 +160,9 @@ class VolumeTracingService @Inject()( Fox.failure("Cannot delete segment data for annotations without segment index.") } else deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." - case _: UpdateTdCameraVolumeAction => Fox.successful(tracing) - case a: ApplyableVolumeAction => Fox.successful(a.applyOn(tracing)) - case _ => Fox.failure("Unknown action.") + case _: UpdateTdCameraVolumeAction => Fox.successful(tracing) + case a: ApplyableVolumeAction => Fox.successful(a.applyOn(tracing)) + case _ => Fox.failure("Unknown action.") } case Empty => Fox.empty @@ -905,11 +903,11 @@ class VolumeTracingService @Inject()( editableMappingTracingId(tracing, tracingId)) } _ <- Fox.runIf(!dryRun)(segmentIndexBuffer.flush()) - updateGroup = UpdateActionGroup[VolumeTracing]( + updateGroup = UpdateActionGroup( tracing.version + 1L, System.currentTimeMillis(), None, - List(AddSegmentIndexVolumeAction()), + List(AddSegmentIndexVolumeAction(tracingId)), None, None, "dummyTransactionId", @@ -992,11 +990,11 @@ class VolumeTracingService @Inject()( } yield () } _ <- segmentIndexBuffer.flush() - updateGroup = UpdateActionGroup[VolumeTracing]( + updateGroup = UpdateActionGroup( tracing.version + 1, System.currentTimeMillis(), None, - List(ImportVolumeDataVolumeAction(Some(mergedVolume.largestSegmentId.toPositiveLong))), + List(ImportVolumeDataVolumeAction(tracingId, Some(mergedVolume.largestSegmentId.toPositiveLong))), None, None, "dummyTransactionId", diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 360bd55c055..c36ade7c817 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.VolumeTracing.{Segment, SegmentGroup, VolumeTracing} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate -import com.scalableminds.webknossos.tracingstore.annotation.UpdateAction +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} import com.scalableminds.webknossos.tracingstore.tracings.NamedBoundingBox import play.api.libs.json._ @@ -22,7 +22,7 @@ trait VolumeUpdateActionHelper { } -trait VolumeUpdateAction extends UpdateAction +trait VolumeUpdateAction extends LayerUpdateAction trait ApplyableVolumeAction extends VolumeUpdateAction @@ -30,10 +30,11 @@ case class UpdateBucketVolumeAction(position: Vec3Int, cubeSize: Int, mag: Vec3Int, base64Data: String, + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) + info: Option[String] = None) extends VolumeUpdateAction { lazy val data: Array[Byte] = Base64.getDecoder.decode(base64Data) @@ -43,7 +44,7 @@ case class UpdateBucketVolumeAction(position: Vec3Int, override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateBucket", actionTimestamp, actionAuthorId, Json.obj()) + CompactVolumeUpdateAction("updateBucket", Json.obj(), actionTracingId, actionTimestamp, actionAuthorId, info) } case class 
UpdateTracingVolumeAction( @@ -52,10 +53,11 @@ case class UpdateTracingVolumeAction( editRotation: Vec3Double, largestSegmentId: Option[Long], zoomLevel: Double, + editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - info: Option[String] = None, - editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None + info: Option[String] = None ) extends VolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = @@ -66,6 +68,7 @@ case class UpdateTracingVolumeAction( } case class RevertToVersionVolumeAction(sourceVersion: Long, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -78,6 +81,7 @@ case class RevertToVersionVolumeAction(sourceVersion: Long, } case class UpdateUserBoundingBoxesVolumeAction(boundingBoxes: List[NamedBoundingBox], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -94,6 +98,7 @@ case class UpdateUserBoundingBoxesVolumeAction(boundingBoxes: List[NamedBounding case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int], isVisible: Boolean, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -119,7 +124,8 @@ case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int }*/ } -case class RemoveFallbackLayerVolumeAction(actionTimestamp: Option[Long] = None, +case class RemoveFallbackLayerVolumeAction(actionTracingId: String, + actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) extends ApplyableVolumeAction { @@ -132,7 +138,8 
@@ case class RemoveFallbackLayerVolumeAction(actionTimestamp: Option[Long] = None, tracing.clearFallbackLayer*/ } -case class ImportVolumeDataVolumeAction(largestSegmentId: Option[Long], +case class ImportVolumeDataVolumeAction(actionTracingId: String, + largestSegmentId: Option[Long], actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -146,7 +153,8 @@ case class ImportVolumeDataVolumeAction(largestSegmentId: Option[Long], tracing.copy(largestSegmentId = largestSegmentId)*/ } -case class AddSegmentIndexVolumeAction(actionTimestamp: Option[Long] = None, +case class AddSegmentIndexVolumeAction(actionTracingId: String, + actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) extends ApplyableVolumeAction { @@ -160,7 +168,8 @@ case class AddSegmentIndexVolumeAction(actionTimestamp: Option[Long] = None, } -case class UpdateTdCameraVolumeAction(actionTimestamp: Option[Long] = None, +case class UpdateTdCameraVolumeAction(actionTracingId: String, + actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) extends VolumeUpdateAction { @@ -180,9 +189,10 @@ case class CreateSegmentVolumeAction(id: Long, color: Option[com.scalableminds.util.image.Color], groupId: Option[Int], creationTime: Option[Long], + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, info: Option[String] = None) extends ApplyableVolumeAction with ProtoGeometryImplicits { @@ -212,9 +222,10 @@ case class UpdateSegmentVolumeAction(id: Long, color: Option[com.scalableminds.util.image.Color], creationTime: Option[Long], groupId: Option[Int], + additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, + actionTracingId: String, actionTimestamp: Option[Long] = None, 
actionAuthorId: Option[String] = None, - additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, info: Option[String] = None) extends ApplyableVolumeAction with ProtoGeometryImplicits @@ -241,6 +252,7 @@ case class UpdateSegmentVolumeAction(id: Long, } case class DeleteSegmentVolumeAction(id: Long, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -258,6 +270,7 @@ case class DeleteSegmentVolumeAction(id: Long, } case class DeleteSegmentDataVolumeAction(id: Long, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -271,6 +284,7 @@ case class DeleteSegmentDataVolumeAction(id: Long, case class UpdateMappingNameVolumeAction(mappingName: Option[String], isEditable: Option[Boolean], isLocked: Option[Boolean], + actionTracingId: String, actionTimestamp: Option[Long], actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -289,6 +303,7 @@ case class UpdateMappingNameVolumeAction(mappingName: Option[String], } case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegmentGroup], + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -304,9 +319,10 @@ case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegme } case class CompactVolumeUpdateAction(name: String, + value: JsObject, + actionTracingId: String, actionTimestamp: Option[Long], actionAuthorId: Option[String] = None, - value: JsObject, info: Option[String] = None) extends VolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -320,16 +336,23 @@ object CompactVolumeUpdateAction { override def reads(json: JsValue): JsResult[CompactVolumeUpdateAction] = for { name <- (json \ "name").validate[String] + actionTracingId <- (json \ 
"value" \ "actionTracingId").validate[String] actionTimestamp <- (json \ "value" \ "actionTimestamp").validateOpt[Long] actionAuthorId <- (json \ "value" \ "actionAuthorId").validateOpt[String] info <- (json \ "value" \ "info").validateOpt[String] value <- (json \ "value") .validate[JsObject] - .map(_ - "actionTimestamp") // TODO also separate out info + actionAuthorId - } yield CompactVolumeUpdateAction(name, actionTimestamp, actionAuthorId, value, info) + .map(_ - "actionTimestamp" - "actionTimestamp" - "actionAuthorId" - "info") + } yield CompactVolumeUpdateAction(name, value, actionTracingId, actionTimestamp, actionAuthorId, info) override def writes(o: CompactVolumeUpdateAction): JsValue = - Json.obj("name" -> o.name, "value" -> (Json.obj("actionTimestamp" -> o.actionTimestamp) ++ o.value)) + Json.obj( + "name" -> o.name, + "value" -> (Json.obj("actionTracingId" -> o.actionTracingId, + "actionTimestamp" -> o.actionTimestamp, + "actionAuthorId" -> o.actionAuthorId, + "info" -> o.info) ++ o.value) + ) } } From 24e8fd210c2e715d0297e8900fc3085380e6762b Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 17 Jul 2024 14:41:00 +0200 Subject: [PATCH 023/361] remove unused stuff, log routes, access checks --- app/controllers/UserTokenController.scala | 47 ++++++------ .../services/AccessTokenService.scala | 3 + .../AnnotationTransactionService.scala | 2 +- .../annotation/AnnotationUpdateActions.scala | 16 +++++ ...ervice.scala => TSAnnotationService.scala} | 57 ++++++++++++++- .../SkeletonTracingController.scala | 27 ------- ...ler.scala => TSAnnotationController.scala} | 38 ++++++++-- .../controllers/TracingController.scala | 66 ++++------------- .../controllers/VolumeTracingController.scala | 15 +--- .../tracings/TracingService.scala | 16 ++--- .../EditableMappingService.scala | 43 ++++------- .../EditableMappingUpdater.scala | 8 ++- .../skeleton/SkeletonTracingService.scala | 72 ++----------------- .../volume/VolumeTracingService.scala | 11 +-- 
...alableminds.webknossos.tracingstore.routes | 10 ++- 15 files changed, 191 insertions(+), 240 deletions(-) rename webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/{DSAnnotationService.scala => TSAnnotationService.scala} (62%) rename webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/{DSAnnotationController.scala => TSAnnotationController.scala} (62%) diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 410850dd38b..45ca19ae3fe 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -98,6 +98,8 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, handleDataSourceAccess(accessRequest.resourceId, accessRequest.mode, userBox)(sharingTokenAccessCtx) case AccessResourceType.tracing => handleTracingAccess(accessRequest.resourceId.name, accessRequest.mode, userBox, token) + case AccessResourceType.annotation => + handleAnnotationAccess(accessRequest.resourceId.name, accessRequest.mode, userBox, token) case AccessResourceType.jobExport => handleJobExportAccess(accessRequest.resourceId.name, accessRequest.mode, userBox) case _ => @@ -160,7 +162,19 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, private def handleTracingAccess(tracingId: String, mode: AccessMode, userBox: Box[User], - token: Option[String]): Fox[UserAccessAnswer] = { + token: Option[String]): Fox[UserAccessAnswer] = + if (tracingId == TracingIds.dummyTracingId) + Fox.successful(UserAccessAnswer(granted = true)) + else + for { + annotation <- annotationInformationProvider.annotationForTracing(tracingId)(GlobalAccessContext) ?~> "annotation.notFound" + result <- handleAnnotationAccess(annotation._id.toString, mode, userBox, token) + } yield result + + private def handleAnnotationAccess(annotationId: String, + mode: AccessMode, + userBox: Box[User], + token: Option[String]): Fox[UserAccessAnswer] = { // Access is explicitly 
checked by userBox, not by DBAccessContext, as there is no token sharing for annotations // Optionally, an accessToken can be provided which explicitly looks up the read right the private link table @@ -171,25 +185,18 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, case _ => Fox.successful(false) } - if (tracingId == TracingIds.dummyTracingId) - Fox.successful(UserAccessAnswer(granted = true)) - else { - for { - annotation <- annotationInformationProvider.annotationForTracing(tracingId)(GlobalAccessContext) ?~> "annotation.notFound" - annotationAccessByToken <- token - .map(annotationPrivateLinkDAO.findOneByAccessToken) - .getOrElse(Fox.empty) - .futureBox - - allowedByToken = annotationAccessByToken.exists(annotation._id == _._annotation) - restrictions <- annotationInformationProvider.restrictionsFor( - AnnotationIdentifier(annotation.typ, annotation._id))(GlobalAccessContext) ?~> "restrictions.notFound" - allowedByUser <- checkRestrictions(restrictions) ?~> "restrictions.failedToCheck" - allowed = allowedByToken || allowedByUser - } yield { - if (allowed) UserAccessAnswer(granted = true) - else UserAccessAnswer(granted = false, Some(s"No ${mode.toString} access to tracing")) - } + // TODO is a dummy annotation id needed? 
+ for { + annotation <- annotationInformationProvider.provideAnnotation(annotationId, userBox)(GlobalAccessContext) ?~> "annotation.notFound" + annotationAccessByToken <- token.map(annotationPrivateLinkDAO.findOneByAccessToken).getOrElse(Fox.empty).futureBox + allowedByToken = annotationAccessByToken.exists(annotation._id == _._annotation) + restrictions <- annotationInformationProvider.restrictionsFor( + AnnotationIdentifier(annotation.typ, annotation._id))(GlobalAccessContext) ?~> "restrictions.notFound" + allowedByUser <- checkRestrictions(restrictions) ?~> "restrictions.failedToCheck" + allowed = allowedByToken || allowedByUser + } yield { + if (allowed) UserAccessAnswer(granted = true) + else UserAccessAnswer(granted = false, Some(s"No ${mode.toString} access to tracing")) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index b21c415b13c..c7a593414fb 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -46,6 +46,9 @@ object UserAccessRequest { def writeTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.write) + def readAnnotation(annotationId: String): UserAccessRequest = + UserAccessRequest(DataSourceId(annotationId, ""), AccessResourceType.annotation, AccessMode.read) + def writeAnnotation(annotationId: String): UserAccessRequest = UserAccessRequest(DataSourceId(annotationId, ""), AccessResourceType.annotation, AccessMode.write) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index a29f90e1f0a..3c7f5f48354 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -13,7 +13,7 @@ import scala.concurrent.duration._ class AnnotationTransactionService @Inject()( handledGroupIdStore: TracingStoreRedisStore, // TODO: instantiate here rather than with injection, give fix namespace prefix? uncommittedUpdatesStore: TracingStoreRedisStore, - annotationService: DSAnnotationService) { + annotationService: TSAnnotationService) { private val transactionGroupExpiry: FiniteDuration = 24 hours private val handledGroupCacheExpiry: FiniteDuration = 24 hours diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index 725634f66e8..a00e21e9a7c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -59,6 +59,18 @@ case class UpdateMetadataAnnotationUpdateAction(name: Option[String], this.copy(actionAuthorId = authorId) } +case class RevertToVersionUpdateAction(sourceVersion: Long, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + 
this.copy(actionAuthorId = authorId) +} + object AddLayerAnnotationUpdateAction { implicit val jsonFormat: OFormat[AddLayerAnnotationUpdateAction] = Json.format[AddLayerAnnotationUpdateAction] } @@ -73,3 +85,7 @@ object UpdateMetadataAnnotationUpdateAction { implicit val jsonFormat: OFormat[UpdateMetadataAnnotationUpdateAction] = Json.format[UpdateMetadataAnnotationUpdateAction] } +object RevertToVersionUpdateAction { + implicit val jsonFormat: OFormat[RevertToVersionUpdateAction] = + Json.format[RevertToVersionUpdateAction] +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala similarity index 62% rename from webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala rename to webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c0c634e5f74..ef5fe9ab81c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/DSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -10,15 +10,21 @@ import com.scalableminds.webknossos.datastore.Annotation.{ UpdateLayerMetadataAnnotationUpdateAction, UpdateMetadataAnnotationUpdateAction } +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ + CreateNodeSkeletonAction, + DeleteNodeSkeletonAction, + UpdateTracingSkeletonAction +} import com.scalableminds.webknossos.tracingstore.tracings.volume.UpdateBucketVolumeAction import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} +import play.api.libs.json.{JsObject, JsValue, Json} import scalapb.GeneratedMessage import 
javax.inject.Inject import scala.concurrent.ExecutionContext -class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, +class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits { @@ -59,6 +65,18 @@ class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } } + private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( + implicit ec: ExecutionContext): Fox[List[UpdateAction]] = + if (desiredVersion == existingVersion) Fox.successful(List()) + else { + for { + updateActionGroups <- tracingDataStore.annotationUpdates.getMultipleVersions( + annotationId, + Some(desiredVersion), + Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } yield updateActionGroups.reverse.flatten + } + def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage)( implicit ec: ExecutionContext): Fox[AnnotationProto] = for { @@ -78,4 +96,41 @@ class DSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } } yield withAppliedChange.copy(version = withAppliedChange.version + 1L) + def updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { + def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = + Json.obj( + "version" -> tuple._1, + "value" -> Json.toJson(tuple._2) + ) + + for { + updateActionGroups <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + newestVersion, + oldestVersion)(fromJsonBytes[List[UpdateAction]]) + updateActionGroupsJs = updateActionGroups.map(versionedTupleToJson) + } yield Json.toJson(updateActionGroupsJs) + } + + def updateActionStatistics(tracingId: String): Fox[JsObject] = + for { + updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions(tracingId)( + fromJsonBytes[List[UpdateAction]]) + updateActions = 
updateActionGroups.flatten + } yield { + Json.obj( + "updateTracingActionCount" -> updateActions.count { + case _: UpdateTracingSkeletonAction => true + case _ => false + }, + "createNodeActionCount" -> updateActions.count { + case _: CreateNodeSkeletonAction => true + case _ => false + }, + "deleteNodeActionCount" -> updateActions.count { + case _: DeleteNodeSkeletonAction => true + case _ => false + } + ) + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index f56b6d0d26e..b2d0640788f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -76,31 +76,4 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer } } - def updateActionLog(token: Option[String], - tracingId: String, - newestVersion: Option[Long], - oldestVersion: Option[Long]): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - updateLog <- tracingService.updateActionLog(tracingId, newestVersion, oldestVersion) - } yield { - Ok(updateLog) - } - } - } - } - - def updateActionStatistics(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - statistics <- tracingService.updateActionStatistics(tracingId) - } yield { - Ok(statistics) - } - } - } - } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala similarity index 62% rename from webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala rename to webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index d03ea14c69e..cf65236e3b5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/DSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -6,21 +6,26 @@ import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService -import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.annotation.{ + AnnotationTransactionService, + TSAnnotationService, + UpdateActionGroup +} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scala.concurrent.ExecutionContext -class DSAnnotationController @Inject()( +class TSAnnotationController @Inject()( accessTokenService: TracingStoreAccessTokenService, slackNotificationService: TSSlackNotificationService, + annotationService: TSAnnotationService, annotationTransactionService: AnnotationTransactionService, tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { - def initialize(annotationId: String, token: Option[String]): Action[AnyContent] = + def initialize(token: Option[String], 
annotationId: String): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { @@ -31,7 +36,7 @@ class DSAnnotationController @Inject()( } } - def update(annotationId: String, token: Option[String]): Action[List[UpdateActionGroup]] = + def update(token: Option[String], annotationId: String): Action[List[UpdateActionGroup]] = Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { @@ -47,6 +52,31 @@ class DSAnnotationController @Inject()( } } + def updateActionLog(token: Option[String], + annotationId: String, + newestVersion: Option[Long] = None, + oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId), + urlOrHeaderToken(token, request)) { + for { + updateLog <- annotationService.updateActionLog(annotationId, newestVersion, oldestVersion) + } yield Ok(updateLog) + } + } + } + + def updateActionStatistics(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { + implicit request => + log() { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + for { + statistics <- annotationService.updateActionStatistics(tracingId) + } yield Ok(statistics) + } + } + } + } // get version history diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 4644d58e6a1..bad8308b096 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -1,25 
+1,16 @@ package com.scalableminds.webknossos.tracingstore.controllers -import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest +import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.{ - TracingSelector, - TracingService, - UpdateAction, - UpdateActionGroup -} -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteWebknossosClient, - TracingStoreAccessTokenService, - TracingUpdatesReport -} +import com.scalableminds.webknossos.tracingstore.tracings.{TracingSelector, TracingService} +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} import net.liftweb.common.{Empty, Failure, Full} import play.api.i18n.Messages -import play.api.libs.json.{Format, Json} +import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scalapb.{GeneratedMessage, GeneratedMessageCompanion} @@ -46,8 +37,6 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C implicit def packMultipleOpt(tracings: List[Option[T]]): Ts - implicit val updateActionJsonFormat: Format[UpdateAction[T]] = tracingService.updateActionJsonFormat - implicit val ec: ExecutionContext implicit val bodyParsers: PlayBodyParsers @@ -121,8 +110,8 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def update(token: Option[String], tracingId: String): Action[List[UpdateActionGroup[T]]] = - Action.async(validateJson[List[UpdateActionGroup[T]]]) { implicit request => + def update(token: Option[String], tracingId: String): Action[List[UpdateActionGroup]] 
= + Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { @@ -148,7 +137,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C private def handleUpdateGroupForTransaction(tracingId: String, previousVersionFox: Fox[Long], - updateGroup: UpdateActionGroup[T], + updateGroup: UpdateActionGroup, userToken: Option[String]): Fox[Long] = for { previousCommittedVersion: Long <- previousVersionFox @@ -180,7 +169,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction // and commit them all. private def commitWithPending(tracingId: String, - updateGroup: UpdateActionGroup[T], + updateGroup: UpdateActionGroup, userToken: Option[String]): Fox[Long] = for { previousActionGroupsToCommit <- tracingService.getAllUncommittedFor(tracingId, updateGroup.transactionId) @@ -192,12 +181,12 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C _ <- tracingService.removeAllUncommittedFor(tracingId, updateGroup.transactionId) } yield commitResult - private def concatenateUpdateGroupsOfTransaction(previousActionGroups: List[UpdateActionGroup[T]], - lastActionGroup: UpdateActionGroup[T]): UpdateActionGroup[T] = + private def concatenateUpdateGroupsOfTransaction(previousActionGroups: List[UpdateActionGroup], + lastActionGroup: UpdateActionGroup): UpdateActionGroup = if (previousActionGroups.isEmpty) lastActionGroup else { val allActionGroups = previousActionGroups :+ lastActionGroup - UpdateActionGroup[T]( + UpdateActionGroup( version = lastActionGroup.version, timestamp = lastActionGroup.timestamp, authorId = lastActionGroup.authorId, @@ -212,41 +201,14 @@ trait TracingController[T <: 
GeneratedMessage, Ts <: GeneratedMessage] extends C // Perform version check and commit the passed updates private def commitUpdates(tracingId: String, - updateGroups: List[UpdateActionGroup[T]], - userToken: Option[String]): Fox[Long] = { - val currentCommittedVersion: Fox[Long] = tracingService.currentVersion(tracingId) - val report = TracingUpdatesReport( - tracingId, - timestamps = updateGroups.map(g => Instant(g.timestamp)), - statistics = updateGroups.flatMap(_.stats).lastOption, - significantChangesCount = updateGroups.map(_.significantChangesCount).sum, - viewChangesCount = updateGroups.map(_.viewChangesCount).sum, - userToken - ) - remoteWebknossosClient.reportTracingUpdates(report).flatMap { _ => - updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => - previousVersion.flatMap { prevVersion: Long => - if (prevVersion + 1 == updateGroup.version) { - tracingService - .handleUpdateGroup(tracingId, updateGroup, prevVersion, userToken) - .flatMap( - _ => - tracingService.saveToHandledGroupIdStore(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) - .map(_ => updateGroup.version) - } else failUnlessAlreadyHandled(updateGroup, tracingId, prevVersion) - } - } - } - } + updateGroups: List[UpdateActionGroup], + userToken: Option[String]): Fox[Long] = ??? /* If this update group has already been “handled” (successfully saved as either committed or uncommitted), * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful * despite the backend receiving it just fine. */ - private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup[T], + private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup, tracingId: String, previousVersion: Long): Fox[Long] = { val errorMessage = s"Incorrect version. 
Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 10acdac3bb8..0c733f0ed9a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -371,6 +371,7 @@ class VolumeTracingController @Inject()( volumeUpdate = UpdateMappingNameVolumeAction(Some(editableMappingId), isEditable = Some(true), isLocked = Some(true), + actionTracingId = tracingId, actionTimestamp = Some(System.currentTimeMillis())) _ <- tracingService.handleUpdateGroup( tracingId, @@ -453,20 +454,6 @@ class VolumeTracingController @Inject()( } } - def editableMappingUpdateActionLog(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox - _ <- bool2Fox(tracing.getMappingIsEditable) ?~> "Mapping is not editable" - updateLog <- editableMappingService.updateActionLog(mappingName) - } yield Ok(updateLog) - } - } - } - def editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 40bc6e1a123..7eacf130c2a 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.tracingstore.tracings import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.typesafe.scalalogging.LazyLogging @@ -46,8 +47,6 @@ trait TracingService[T <: GeneratedMessage] implicit def tracingCompanion: GeneratedMessageCompanion[T] - implicit val updateActionJsonFormat: Format[UpdateAction[T]] - // this should be longer than maxCacheTime in webknossos/AnnotationStore // so that the references saved there remain valid throughout their life private val temporaryStoreTimeout = 70 minutes @@ -75,7 +74,7 @@ trait TracingService[T <: GeneratedMessage] transactionId: String, transactionGroupIndex: Int, version: Long, - updateGroup: UpdateActionGroup[T], + updateGroup: UpdateActionGroup, expiry: FiniteDuration): Fox[Unit] = for { _ <- Fox.runIf(transactionGroupIndex > 0)( @@ -90,11 +89,11 @@ trait TracingService[T <: GeneratedMessage] Some(expiry)) } yield () - def getAllUncommittedFor(tracingId: String, transactionId: String): Fox[List[UpdateActionGroup[T]]] = + def getAllUncommittedFor(tracingId: String, transactionId: String): Fox[List[UpdateActionGroup]] = for { raw: Seq[String] <- uncommittedUpdatesStore.findAllConditional(patternFor(tracingId, transactionId)) - parsed: Seq[UpdateActionGroup[T]] = raw.flatMap(itemAsString => - JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[UpdateActionGroup[T]])) + parsed: Seq[UpdateActionGroup] = raw.flatMap(itemAsString => + 
JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[UpdateActionGroup])) } yield parsed.toList.sortBy(_.transactionGroupIndex) def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = @@ -109,11 +108,6 @@ trait TracingService[T <: GeneratedMessage] Fox.successful(tracing) } - def handleUpdateGroup(tracingId: String, - updateGroup: UpdateActionGroup[T], - previousVersion: Long, - userToken: Option[String]): Fox[_] - def applyPendingUpdates(tracing: T, tracingId: String, targetVersion: Option[Long]): Fox[T] = Fox.successful(tracing) def find(tracingId: String, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 672814aff9d..a61c9cd116d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -21,7 +21,7 @@ import com.scalableminds.webknossos.datastore.services.{ AdHocMeshServiceHolder, BinaryDataService } -import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup +import com.scalableminds.webknossos.tracingstore.annotation.{UpdateAction, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, @@ -36,7 +36,7 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import net.liftweb.common.Box.tryo import org.jgrapht.alg.flow.PushRelabelMFImpl import org.jgrapht.graph.{DefaultWeightedEdge, SimpleWeightedGraph} -import play.api.libs.json.{JsObject, JsValue, Json, OFormat} +import play.api.libs.json.{JsObject, Json, OFormat} import java.nio.file.Paths import java.util @@ 
-182,9 +182,8 @@ class EditableMappingService @Inject()( _ <- duplicateSegmentToAgglomerate(editableMappingId, newId, newVersion) _ <- duplicateAgglomerateToGraph(editableMappingId, newId, newVersion) updateActionsWithVersions <- getUpdateActionsWithVersions(editableMappingId, editableMappingInfoAndVersion._2, 0L) - _ <- Fox.serialCombined(updateActionsWithVersions) { - updateActionsWithVersion: (Long, List[EditableMappingUpdateAction]) => - tracingDataStore.editableMappingUpdates.put(newId, updateActionsWithVersion._1, updateActionsWithVersion._2) + _ <- Fox.serialCombined(updateActionsWithVersions) { updateActionsWithVersion: (Long, List[UpdateAction]) => + tracingDataStore.editableMappingUpdates.put(newId, updateActionsWithVersion._1, updateActionsWithVersion._2) } } yield newId @@ -222,20 +221,6 @@ class EditableMappingService @Inject()( } yield () } - def updateActionLog(editableMappingId: String): Fox[JsValue] = { - def versionedTupleToJson(tuple: (Long, List[EditableMappingUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - - for { - updates <- tracingDataStore.editableMappingUpdates.getMultipleVersionsAsVersionValueTuple(editableMappingId)( - fromJsonBytes[List[EditableMappingUpdateAction]]) - updateActionGroupsJs = updates.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - def getInfo(editableMappingId: String, version: Option[Long] = None, remoteFallbackLayer: RemoteFallbackLayer, @@ -311,7 +296,7 @@ class EditableMappingService @Inject()( private def getPendingUpdates(editableMappingId: String, closestMaterializedVersion: Long, - closestMaterializableVersion: Long): Fox[List[EditableMappingUpdateAction]] = + closestMaterializableVersion: Long): Fox[List[UpdateAction]] = if (closestMaterializableVersion == closestMaterializedVersion) { Fox.successful(List.empty) } else { @@ -322,22 +307,20 @@ class EditableMappingService @Inject()( } yield updates.map(_._2).reverse.flatten } - 
private def getUpdateActionsWithVersions( - editableMappingId: String, - newestVersion: Long, - oldestVersion: Long): Fox[List[(Long, List[EditableMappingUpdateAction])]] = { + private def getUpdateActionsWithVersions(editableMappingId: String, + newestVersion: Long, + oldestVersion: Long): Fox[List[(Long, List[UpdateAction])]] = { val batchRanges = batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) for { updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => val batchFrom = batchRange._1 val batchTo = batchRange._2 for { - res <- tracingDataStore.editableMappingUpdates - .getMultipleVersionsAsVersionValueTuple[List[EditableMappingUpdateAction]]( - editableMappingId, - Some(batchTo), - Some(batchFrom) - )(fromJsonBytes[List[EditableMappingUpdateAction]]) + res <- tracingDataStore.editableMappingUpdates.getMultipleVersionsAsVersionValueTuple[List[UpdateAction]]( + editableMappingId, + Some(batchTo), + Some(batchFrom) + )(fromJsonBytes[List[UpdateAction]]) } yield res } ?~> "Failed to fetch editable mapping update actions from fossilDB" flat = updateActionBatches.flatten diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 89303d32040..6f6e0aa1d61 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -9,6 +9,7 @@ import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentToAgglomerateChunkProto } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient +import com.scalableminds.webknossos.tracingstore.annotation.{RevertToVersionUpdateAction, UpdateAction} import 
com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, @@ -53,7 +54,7 @@ class EditableMappingUpdater( private val agglomerateToGraphBuffer: mutable.Map[String, (AgglomerateGraph, Boolean)] = new mutable.HashMap[String, (AgglomerateGraph, Boolean)]() - def applyUpdatesAndSave(existingEditabeMappingInfo: EditableMappingInfo, updates: List[EditableMappingUpdateAction])( + def applyUpdatesAndSave(existingEditabeMappingInfo: EditableMappingInfo, updates: List[UpdateAction])( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = for { updatedEditableMappingInfo <- updateIter(Some(existingEditabeMappingInfo), updates) @@ -86,7 +87,7 @@ class EditableMappingUpdater( tracingDataStore.editableMappingsAgglomerateToGraph.put(key, newVersion, valueToFlush) } - private def updateIter(mappingFox: Fox[EditableMappingInfo], remainingUpdates: List[EditableMappingUpdateAction])( + private def updateIter(mappingFox: Fox[EditableMappingInfo], remainingUpdates: List[UpdateAction])( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = mappingFox.futureBox.flatMap { case Empty => @@ -107,7 +108,7 @@ class EditableMappingUpdater( mappingFox } - private def applyOneUpdate(mapping: EditableMappingInfo, update: EditableMappingUpdateAction)( + private def applyOneUpdate(mapping: EditableMappingInfo, update: UpdateAction)( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = update match { case splitAction: SplitAgglomerateUpdateAction => @@ -116,6 +117,7 @@ class EditableMappingUpdater( applyMergeAction(mapping, mergeAction) ?~> "Failed to apply merge action" case revertAction: RevertToVersionUpdateAction => revertToVersion(revertAction) ?~> "Failed to apply revert action" + case _ => Fox.failure("this is not an editable mapping update action!") } private def applySplitAction(editableMappingInfo: EditableMappingInfo, update: SplitAgglomerateUpdateAction)( diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 5eb19317295..a70eb859f7d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -8,14 +8,12 @@ import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup -import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction.SkeletonUpdateAction +import com.scalableminds.webknossos.tracingstore.annotation.LayerUpdateAction import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import net.liftweb.common.{Box, Empty, Full} import play.api.i18n.MessagesProvider -import play.api.libs.json.{JsObject, JsValue, Json} import scala.concurrent.ExecutionContext @@ -42,20 +40,6 @@ class SkeletonTracingService @Inject()( def currentVersion(tracing: SkeletonTracing): Long = tracing.version - def handleUpdateGroup(tracingId: String, - updateActionGroup: UpdateActionGroup, - previousVersion: Long, - userToken: Option[String]): Fox[_] = - tracingDataStore.skeletonUpdates.put( - tracingId, - updateActionGroup.version, - updateActionGroup.actions - .map(_.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) 
match { //to the first action in the group, attach the group's info - case Nil => Nil - case first :: rest => first.addInfo(updateActionGroup.info) :: rest - } - ) - override def applyPendingUpdates(tracing: SkeletonTracing, tracingId: String, desiredVersion: Option[Long]): Fox[SkeletonTracing] = { @@ -94,16 +78,9 @@ class SkeletonTracingService @Inject()( private def findPendingUpdates(tracingId: String, existingVersion: Long, - desiredVersion: Long): Fox[List[SkeletonUpdateAction]] = - if (desiredVersion == existingVersion) Fox.successful(List()) - else { - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions( - tracingId, - Some(desiredVersion), - Some(existingVersion + 1))(fromJsonBytes[List[SkeletonUpdateAction]]) - } yield updateActionGroups.reverse.flatten - } + desiredVersion: Long): Fox[List[SkeletonUpdateAction]] = ??? + + private def applyUpdateOn(tracing: SkeletonTracing, update: LayerUpdateAction): SkeletonTracing = ??? private def update(tracing: SkeletonTracing, tracingId: String, @@ -116,10 +93,10 @@ class SkeletonTracingService @Inject()( case Full(tracing) => remainingUpdates match { case List() => Fox.successful(tracing) - case RevertToVersionSkeletonAction(tracingId, sourceVersion, _, _, _) :: tail => + case RevertToVersionSkeletonAction(sourceVersion, tracingId, _, _, _) :: tail => val sourceTracing = find(tracingId, Some(sourceVersion), useCache = false, applyUpdates = true) updateIter(sourceTracing, tail) - case update :: tail => updateIter(Full(update.applyOn(tracing)), tail) + case update :: tail => updateIter(Full(applyUpdateOn(tracing, update)), tail) } case _ => tracingFox } @@ -210,43 +187,6 @@ class SkeletonTracingService @Inject()( userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] = Fox.successful(MergedVolumeStats.empty()) - def updateActionLog(tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { - def versionedTupleToJson(tuple: 
(Long, List[SkeletonUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersionsAsVersionValueTuple( - tracingId, - newestVersion, - oldestVersion)(fromJsonBytes[List[SkeletonUpdateAction]]) - updateActionGroupsJs = updateActionGroups.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - - def updateActionStatistics(tracingId: String): Fox[JsObject] = - for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions(tracingId)( - fromJsonBytes[List[SkeletonUpdateAction]]) - updateActions = updateActionGroups.flatten - } yield { - Json.obj( - "updateTracingActionCount" -> updateActions.count { - case _: UpdateTracingSkeletonAction => true - case _ => false - }, - "createNodeActionCount" -> updateActions.count { - case _: CreateNodeSkeletonAction => true - case _ => false - }, - "deleteNodeActionCount" -> updateActions.count { - case _: DeleteNodeSkeletonAction => true - case _ => false - } - ) - } - def dummyTracing: SkeletonTracing = SkeletonTracingDefaults.createInstance def mergeEditableMappings(tracingsWithIds: List[(SkeletonTracing, String)], userToken: Option[String]): Fox[String] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 5732d209dc4..53d507dbfd9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -114,6 +114,8 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" + private def applyUpdateOn(tracing: VolumeTracing, update: 
ApplyableVolumeAction): VolumeTracing = ??? + def handleUpdateGroup(tracingId: String, updateGroup: UpdateActionGroup, previousVersion: Long, @@ -161,7 +163,7 @@ class VolumeTracingService @Inject()( } else deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." case _: UpdateTdCameraVolumeAction => Fox.successful(tracing) - case a: ApplyableVolumeAction => Fox.successful(a.applyOn(tracing)) + case a: ApplyableVolumeAction => Fox.successful(applyUpdateOn(tracing, a)) case _ => Fox.failure("Unknown action.") } case Empty => @@ -172,10 +174,9 @@ class VolumeTracingService @Inject()( } _ <- segmentIndexBuffer.flush() _ <- save(updatedTracing.copy(version = updateGroup.version), Some(tracingId), updateGroup.version) - _ <- tracingDataStore.volumeUpdates.put( - tracingId, - updateGroup.version, - updateGroup.actions.map(_.addTimestamp(updateGroup.timestamp)).map(_.transformToCompact)) + _ <- tracingDataStore.volumeUpdates.put(tracingId, + updateGroup.version, + updateGroup.actions.map(_.addTimestamp(updateGroup.timestamp))) } yield Fox.successful(()) private def updateBucket(tracingId: String, diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index da0b1f93131..d4079a604d8 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -5,8 +5,10 @@ # Health endpoint GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health -POST /annotation/initialize @com.scalableminds.webknossos.tracingstore.controllers.DSAnnotationController.initialize(annotationId: String, token: Option[String]) -POST /annotation/update @com.scalableminds.webknossos.tracingstore.controllers.DSAnnotationController.update(annotationId: String, token: Option[String]) 
+POST /annotation/initialize @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.initialize(token: Option[String], annotationId: String) +POST /annotation/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(token: Option[String], annotationId: String) +POST /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(token: Option[String], annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) +GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(token: Option[String], annotationId: String) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) @@ -18,7 +20,6 @@ POST /volume/:tracingId/update @com.scalablemin GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(token: Option[String], tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(token: Option[String], tracingId: String) POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(token: Option[String], tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) -GET /volume/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateActionLog(token: Option[String], tracingId: String, newestVersion: Option[Long], oldestVersion: 
Option[Long]) POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(token: Option[String], tracingId: String) POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(token: Option[String], tracingId: String) POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(token: Option[String], tracingId: String, segmentId: Long) @@ -37,7 +38,6 @@ POST /volume/mergedFromContents @com.scalablemin # Editable Mappings POST /mapping/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateEditableMapping(token: Option[String], tracingId: String) -GET /mapping/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingUpdateActionLog(token: Option[String], tracingId: String) GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long]) GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingSegmentIdsForAgglomerate(token: Option[String], tracingId: String, agglomerateId: Long) GET /mapping/:tracingId/agglomerateIdForSegmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingAgglomerateIdForSegmentId(token: Option[String], tracingId: String, segmentId: Long) @@ -64,8 +64,6 @@ POST /skeleton/mergedFromIds @com.scalablemin GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(token: Option[String], tracingId: String, version: Option[Long]) GET /skeleton/:tracingId/newestVersion 
@com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.newestVersion(token: Option[String], tracingId: String) -GET /skeleton/:tracingId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.updateActionStatistics(token: Option[String], tracingId: String) -GET /skeleton/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.updateActionLog(token: Option[String], tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String]) POST /skeleton/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.update(token: Option[String], tracingId: String) From 2cbadf24754cc7956840d0d89ffa2ab7ef56c5d0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 23 Jul 2024 14:09:40 +0200 Subject: [PATCH 024/361] fetch pending updates --- .../annotation/AnnotationLayerType.scala | 7 ++ .../annotation/AnnotationUpdateActions.scala | 4 +- .../annotation/TSAnnotationService.scala | 94 +++++++++++++++---- .../annotation/UpdateActions.scala | 7 +- .../controllers/TSAnnotationController.scala | 17 ++++ .../tracings/TracingDataStore.scala | 4 +- ...alableminds.webknossos.tracingstore.routes | 3 +- 7 files changed, 112 insertions(+), 24 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala index 0a9576b91aa..756180cbbd8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala @@ -1,8 +1,15 @@ package 
com.scalableminds.webknossos.datastore.models.annotation import com.scalableminds.util.enumeration.ExtendedEnumeration +import com.scalableminds.webknossos.datastore.Annotation.AnnotationLayerTypeProto object AnnotationLayerType extends ExtendedEnumeration { type AnnotationLayerType = Value val Skeleton, Volume = Value + + def toProto(annotationLayerType: AnnotationLayerType): AnnotationLayerTypeProto = + annotationLayerType match { + case Skeleton => AnnotationLayerTypeProto.skeleton + case Volume => AnnotationLayerTypeProto.volume + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index a00e21e9a7c..e9fa3268163 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -7,7 +7,7 @@ trait AnnotationUpdateAction extends UpdateAction case class AddLayerAnnotationUpdateAction(layerName: String, tracingId: String, - typ: AnnotationLayerType, + `type`: AnnotationLayerType, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -21,7 +21,7 @@ case class AddLayerAnnotationUpdateAction(layerName: String, case class DeleteLayerAnnotationUpdateAction(tracingId: String, layerName: String, // Just stored for nicer-looking history - typ: AnnotationLayerType, // Just stored for nicer-looking history + `type`: AnnotationLayerType, // Just stored for nicer-looking history actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index ef5fe9ab81c..04ef116adc3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -2,14 +2,8 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.Annotation.{ - AddLayerAnnotationUpdateAction, - AnnotationLayerProto, - AnnotationProto, - DeleteLayerAnnotationUpdateAction, - UpdateLayerMetadataAnnotationUpdateAction, - UpdateMetadataAnnotationUpdateAction -} +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ CreateNodeSkeletonAction, DeleteNodeSkeletonAction, @@ -18,8 +12,8 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ import com.scalableminds.webknossos.tracingstore.tracings.volume.UpdateBucketVolumeAction import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} +import net.liftweb.common.{Empty, Full} import play.api.libs.json.{JsObject, JsValue, Json} -import scalapb.GeneratedMessage import javax.inject.Inject import scala.concurrent.ExecutionContext @@ -77,24 +71,26 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield updateActionGroups.reverse.flatten } - def applyUpdate(annotation: AnnotationProto, updateAction: GeneratedMessage)( + def applyUpdate(annotation: AnnotationProto, updateAction: UpdateAction)( implicit 
ec: ExecutionContext): Fox[AnnotationProto] = for { - - withAppliedChange <- updateAction match { + updated <- updateAction match { case a: AddLayerAnnotationUpdateAction => Fox.successful( - annotation.copy(layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, a.name, `type` = a.`type`))) + annotation.copy( + layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, + a.layerName, + `type` = AnnotationLayerType.toProto(a.`type`)))) case a: DeleteLayerAnnotationUpdateAction => Fox.successful(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId))) case a: UpdateLayerMetadataAnnotationUpdateAction => Fox.successful(annotation.copy(layers = annotation.layers.map(l => - if (l.tracingId == a.tracingId) l.copy(name = a.name) else l))) + if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l))) case a: UpdateMetadataAnnotationUpdateAction => Fox.successful(annotation.copy(name = a.name, description = a.description)) - case _ => Fox.failure("Received unsupported AnnotationUpdaetAction action") + case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") } - } yield withAppliedChange.copy(version = withAppliedChange.version + 1L) + } yield updated.copy(version = updated.version + 1L) def updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = @@ -112,6 +108,72 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield Json.toJson(updateActionGroupsJs) } + def get(annotationId: String, version: Option[Long], applyUpdates: Boolean, userToken: Option[String])( + implicit ec: ExecutionContext): Fox[AnnotationProto] = + for { + annotationWithVersion <- tracingDataStore.annotations.get(annotationId, version)(fromProtoBytes[AnnotationProto]) + annotation = annotationWithVersion.value + updated <- if (applyUpdates) applyPendingUpdates(annotation, 
annotationId, version, userToken) + else Fox.successful(annotation) + } yield updated + + private def applyPendingUpdates(annotation: AnnotationProto, + annotationId: String, + targetVersionOpt: Option[Long], + userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationProto] = + for { + targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) + updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) + updated <- applyUpdates(annotation, annotationId, updates, targetVersion, userToken) + } yield updated + + private def applyUpdates(annotation: AnnotationProto, + annotationId: String, + updates: List[UpdateAction], + targetVersion: Long, + userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationProto] = { + + def updateIter(tracingFox: Fox[AnnotationProto], remainingUpdates: List[UpdateAction]): Fox[AnnotationProto] = + tracingFox.futureBox.flatMap { + case Empty => Fox.empty + case Full(annotation) => + remainingUpdates match { + case List() => Fox.successful(annotation) + case RevertToVersionUpdateAction(sourceVersion, _, _, _) :: tail => + val sourceTracing = get(annotationId, Some(sourceVersion), applyUpdates = true, userToken) + updateIter(sourceTracing, tail) + case update :: tail => updateIter(applyUpdate(annotation, update), tail) + } + case _ => tracingFox + } + + if (updates.isEmpty) Full(annotation) + else { + for { + updated <- updateIter(Some(annotation), updates) + } yield updated.withVersion(targetVersion) + } + } + + private def determineTargetVersion(annotation: AnnotationProto, + annotationId: String, + targetVersionOpt: Option[Long]): Fox[Long] = + /* + * Determines the newest saved version from the updates column. 
+ * if there are no updates at all, assume annotation is brand new (possibly created from NML, + * hence the emptyFallback annotation.version) + */ + for { + newestUpdateVersion <- tracingDataStore.annotationUpdates.getVersion(annotationId, + mayBeEmpty = Some(true), + emptyFallback = Some(annotation.version)) + } yield { + targetVersionOpt match { + case None => newestUpdateVersion + case Some(desiredSome) => math.min(desiredSome, newestUpdateVersion) + } + } + def updateActionStatistics(tracingId: String): Fox[JsObject] = for { updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions(tracingId)( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index 04c58bf3235..ee0cfaf4820 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -86,9 +86,10 @@ object UpdateAction { deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) // Volume - case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) - case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) - case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) + case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) + case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) + case "updateUserBoundingBoxes" => + deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) // TODO: rename key (must be different from skeleton action) case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) case 
"importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index cf65236e3b5..6739b3697d0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -77,6 +77,23 @@ class TSAnnotationController @Inject()( } } + def get(token: Option[String], annotationId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId), + urlOrHeaderToken(token, request)) { + for { + annotationProto <- annotationService.get(annotationId, + version, + applyUpdates = false, + urlOrHeaderToken(token, request)) + } yield Ok(annotationProto.toByteArray).as(protobufMimeType) + } + } + } + } + } // get version history diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala index 4de87d378b3..f6651c5d090 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala @@ -22,8 +22,6 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val skeletons = new FossilDBClient("skeletons", config, slackNotificationService) - lazy val annotationUpdates = new FossilDBClient("annotationUpdates", config, slackNotificationService) - lazy val skeletonUpdates = new 
FossilDBClient("skeletonUpdates", config, slackNotificationService) lazy val volumes = new FossilDBClient("volumes", config, slackNotificationService) @@ -46,6 +44,8 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val annotations = new FossilDBClient("annotations", config, slackNotificationService) + lazy val annotationUpdates = new FossilDBClient("annotationUpdates", config, slackNotificationService) + private def shutdown(): Unit = { healthClient.shutdown() skeletons.shutdown() diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index d4079a604d8..c641445e98c 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -6,7 +6,8 @@ GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health POST /annotation/initialize @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.initialize(token: Option[String], annotationId: String) -POST /annotation/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(token: Option[String], annotationId: String) +GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(token: Option[String], annotationId: String, version: Option[Long]) +POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(token: Option[String], annotationId: String) POST /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(token: Option[String], annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics 
@com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(token: Option[String], annotationId: String) From e5f128edeaa032a2b09f1ef42807d9cb0ae092ea Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 29 Jul 2024 11:36:32 +0200 Subject: [PATCH 025/361] TracingCollection for updating --- .../annotation/TSAnnotationService.scala | 67 ++++++++++++++----- 1 file changed, 52 insertions(+), 15 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 04ef116adc3..8dbebe7ff9f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -3,13 +3,16 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ CreateNodeSkeletonAction, DeleteNodeSkeletonAction, + SkeletonUpdateAction, UpdateTracingSkeletonAction } -import com.scalableminds.webknossos.tracingstore.tracings.volume.UpdateBucketVolumeAction +import com.scalableminds.webknossos.tracingstore.tracings.volume.{UpdateBucketVolumeAction, VolumeUpdateAction} import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import 
com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} import net.liftweb.common.{Empty, Full} @@ -18,6 +21,8 @@ import play.api.libs.json.{JsObject, JsValue, Json} import javax.inject.Inject import scala.concurrent.ExecutionContext +case class TracingCollection(tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]]) {} + class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits { @@ -71,8 +76,9 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield updateActionGroups.reverse.flatten } - def applyUpdate(annotation: AnnotationProto, updateAction: UpdateAction)( - implicit ec: ExecutionContext): Fox[AnnotationProto] = + private def applyUpdate(annotationWithTracings: (AnnotationProto, TracingCollection), updateAction: UpdateAction)( + implicit ec: ExecutionContext): Fox[(AnnotationProto, TracingCollection)] = { + val annotation = annotationWithTracings._1 for { updated <- updateAction match { case a: AddLayerAnnotationUpdateAction => @@ -90,7 +96,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl Fox.successful(annotation.copy(name = a.name, description = a.description)) case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") } - } yield updated.copy(version = updated.version + 1L) + } yield (updated.copy(version = updated.version + 1L), annotationWithTracings._2) + } def updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = @@ -124,34 +131,64 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl for { targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) - updated <- 
applyUpdates(annotation, annotationId, updates, targetVersion, userToken) + tracingCollection <- findTracingsForUpdates(annotation, updates) + updated <- applyUpdates(annotation, tracingCollection, annotationId, updates, targetVersion, userToken) } yield updated + private def findTracingsForUpdates(annotation: AnnotationProto, updates: List[UpdateAction])( + implicit ec: ExecutionContext): Fox[TracingCollection] = { + val skeletonTracingIds = updates.flatMap { + case u: SkeletonUpdateAction => Some(u.actionTracingId) + case _ => None + } + val volumeTracingIds = updates.flatMap { + case u: VolumeUpdateAction => Some(u.actionTracingId) + case _ => None + } + for { + skeletonTracings <- Fox.serialCombined(skeletonTracingIds)( + id => + tracingDataStore.skeletons.get[SkeletonTracing](id, Some(annotation.version), mayBeEmpty = Some(true))( + fromProtoBytes[SkeletonTracing])) + volumeTracings <- Fox.serialCombined(volumeTracingIds)( + id => + tracingDataStore.volumes + .get[VolumeTracing](id, Some(annotation.version), mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing])) + skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds + .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) + .toMap + volumeTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = volumeTracingIds + .zip(volumeTracings.map(versioned => Right[SkeletonTracing, VolumeTracing](versioned.value))) + .toMap + } yield TracingCollection(skeletonTracingsMap ++ volumeTracingsMap) + } + private def applyUpdates(annotation: AnnotationProto, + tracingCollection: TracingCollection, annotationId: String, updates: List[UpdateAction], targetVersion: Long, userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationProto] = { - def updateIter(tracingFox: Fox[AnnotationProto], remainingUpdates: List[UpdateAction]): Fox[AnnotationProto] = - tracingFox.futureBox.flatMap { + def 
updateIter(annotationWithTracingsFox: Fox[(AnnotationProto, TracingCollection)], + remainingUpdates: List[UpdateAction]): Fox[(AnnotationProto, TracingCollection)] = + annotationWithTracingsFox.futureBox.flatMap { case Empty => Fox.empty - case Full(annotation) => + case Full(annotationWithTracings) => remainingUpdates match { - case List() => Fox.successful(annotation) + case List() => Fox.successful(annotationWithTracings) case RevertToVersionUpdateAction(sourceVersion, _, _, _) :: tail => - val sourceTracing = get(annotationId, Some(sourceVersion), applyUpdates = true, userToken) - updateIter(sourceTracing, tail) - case update :: tail => updateIter(applyUpdate(annotation, update), tail) + ??? + case update :: tail => updateIter(applyUpdate(annotationWithTracings, update), tail) } - case _ => tracingFox + case _ => annotationWithTracingsFox } if (updates.isEmpty) Full(annotation) else { for { - updated <- updateIter(Some(annotation), updates) - } yield updated.withVersion(targetVersion) + updated <- updateIter(Some((annotation, tracingCollection)), updates) + } yield updated._1.withVersion(targetVersion) } } From 201a1c90608512052544f1e9017c633c13fad83c Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 29 Jul 2024 12:02:59 +0200 Subject: [PATCH 026/361] AnnotationWithTracings --- .../annotation/TSAnnotationService.scala | 76 ++++++++++++------- 1 file changed, 50 insertions(+), 26 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 8dbebe7ff9f..9d83a840e4b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -21,6 +21,38 @@ import play.api.libs.json.{JsObject, JsValue, Json} import 
javax.inject.Inject import scala.concurrent.ExecutionContext +case class AnnotationWithTracings(annotation: AnnotationProto, + tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]]) { + def getSkeleton(tracingId: String): SkeletonTracing = ??? + + def version: Long = annotation.version + + def addTracing(a: AddLayerAnnotationUpdateAction): AnnotationWithTracings = + AnnotationWithTracings( + annotation.copy( + layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, + a.layerName, + `type` = AnnotationLayerType.toProto(a.`type`))), + tracingsById) + + def deleteTracing(a: DeleteLayerAnnotationUpdateAction): AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)), tracingsById) + + def updateLayerMetadata(a: UpdateLayerMetadataAnnotationUpdateAction): AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(layers = annotation.layers.map(l => + if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l)), + tracingsById) + + def updateMetadata(a: UpdateMetadataAnnotationUpdateAction): AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(name = a.name, description = a.description), tracingsById) + + def incrementVersion: AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(version = annotation.version + 1L), tracingsById) + + def withVersion(newVersion: Long): AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById) +} + case class TracingCollection(tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]]) {} class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, @@ -76,28 +108,21 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield updateActionGroups.reverse.flatten } - private def applyUpdate(annotationWithTracings: (AnnotationProto, TracingCollection), updateAction: UpdateAction)( - implicit ec: 
ExecutionContext): Fox[(AnnotationProto, TracingCollection)] = { - val annotation = annotationWithTracings._1 + private def applyUpdate(annotationWithTracings: AnnotationWithTracings, updateAction: UpdateAction)( + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { updated <- updateAction match { case a: AddLayerAnnotationUpdateAction => - Fox.successful( - annotation.copy( - layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, - a.layerName, - `type` = AnnotationLayerType.toProto(a.`type`)))) + Fox.successful(annotationWithTracings.addTracing(a)) case a: DeleteLayerAnnotationUpdateAction => - Fox.successful(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId))) + Fox.successful(annotationWithTracings.deleteTracing(a)) case a: UpdateLayerMetadataAnnotationUpdateAction => - Fox.successful(annotation.copy(layers = annotation.layers.map(l => - if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l))) + Fox.successful(annotationWithTracings.updateLayerMetadata(a)) case a: UpdateMetadataAnnotationUpdateAction => - Fox.successful(annotation.copy(name = a.name, description = a.description)) + Fox.successful(annotationWithTracings.updateMetadata(a)) case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") } - } yield (updated.copy(version = updated.version + 1L), annotationWithTracings._2) - } + } yield updated.incrementVersion def updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = @@ -131,12 +156,12 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl for { targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) - tracingCollection <- findTracingsForUpdates(annotation, updates) - updated <- applyUpdates(annotation, 
tracingCollection, annotationId, updates, targetVersion, userToken) - } yield updated + annotationWithTracings <- findTracingsForUpdates(annotation, updates) + updated <- applyUpdates(annotationWithTracings, annotationId, updates, targetVersion, userToken) + } yield updated.annotation private def findTracingsForUpdates(annotation: AnnotationProto, updates: List[UpdateAction])( - implicit ec: ExecutionContext): Fox[TracingCollection] = { + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { val skeletonTracingIds = updates.flatMap { case u: SkeletonUpdateAction => Some(u.actionTracingId) case _ => None @@ -160,18 +185,17 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl volumeTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = volumeTracingIds .zip(volumeTracings.map(versioned => Right[SkeletonTracing, VolumeTracing](versioned.value))) .toMap - } yield TracingCollection(skeletonTracingsMap ++ volumeTracingsMap) + } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap) } - private def applyUpdates(annotation: AnnotationProto, - tracingCollection: TracingCollection, + private def applyUpdates(annotation: AnnotationWithTracings, annotationId: String, updates: List[UpdateAction], targetVersion: Long, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationProto] = { + userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { - def updateIter(annotationWithTracingsFox: Fox[(AnnotationProto, TracingCollection)], - remainingUpdates: List[UpdateAction]): Fox[(AnnotationProto, TracingCollection)] = + def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], + remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = annotationWithTracingsFox.futureBox.flatMap { case Empty => Fox.empty case Full(annotationWithTracings) => @@ -187,8 +211,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: 
TSRemoteWebknossosCl if (updates.isEmpty) Full(annotation) else { for { - updated <- updateIter(Some((annotation, tracingCollection)), updates) - } yield updated._1.withVersion(targetVersion) + updated <- updateIter(Some(annotation), updates) + } yield updated.withVersion(targetVersion) } } From fd5ccd33721a0bfe66e88d08e0e9ac4f79f48f82 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 29 Jul 2024 13:31:54 +0200 Subject: [PATCH 027/361] applyOn skeleton --- .../annotation/TSAnnotationService.scala | 16 +++- .../updating/SkeletonUpdateActions.scala | 78 +++++++++---------- 2 files changed, 53 insertions(+), 41 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 9d83a840e4b..c9a818ea636 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.option2Fox import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -51,9 +52,17 @@ case class AnnotationWithTracings(annotation: AnnotationProto, def withVersion(newVersion: Long): AnnotationWithTracings = AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById) -} -case class TracingCollection(tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]]) {} + def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): 
Fox[AnnotationWithTracings] = + for { + skeletonTracingEither <- tracingsById.get(a.actionTracingId).toFox + skeletonTracing <- skeletonTracingEither match { + case Left(st: SkeletonTracing) => Fox.successful(st) + case _ => Fox.failure("wrong tracing type") + } + updated = a.applyOn(skeletonTracing) + } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Left(updated))) +} class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, tracingDataStore: TracingDataStore) @@ -113,6 +122,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl for { updated <- updateAction match { case a: AddLayerAnnotationUpdateAction => + // TODO create tracing object (ask wk for needed parameters e.g. fallback layer info?) Fox.successful(annotationWithTracings.addTracing(a)) case a: DeleteLayerAnnotationUpdateAction => Fox.successful(annotationWithTracings.deleteTracing(a)) @@ -120,6 +130,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl Fox.successful(annotationWithTracings.updateLayerMetadata(a)) case a: UpdateMetadataAnnotationUpdateAction => Fox.successful(annotationWithTracings.updateMetadata(a)) + case a: SkeletonUpdateAction => + annotationWithTracings.applySkeletonAction(a) case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") } } yield updated.incrementVersion diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index cf43dc92e47..0fe3c367958 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -2,13 +2,16 @@ 
package com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} -import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import com.scalableminds.webknossos.datastore.SkeletonTracing.{Edge, Node, SkeletonTracing, Tree, TreeGroup} +import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits} import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType.TreeType import play.api.libs.json._ -trait SkeletonUpdateAction extends LayerUpdateAction +trait SkeletonUpdateAction extends LayerUpdateAction { + def applyOn(tracing: SkeletonTracing): SkeletonTracing +} case class CreateTreeSkeletonAction(id: Int, color: Option[com.scalableminds.util.image.Color], @@ -26,7 +29,7 @@ case class CreateTreeSkeletonAction(id: Int, info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { val newTree = Tree( id, Nil, @@ -42,7 +45,7 @@ case class CreateTreeSkeletonAction(id: Int, edgesAreVisible ) tracing.withTrees(newTree +: tracing.trees) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -57,8 +60,8 @@ case class DeleteTreeSkeletonAction(id: Int, actionAuthorId: Option[String] = None, info: Option[String] = None) extends SkeletonUpdateAction { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - tracing.withTrees(tracing.trees.filter(_.treeId != id))*/ + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + 
tracing.withTrees(tracing.trees.filter(_.treeId != id)) override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -83,7 +86,7 @@ case class UpdateTreeSkeletonAction(id: Int, info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy( color = colorOptToProto(color).orElse(tree.color), @@ -96,7 +99,7 @@ case class UpdateTreeSkeletonAction(id: Int, ) tracing.withTrees(mapTrees(tracing, id, treeTransform)) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -113,7 +116,7 @@ case class MergeTreeSkeletonAction(sourceId: Int, info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /* + // only nodes and edges are merged here, // other properties are managed explicitly // by the frontend with extra actions @@ -126,7 +129,7 @@ case class MergeTreeSkeletonAction(sourceId: Int, } tracing.withTrees(mapTrees(tracing, targetId, treeTransform).filter(_.treeId != sourceId)) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -147,7 +150,6 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /* // this should only move a whole component, // that is disjoint from the rest of the tree override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { @@ -170,7 +172,6 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], tracing.withTrees(tracing.trees.map(selectTree)) } - */ override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -190,10 +191,10 @@ case class CreateEdgeSkeletonAction(source: Int, info: Option[String] = 
None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.withEdges(Edge(source, target) +: tree.edges) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -213,10 +214,10 @@ case class DeleteEdgeSkeletonAction(source: Int, info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(edges = tree.edges.filter(_ != Edge(source, target))) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -245,7 +246,7 @@ case class CreateNodeSkeletonAction(id: Int, extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { val rotationOrDefault = rotation getOrElse NodeDefaults.rotation val newNode = Node( id, @@ -263,7 +264,7 @@ case class CreateNodeSkeletonAction(id: Int, def treeTransform(tree: Tree) = tree.withNodes(newNode +: tree.nodes) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -292,7 +293,7 @@ case class UpdateNodeSkeletonAction(id: Int, extends SkeletonUpdateAction with SkeletonUpdateActionHelper with ProtoGeometryImplicits { - /* override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: 
SkeletonTracing): SkeletonTracing = { val rotationOrDefault = rotation getOrElse NodeDefaults.rotation val newNode = Node( @@ -313,7 +314,6 @@ case class UpdateNodeSkeletonAction(id: Int, tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) } - */ override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -333,13 +333,13 @@ case class DeleteNodeSkeletonAction(nodeId: Int, info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.withNodes(tree.nodes.filter(_.id != nodeId)) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -357,8 +357,8 @@ case class UpdateTreeGroupsSkeletonAction(treeGroups: List[UpdateActionTreeGroup info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - tracing.withTreeGroups(treeGroups.map(convertTreeGroup))*/ + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + tracing.withTreeGroups(treeGroups.map(convertTreeGroup)) override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -381,7 +381,7 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], editPositionAdditionalCoordinates: Option[Seq[AdditionalCoordinate]] = None) extends SkeletonUpdateAction with ProtoGeometryImplicits { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing.copy( editPosition = editPosition, editRotation = editRotation, @@ -389,7 +389,7 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], 
userBoundingBox = userBoundingBox, activeNodeId = activeNode, editPositionAdditionalCoordinates = AdditionalCoordinate.toProto(editPositionAdditionalCoordinates) - )*/ + ) override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -406,8 +406,8 @@ case class RevertToVersionSkeletonAction(sourceVersion: Long, actionAuthorId: Option[String] = None, info: Option[String] = None) extends SkeletonUpdateAction { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - throw new Exception("RevertToVersionAction applied on unversioned tracing")*/ + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + throw new Exception("RevertToVersionAction applied on unversioned tracing") override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -426,11 +426,11 @@ case class UpdateTreeVisibilitySkeletonAction(treeId: Int, info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(isVisible = Some(isVisible)) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -449,7 +449,7 @@ case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], info: Option[String] = None) extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateTreeGroups(treeGroups: Seq[TreeGroup]) = { def treeTransform(tree: Tree) = if (treeGroups.exists(group => tree.groupId.contains(group.groupId))) @@ -469,7 +469,7 @@ case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], 
.map(group => updateTreeGroups(GroupUtils.getAllChildrenTreeGroups(group))) .getOrElse(tracing) } - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -489,11 +489,11 @@ case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, extends SkeletonUpdateAction with SkeletonUpdateActionHelper { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def treeTransform(tree: Tree) = tree.copy(edgesAreVisible = Some(edgesAreVisible)) tracing.withTrees(mapTrees(tracing, treeId, treeTransform)) - }*/ + } override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -510,8 +510,8 @@ case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundi actionAuthorId: Option[String] = None, info: Option[String] = None) extends SkeletonUpdateAction { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto))*/ + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = + tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -529,7 +529,7 @@ case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[I actionAuthorId: Option[String] = None, info: Option[String] = None) extends SkeletonUpdateAction { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = { def updateUserBoundingBoxes() = tracing.userBoundingBoxes.map { boundingBox => if (boundingBoxId.forall(_ == boundingBox.id)) @@ -539,7 +539,7 @@ case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[I } tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) - }*/ + } override def 
addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -556,7 +556,7 @@ case class UpdateTdCameraSkeletonAction(actionTimestamp: Option[Long] = None, info: Option[String] = None) extends SkeletonUpdateAction { - /*override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing*/ + override def applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) From d8a6ddb40f6c2468cc491eef87ca7cbdeb8effb0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 29 Jul 2024 13:42:50 +0200 Subject: [PATCH 028/361] ApplyableVolumeUpdateAction --- .../annotation/TSAnnotationService.scala | 24 +++++++- .../volume/VolumeTracingService.scala | 4 +- .../tracings/volume/VolumeUpdateActions.scala | 61 ++++++++++--------- 3 files changed, 55 insertions(+), 34 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c9a818ea636..8f1d05cbfcf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -13,7 +13,11 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ SkeletonUpdateAction, UpdateTracingSkeletonAction } -import com.scalableminds.webknossos.tracingstore.tracings.volume.{UpdateBucketVolumeAction, VolumeUpdateAction} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ApplyableVolumeUpdateAction, + UpdateBucketVolumeAction, + VolumeUpdateAction +} import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, 
TracingUpdatesReport} import net.liftweb.common.{Empty, Full} @@ -51,7 +55,7 @@ case class AnnotationWithTracings(annotation: AnnotationProto, AnnotationWithTracings(annotation.copy(version = annotation.version + 1L), tracingsById) def withVersion(newVersion: Long): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById) + AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById) // TODO also update version in tracings? def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { @@ -62,6 +66,16 @@ case class AnnotationWithTracings(annotation: AnnotationProto, } updated = a.applyOn(skeletonTracing) } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Left(updated))) + + def applyVolumeAction(a: ApplyableVolumeUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + volumeTracingEither <- tracingsById.get(a.actionTracingId).toFox + volumeTracing <- volumeTracingEither match { + case Right(vt: VolumeTracing) => Fox.successful(vt) + case _ => Fox.failure("wrong tracing type") + } + updated = a.applyOn(volumeTracing) + } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Right(updated))) } class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, @@ -132,9 +146,13 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl Fox.successful(annotationWithTracings.updateMetadata(a)) case a: SkeletonUpdateAction => annotationWithTracings.applySkeletonAction(a) + case a: ApplyableVolumeUpdateAction => + annotationWithTracings.applyVolumeAction(a) + case a: VolumeUpdateAction => + Fox.successful(annotationWithTracings) // TODO case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") } - } yield updated.incrementVersion + } yield updated def updateActionLog(annotationId: String, newestVersion: 
Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 53d507dbfd9..792a893d3d6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -114,7 +114,7 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" - private def applyUpdateOn(tracing: VolumeTracing, update: ApplyableVolumeAction): VolumeTracing = ??? + private def applyUpdateOn(tracing: VolumeTracing, update: ApplyableVolumeUpdateAction): VolumeTracing = ??? def handleUpdateGroup(tracingId: String, updateGroup: UpdateActionGroup, @@ -163,7 +163,7 @@ class VolumeTracingService @Inject()( } else deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." 
case _: UpdateTdCameraVolumeAction => Fox.successful(tracing) - case a: ApplyableVolumeAction => Fox.successful(applyUpdateOn(tracing, a)) + case a: ApplyableVolumeUpdateAction => Fox.successful(applyUpdateOn(tracing, a)) case _ => Fox.failure("Unknown action.") } case Empty => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 625811c2f0e..a4b3ef91ac5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -3,6 +3,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import java.util.Base64 import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.VolumeTracing.{Segment, SegmentGroup, VolumeTracing} +import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} @@ -27,7 +28,9 @@ trait VolumeUpdateActionHelper { trait VolumeUpdateAction extends LayerUpdateAction -trait ApplyableVolumeAction extends VolumeUpdateAction +trait ApplyableVolumeUpdateAction extends VolumeUpdateAction { + def applyOn(tracing: VolumeTracing): VolumeTracing +} case class UpdateBucketVolumeAction(position: Vec3Int, cubeSize: Int, @@ -88,15 +91,15 @@ case class UpdateUserBoundingBoxesVolumeAction(boundingBoxes: List[NamedBounding actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction { + extends 
ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto))*/ + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) } case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int], @@ -105,7 +108,7 @@ case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction { + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) @@ -113,9 +116,9 @@ case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int override def isViewOnlyChange: Boolean = true - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = { + override def applyOn(tracing: VolumeTracing): VolumeTracing = { - def updateUserBoundingBoxes(): Seq[geometry.NamedBoundingBoxProto] = + def updateUserBoundingBoxes(): Seq[NamedBoundingBoxProto] = tracing.userBoundingBoxes.map { boundingBox => if (boundingBoxId.forall(_ == boundingBox.id)) boundingBox.copy(isVisible = Some(isVisible)) @@ -124,21 +127,21 @@ case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int } tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) - }*/ + } } case class RemoveFallbackLayerVolumeAction(actionTracingId: String, actionTimestamp: Option[Long] 
= None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction { + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.clearFallbackLayer*/ + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.clearFallbackLayer } case class ImportVolumeDataVolumeAction(actionTracingId: String, @@ -146,28 +149,28 @@ case class ImportVolumeDataVolumeAction(actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction { + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.copy(largestSegmentId = largestSegmentId)*/ + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.copy(largestSegmentId = largestSegmentId) } case class AddSegmentIndexVolumeAction(actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction { + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def 
addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.copy(hasSegmentIndex = Some(true))*/ + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.copy(hasSegmentIndex = Some(true)) } @@ -197,7 +200,7 @@ case class CreateSegmentVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction + extends ApplyableVolumeUpdateAction with ProtoGeometryImplicits { override def addTimestamp(timestamp: Long): VolumeUpdateAction = @@ -206,7 +209,7 @@ case class CreateSegmentVolumeAction(id: Long, this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = { + override def applyOn(tracing: VolumeTracing): VolumeTracing = { val newSegment = Segment(id, anchorPosition.map(vec3IntToProto), @@ -216,7 +219,7 @@ case class CreateSegmentVolumeAction(id: Long, groupId, AdditionalCoordinate.toProto(additionalCoordinates)) tracing.addSegments(newSegment) - }*/ + } } case class UpdateSegmentVolumeAction(id: Long, @@ -230,7 +233,7 @@ case class UpdateSegmentVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction + extends ApplyableVolumeUpdateAction with ProtoGeometryImplicits with VolumeUpdateActionHelper { @@ -240,7 +243,7 @@ case class UpdateSegmentVolumeAction(id: Long, this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = { + override def applyOn(tracing: VolumeTracing): VolumeTracing = { def segmentTransform(segment: Segment): Segment = segment.copy( anchorPosition = anchorPosition.map(vec3IntToProto), @@ -251,7 +254,7 @@ 
case class UpdateSegmentVolumeAction(id: Long, anchorPositionAdditionalCoordinates = AdditionalCoordinate.toProto(additionalCoordinates) ) tracing.withSegments(mapSegments(tracing, id, segmentTransform)) - }*/ + } } case class DeleteSegmentVolumeAction(id: Long, @@ -291,18 +294,18 @@ case class UpdateMappingNameVolumeAction(mappingName: Option[String], actionTimestamp: Option[Long], actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeAction { + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - /* override def applyOn(tracing: VolumeTracing): VolumeTracing = + override def applyOn(tracing: VolumeTracing): VolumeTracing = if (tracing.mappingIsLocked.getOrElse(false)) tracing // cannot change mapping name if it is locked else tracing.copy(mappingName = mappingName, mappingIsEditable = Some(isEditable.getOrElse(false)), - mappingIsLocked = Some(isLocked.getOrElse(false)))*/ + mappingIsLocked = Some(isLocked.getOrElse(false))) } case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegmentGroup], @@ -310,10 +313,10 @@ case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegme actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends VolumeUpdateAction + extends ApplyableVolumeUpdateAction with VolumeUpdateActionHelper { - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup))*/ + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.withSegmentGroups(segmentGroups.map(convertSegmentGroup)) override def addTimestamp(timestamp: Long): VolumeUpdateAction 
= this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = From 54444aab3d77bca67c31fc1d863e5be1b215a4bd Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jul 2024 11:59:45 +0200 Subject: [PATCH 029/361] make more update actions applyable --- .../annotation/TSAnnotationService.scala | 33 ++++++++++------ .../volume/VolumeTracingService.scala | 19 ++------- .../tracings/volume/VolumeUpdateActions.scala | 39 +++++++++---------- 3 files changed, 43 insertions(+), 48 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 8f1d05cbfcf..626d6519141 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -20,7 +20,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ } import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} -import net.liftweb.common.{Empty, Full} +import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.libs.json.{JsObject, JsValue, Json} import javax.inject.Inject @@ -28,7 +28,24 @@ import scala.concurrent.ExecutionContext case class AnnotationWithTracings(annotation: AnnotationProto, tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]]) { - def getSkeleton(tracingId: String): SkeletonTracing = ??? 
+ + def getSkeleton(tracingId: String): Box[SkeletonTracing] = + for { + tracingEither <- tracingsById.get(tracingId) + skeletonTracing <- tracingEither match { + case Left(st: SkeletonTracing) => Full(st) + case _ => Failure(f"Tried to access tracing $tracingId as skeleton, but is volume") + } + } yield skeletonTracing + + def getVolume(tracingId: String): Box[VolumeTracing] = + for { + tracingEither <- tracingsById.get(tracingId) + volumeTracing <- tracingEither match { + case Right(vt: VolumeTracing) => Full(vt) + case _ => Failure(f"Tried to access tracing $tracingId as volume, but is skeleton") + } + } yield volumeTracing def version: Long = annotation.version @@ -59,21 +76,13 @@ case class AnnotationWithTracings(annotation: AnnotationProto, def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { - skeletonTracingEither <- tracingsById.get(a.actionTracingId).toFox - skeletonTracing <- skeletonTracingEither match { - case Left(st: SkeletonTracing) => Fox.successful(st) - case _ => Fox.failure("wrong tracing type") - } + skeletonTracing <- getSkeleton(a.actionTracingId) updated = a.applyOn(skeletonTracing) } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Left(updated))) def applyVolumeAction(a: ApplyableVolumeUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { - volumeTracingEither <- tracingsById.get(a.actionTracingId).toFox - volumeTracing <- volumeTracingEither match { - case Right(vt: VolumeTracing) => Fox.successful(vt) - case _ => Fox.failure("wrong tracing type") - } + volumeTracing <- getVolume(a.actionTracingId) updated = a.applyOn(volumeTracing) } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Right(updated))) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 792a893d3d6..e13f66a5bc7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -144,27 +144,14 @@ class VolumeTracingService @Inject()( Fox.failure("Cannot mutate volume data in annotation with editable mapping.") } else updateBucket(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version) ?~> "Failed to save volume data." - case a: UpdateTracingVolumeAction => - Fox.successful( - tracing.copy( - activeSegmentId = Some(a.activeSegmentId), - editPosition = a.editPosition, - editRotation = a.editRotation, - largestSegmentId = a.largestSegmentId, - zoomLevel = a.zoomLevel, - editPositionAdditionalCoordinates = - AdditionalCoordinate.toProto(a.editPositionAdditionalCoordinates) - )) - case a: RevertToVersionVolumeAction => - revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) + //case a: RevertToVersionVolumeAction => revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) case a: DeleteSegmentDataVolumeAction => if (!tracing.getHasSegmentIndex) { Fox.failure("Cannot delete segment data for annotations without segment index.") } else deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." 
- case _: UpdateTdCameraVolumeAction => Fox.successful(tracing) - case a: ApplyableVolumeUpdateAction => Fox.successful(applyUpdateOn(tracing, a)) - case _ => Fox.failure("Unknown action.") + case a: ApplyableVolumeUpdateAction => Fox.successful(applyUpdateOn(tracing, a)) + case _ => Fox.failure("Unknown action.") } case Empty => Fox.empty diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index a4b3ef91ac5..763dbbbd02f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -64,26 +64,24 @@ case class UpdateTracingVolumeAction( actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None -) extends VolumeUpdateAction { +) extends ApplyableVolumeUpdateAction + with ProtoGeometryImplicits { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def isViewOnlyChange: Boolean = true -} - -case class RevertToVersionVolumeAction(sourceVersion: Long, - actionTracingId: String, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends VolumeUpdateAction { - override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = - this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction = 
this.copy(info = info) + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.copy( + activeSegmentId = Some(activeSegmentId), + editPosition = editPosition, + editRotation = editRotation, + largestSegmentId = largestSegmentId, + zoomLevel = zoomLevel, + editPositionAdditionalCoordinates = AdditionalCoordinate.toProto(editPositionAdditionalCoordinates) + ) } case class UpdateUserBoundingBoxesVolumeAction(boundingBoxes: List[NamedBoundingBox], @@ -178,7 +176,7 @@ case class UpdateTdCameraVolumeAction(actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends VolumeUpdateAction { + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -186,6 +184,9 @@ case class UpdateTdCameraVolumeAction(actionTracingId: String, this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing + override def isViewOnlyChange: Boolean = true } @@ -262,7 +263,7 @@ case class DeleteSegmentVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends VolumeUpdateAction { + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -270,8 +271,8 @@ case class DeleteSegmentVolumeAction(id: Long, this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - /*override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing.withSegments(tracing.segments.filter(_.segmentId != id))*/ + override def applyOn(tracing: VolumeTracing): VolumeTracing = + tracing.withSegments(tracing.segments.filter(_.segmentId != id)) } @@ -324,6 +325,7 @@ case 
class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegme override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) } +// TODO this now exists only for UpdateBucket. Make it a slimmed down version of that rather than generic? case class CompactVolumeUpdateAction(name: String, value: JsObject, actionTracingId: String, @@ -368,9 +370,6 @@ object UpdateBucketVolumeAction { object UpdateTracingVolumeAction { implicit val jsonFormat: OFormat[UpdateTracingVolumeAction] = Json.format[UpdateTracingVolumeAction] } -object RevertToVersionVolumeAction { - implicit val jsonFormat: OFormat[RevertToVersionVolumeAction] = Json.format[RevertToVersionVolumeAction] -} object UpdateUserBoundingBoxesVolumeAction { implicit val jsonFormat: OFormat[UpdateUserBoundingBoxesVolumeAction] = Json.format[UpdateUserBoundingBoxesVolumeAction] From a9cd852b0c91a8efcc9b03a0f61a64206c662fea Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jul 2024 13:11:17 +0200 Subject: [PATCH 030/361] apply bucketMutating actions --- .../annotation/TSAnnotationService.scala | 26 +++++-- .../volume/VolumeTracingService.scala | 72 ++++++++----------- .../tracings/volume/VolumeUpdateActions.scala | 6 +- 3 files changed, 53 insertions(+), 51 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 626d6519141..460efe755ea 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -15,7 +15,9 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ } import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ApplyableVolumeUpdateAction, + BucketMutatingVolumeUpdateAction, 
UpdateBucketVolumeAction, + VolumeTracingService, VolumeUpdateAction } import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} @@ -88,7 +90,8 @@ case class AnnotationWithTracings(annotation: AnnotationProto, } class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, - tracingDataStore: TracingDataStore) + tracingDataStore: TracingDataStore, + volumeTracingService: VolumeTracingService) extends KeyValueStoreImplicits { def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String]): Fox[Unit] = @@ -109,11 +112,22 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup, previousVersion: Long, - userToken: Option[String]): Fox[Unit] = - // TODO apply some updates directly? transform to compact? - tracingDataStore.annotationUpdates.put(annotationId, - updateActionGroup.version, - preprocessActionsForStorage(updateActionGroup)) + userToken: Option[String])(implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- tracingDataStore.annotationUpdates.put(annotationId, + updateActionGroup.version, + preprocessActionsForStorage(updateActionGroup)) + bucketMutatingActions = findBucketMutatingActions(updateActionGroup) + _ <- Fox.runIf(bucketMutatingActions.nonEmpty)( + volumeTracingService + .applyBucketMutatingActions(bucketMutatingActions, previousVersion, updateActionGroup.version, userToken)) + } yield () + + private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = + updateActionGroup.actions.flatMap { + case a: BucketMutatingVolumeUpdateAction => Some(a) + case _ => None + } private def preprocessActionsForStorage(updateActionGroup: UpdateActionGroup): List[UpdateAction] = { val actionsWithInfo = updateActionGroup.actions.map( diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index e13f66a5bc7..0569a4c261c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -116,55 +116,41 @@ class VolumeTracingService @Inject()( private def applyUpdateOn(tracing: VolumeTracing, update: ApplyableVolumeUpdateAction): VolumeTracing = ??? - def handleUpdateGroup(tracingId: String, - updateGroup: UpdateActionGroup, - previousVersion: Long, - userToken: Option[String]): Fox[Unit] = + def applyBucketMutatingActions(updateActions: List[BucketMutatingVolumeUpdateAction], + previousVersion: Long, + newVersion: Long, + userToken: Option[String]): Fox[Unit] = for { // warning, may be called multiple times with the same version number (due to transaction management). 
// frontend ensures that each bucket is only updated once per transaction - fallbackLayer <- getFallbackLayer(tracingId) + tracingId <- updateActions.headOption.map(_.actionTracingId).toFox + fallbackLayerOpt <- getFallbackLayer(tracingId) tracing <- find(tracingId) ?~> "tracing.notFound" - segmentIndexBuffer <- Fox.successful( - new VolumeSegmentIndexBuffer( - tracingId, - volumeSegmentIndexClient, - updateGroup.version, - remoteDatastoreClient, - fallbackLayer, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken - )) - updatedTracing: VolumeTracing <- updateGroup.actions.foldLeft(find(tracingId)) { (tracingFox, action) => - tracingFox.futureBox.flatMap { - case Full(tracing) => - action match { - case a: UpdateBucketVolumeAction => - if (tracing.getMappingIsEditable) { - Fox.failure("Cannot mutate volume data in annotation with editable mapping.") - } else - updateBucket(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version) ?~> "Failed to save volume data." - //case a: RevertToVersionVolumeAction => revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) - case a: DeleteSegmentDataVolumeAction => - if (!tracing.getHasSegmentIndex) { - Fox.failure("Cannot delete segment data for annotations without segment index.") - } else - deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version, userToken) ?~> "Failed to delete segment data." 
- case a: ApplyableVolumeUpdateAction => Fox.successful(applyUpdateOn(tracing, a)) - case _ => Fox.failure("Unknown action.") - } - case Empty => - Fox.empty - case f: Failure => - f.toFox - } + segmentIndexBuffer = new VolumeSegmentIndexBuffer( + tracingId, + volumeSegmentIndexClient, + newVersion, + remoteDatastoreClient, + fallbackLayerOpt, + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), + userToken + ) + _ <- Fox.serialCombined(updateActions) { + case a: UpdateBucketVolumeAction => + if (tracing.getMappingIsEditable) { + Fox.failure("Cannot mutate volume data in annotation with editable mapping.") + } else + updateBucket(tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to save volume data." + //case a: RevertToVersionVolumeAction => revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) + case a: DeleteSegmentDataVolumeAction => + if (!tracing.getHasSegmentIndex) { + Fox.failure("Cannot delete segment data for annotations without segment index.") + } else + deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, newVersion, userToken) ?~> "Failed to delete segment data." 
+ case _ => Fox.failure("Unknown bucket-mutating action.") } _ <- segmentIndexBuffer.flush() - _ <- save(updatedTracing.copy(version = updateGroup.version), Some(tracingId), updateGroup.version) - _ <- tracingDataStore.volumeUpdates.put(tracingId, - updateGroup.version, - updateGroup.actions.map(_.addTimestamp(updateGroup.timestamp))) - } yield Fox.successful(()) + } yield () private def updateBucket(tracingId: String, volumeTracing: VolumeTracing, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 763dbbbd02f..f5230ec79a3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -32,6 +32,8 @@ trait ApplyableVolumeUpdateAction extends VolumeUpdateAction { def applyOn(tracing: VolumeTracing): VolumeTracing } +trait BucketMutatingVolumeUpdateAction extends VolumeUpdateAction + case class UpdateBucketVolumeAction(position: Vec3Int, cubeSize: Int, mag: Vec3Int, @@ -41,7 +43,7 @@ case class UpdateBucketVolumeAction(position: Vec3Int, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends VolumeUpdateAction { + extends BucketMutatingVolumeUpdateAction { lazy val data: Array[Byte] = Base64.getDecoder.decode(base64Data) override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -263,7 +265,7 @@ case class DeleteSegmentVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends ApplyableVolumeUpdateAction { + extends BucketMutatingVolumeUpdateAction { override def addTimestamp(timestamp: Long): 
VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) From 5ae6f466eca64264c648cf312255145e4f36980c Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jul 2024 13:20:31 +0200 Subject: [PATCH 031/361] fixes --- .../annotation/AnnotationTransactionService.scala | 12 +++++------- .../annotation/TSAnnotationService.scala | 9 +++------ .../controllers/VolumeTracingController.scala | 4 ++-- .../tracings/volume/VolumeTracingService.scala | 8 ++------ .../tracings/volume/VolumeUpdateActions.scala | 3 ++- 5 files changed, 14 insertions(+), 22 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 3c7f5f48354..c993dd9bfaf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -117,9 +117,8 @@ class AnnotationTransactionService @Inject()( transactionGroupIndex: Int): Fox[Boolean] = handledGroupIdStore.contains(handledGroupKey(annotationId, transactionId, version, transactionGroupIndex)) - private def concatenateUpdateGroupsOfTransaction( - previousActionGroups: List[UpdateActionGroup], - lastActionGroup: UpdateActionGroup): UpdateActionGroup = + private def concatenateUpdateGroupsOfTransaction(previousActionGroups: List[UpdateActionGroup], + lastActionGroup: UpdateActionGroup): UpdateActionGroup = if (previousActionGroups.isEmpty) lastActionGroup else { val allActionGroups = previousActionGroups :+ lastActionGroup @@ -148,9 +147,8 @@ class AnnotationTransactionService @Inject()( } // Perform version check and commit the passed updates - private def commitUpdates(annotationId: String, - updateGroups: List[UpdateActionGroup], - userToken: 
Option[String])(implicit ec: ExecutionContext): Fox[Long] = + private def commitUpdates(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String])( + implicit ec: ExecutionContext): Fox[Long] = for { _ <- annotationService.reportUpdates(annotationId, updateGroups, userToken) currentCommittedVersion: Fox[Long] = annotationService.currentVersion(annotationId) @@ -158,7 +156,7 @@ class AnnotationTransactionService @Inject()( previousVersion.flatMap { prevVersion: Long => if (prevVersion + 1 == updateGroup.version) { for { - _ <- annotationService.handleUpdateGroup(annotationId, updateGroup, prevVersion, userToken) + _ <- annotationService.handleUpdateGroup(annotationId, updateGroup, userToken) _ <- saveToHandledGroupIdStore(annotationId, updateGroup.transactionId, updateGroup.version, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 460efe755ea..ef08058b404 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -109,18 +109,15 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl def currentVersion(annotationId: String): Fox[Long] = ??? 
- def handleUpdateGroup(annotationId: String, - updateActionGroup: UpdateActionGroup, - previousVersion: Long, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Unit] = + def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup, userToken: Option[String])( + implicit ec: ExecutionContext): Fox[Unit] = for { _ <- tracingDataStore.annotationUpdates.put(annotationId, updateActionGroup.version, preprocessActionsForStorage(updateActionGroup)) bucketMutatingActions = findBucketMutatingActions(updateActionGroup) _ <- Fox.runIf(bucketMutatingActions.nonEmpty)( - volumeTracingService - .applyBucketMutatingActions(bucketMutatingActions, previousVersion, updateActionGroup.version, userToken)) + volumeTracingService.applyBucketMutatingActions(bucketMutatingActions, updateActionGroup.version, userToken)) } yield () private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 0c733f0ed9a..160036b7c7f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -373,7 +373,7 @@ class VolumeTracingController @Inject()( isLocked = Some(true), actionTracingId = tracingId, actionTimestamp = Some(System.currentTimeMillis())) - _ <- tracingService.handleUpdateGroup( + /*_ <- tracingService.handleUpdateGroup( // TODO tracingId, UpdateActionGroup(tracing.version + 1, System.currentTimeMillis(), @@ -386,7 +386,7 @@ class VolumeTracingController @Inject()( 0), tracing.version, urlOrHeaderToken(token, request) - ) + )*/ infoJson <- editableMappingService.infoJson(tracingId = 
tracingId, editableMappingId = editableMappingId, editableMappingInfo = editableMappingInfo, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 0569a4c261c..c69c4c90615 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -15,7 +15,6 @@ import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest import com.scalableminds.webknossos.datastore.models.{ - AdditionalCoordinate, BucketPosition, UnsignedInteger, UnsignedIntegerArray, @@ -114,10 +113,7 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" - private def applyUpdateOn(tracing: VolumeTracing, update: ApplyableVolumeUpdateAction): VolumeTracing = ??? 
- def applyBucketMutatingActions(updateActions: List[BucketMutatingVolumeUpdateAction], - previousVersion: Long, newVersion: Long, userToken: Option[String]): Fox[Unit] = for { @@ -888,7 +884,7 @@ class VolumeTracingService @Inject()( 1, 0 ) - _ <- Fox.runIf(!dryRun)(handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken)) + // TODO _ <- Fox.runIf(!dryRun)(handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken)) } yield Some(processedBucketCount) } @@ -975,7 +971,7 @@ class VolumeTracingService @Inject()( 1, 0 ) - _ <- handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken) + // TODO: _ <- handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken) } yield mergedVolume.largestSegmentId.toPositiveLong } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index f5230ec79a3..f12dd1a8948 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -265,7 +265,8 @@ case class DeleteSegmentVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends BucketMutatingVolumeUpdateAction { + extends BucketMutatingVolumeUpdateAction + with ApplyableVolumeUpdateAction { // TODO double-check that it is matched against both traits override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) From 7b3d7c25d1565c3d4e633ea921173d0d226bbf1d Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 30 Jul 2024 13:36:21 +0200 Subject: [PATCH 032/361] small cleanup --- .../annotation/AnnotationWithTracings.scala | 73 +++++++++++++++++++ 
.../annotation/TSAnnotationService.scala | 63 ---------------- .../tracings/volume/VolumeUpdateActions.scala | 2 +- 3 files changed, 74 insertions(+), 64 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala new file mode 100644 index 00000000000..ce13e61445a --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -0,0 +1,73 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction +import com.scalableminds.webknossos.tracingstore.tracings.volume.ApplyableVolumeUpdateAction +import net.liftweb.common.{Box, Failure, Full} + +import scala.concurrent.ExecutionContext + +case class AnnotationWithTracings(annotation: AnnotationProto, + tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]]) { + + def getSkeleton(tracingId: String): Box[SkeletonTracing] = + for { + tracingEither <- tracingsById.get(tracingId) + skeletonTracing <- tracingEither match { + case Left(st: SkeletonTracing) => Full(st) + case _ => Failure(f"Tried to access tracing $tracingId as skeleton, but is volume") + } + } yield skeletonTracing + + def getVolume(tracingId: String): Box[VolumeTracing] = + for { + tracingEither 
<- tracingsById.get(tracingId) + volumeTracing <- tracingEither match { + case Right(vt: VolumeTracing) => Full(vt) + case _ => Failure(f"Tried to access tracing $tracingId as volume, but is skeleton") + } + } yield volumeTracing + + def version: Long = annotation.version + + def addTracing(a: AddLayerAnnotationUpdateAction): AnnotationWithTracings = + AnnotationWithTracings( + annotation.copy( + layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, + a.layerName, + `type` = AnnotationLayerType.toProto(a.`type`))), + tracingsById) + + def deleteTracing(a: DeleteLayerAnnotationUpdateAction): AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)), tracingsById) + + def updateLayerMetadata(a: UpdateLayerMetadataAnnotationUpdateAction): AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(layers = annotation.layers.map(l => + if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l)), + tracingsById) + + def updateMetadata(a: UpdateMetadataAnnotationUpdateAction): AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(name = a.name, description = a.description), tracingsById) + + def incrementVersion: AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(version = annotation.version + 1L), tracingsById) + + def withVersion(newVersion: Long): AnnotationWithTracings = + AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById) // TODO also update version in tracings? 
+ + def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + skeletonTracing <- getSkeleton(a.actionTracingId) + updated = a.applyOn(skeletonTracing) + } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Left(updated))) + + def applyVolumeAction(a: ApplyableVolumeUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + for { + volumeTracing <- getVolume(a.actionTracingId) + updated = a.applyOn(volumeTracing) + } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Right(updated))) +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index ef08058b404..31ff46b3bb4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -28,67 +28,6 @@ import play.api.libs.json.{JsObject, JsValue, Json} import javax.inject.Inject import scala.concurrent.ExecutionContext -case class AnnotationWithTracings(annotation: AnnotationProto, - tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]]) { - - def getSkeleton(tracingId: String): Box[SkeletonTracing] = - for { - tracingEither <- tracingsById.get(tracingId) - skeletonTracing <- tracingEither match { - case Left(st: SkeletonTracing) => Full(st) - case _ => Failure(f"Tried to access tracing $tracingId as skeleton, but is volume") - } - } yield skeletonTracing - - def getVolume(tracingId: String): Box[VolumeTracing] = - for { - tracingEither <- tracingsById.get(tracingId) - volumeTracing <- tracingEither match { - case Right(vt: VolumeTracing) => Full(vt) - case _ => Failure(f"Tried to access tracing $tracingId as volume, but is 
skeleton") - } - } yield volumeTracing - - def version: Long = annotation.version - - def addTracing(a: AddLayerAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings( - annotation.copy( - layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, - a.layerName, - `type` = AnnotationLayerType.toProto(a.`type`))), - tracingsById) - - def deleteTracing(a: DeleteLayerAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)), tracingsById) - - def updateLayerMetadata(a: UpdateLayerMetadataAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(layers = annotation.layers.map(l => - if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l)), - tracingsById) - - def updateMetadata(a: UpdateMetadataAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(name = a.name, description = a.description), tracingsById) - - def incrementVersion: AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(version = annotation.version + 1L), tracingsById) - - def withVersion(newVersion: Long): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById) // TODO also update version in tracings? 
- - def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = - for { - skeletonTracing <- getSkeleton(a.actionTracingId) - updated = a.applyOn(skeletonTracing) - } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Left(updated))) - - def applyVolumeAction(a: ApplyableVolumeUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = - for { - volumeTracing <- getVolume(a.actionTracingId) - updated = a.applyOn(volumeTracing) - } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Right(updated))) -} - class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, tracingDataStore: TracingDataStore, volumeTracingService: VolumeTracingService) @@ -168,8 +107,6 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl annotationWithTracings.applySkeletonAction(a) case a: ApplyableVolumeUpdateAction => annotationWithTracings.applyVolumeAction(a) - case a: VolumeUpdateAction => - Fox.successful(annotationWithTracings) // TODO case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") } } yield updated diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index f12dd1a8948..ad2e3eab783 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -266,7 +266,7 @@ case class DeleteSegmentVolumeAction(id: Long, actionAuthorId: Option[String] = None, info: Option[String] = None) extends BucketMutatingVolumeUpdateAction - with ApplyableVolumeUpdateAction { // TODO double-check that it is matched against both traits + with 
ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) From 62e671dc6f8077b2879e4a99e10971c7d1ea9ecb Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 21 Aug 2024 11:29:01 +0200 Subject: [PATCH 033/361] WIP: also maintain editable mappings when applying updates --- .../AnnotationTransactionService.scala | 4 +- .../annotation/AnnotationWithTracings.scala | 39 ++++++++++++++----- .../annotation/TSAnnotationService.scala | 19 ++++++--- 3 files changed, 45 insertions(+), 17 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index c993dd9bfaf..e0b8fd651de 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -140,7 +140,7 @@ class AnnotationTransactionService @Inject()( if (updateGroups.forall(_.transactionGroupCount == 1)) { commitUpdates(annotationId, updateGroups, userToken) } else { - updateGroups.foldLeft(annotationService.currentVersion(annotationId)) { + updateGroups.foldLeft(annotationService.currentMaterializableVersion(annotationId)) { (currentCommittedVersionFox, updateGroup) => handleUpdateGroupForTransaction(annotationId, currentCommittedVersionFox, updateGroup, userToken) } @@ -151,7 +151,7 @@ class AnnotationTransactionService @Inject()( implicit ec: ExecutionContext): Fox[Long] = for { _ <- annotationService.reportUpdates(annotationId, updateGroups, userToken) - currentCommittedVersion: Fox[Long] = annotationService.currentVersion(annotationId) + currentCommittedVersion: Fox[Long] = annotationService.currentMaterializableVersion(annotationId) newVersion <- 
updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => previousVersion.flatMap { prevVersion: Long => if (prevVersion + 1 == updateGroup.version) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index ce13e61445a..9469458df93 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -2,17 +2,21 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingUpdater import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction import com.scalableminds.webknossos.tracingstore.tracings.volume.ApplyableVolumeUpdateAction import net.liftweb.common.{Box, Failure, Full} import scala.concurrent.ExecutionContext -case class AnnotationWithTracings(annotation: AnnotationProto, - tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]]) { +case class AnnotationWithTracings( + annotation: AnnotationProto, + tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]], + editableMappingsByTracingId: Map[String, (EditableMappingInfo, EditableMappingUpdater)]) { def getSkeleton(tracingId: String): 
Box[SkeletonTracing] = for { @@ -40,34 +44,49 @@ case class AnnotationWithTracings(annotation: AnnotationProto, layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, a.layerName, `type` = AnnotationLayerType.toProto(a.`type`))), - tracingsById) + tracingsById, + editableMappingsByTracingId + ) def deleteTracing(a: DeleteLayerAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)), tracingsById) + AnnotationWithTracings(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)), + tracingsById, + editableMappingsByTracingId) def updateLayerMetadata(a: UpdateLayerMetadataAnnotationUpdateAction): AnnotationWithTracings = AnnotationWithTracings(annotation.copy(layers = annotation.layers.map(l => if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l)), - tracingsById) + tracingsById, + editableMappingsByTracingId) def updateMetadata(a: UpdateMetadataAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(name = a.name, description = a.description), tracingsById) + AnnotationWithTracings(annotation.copy(name = a.name, description = a.description), + tracingsById, + editableMappingsByTracingId) def incrementVersion: AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(version = annotation.version + 1L), tracingsById) + AnnotationWithTracings(annotation.copy(version = annotation.version + 1L), + tracingsById, + editableMappingsByTracingId) def withVersion(newVersion: Long): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById) // TODO also update version in tracings? + AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById, editableMappingsByTracingId) // TODO also update version in tracings? 
def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { skeletonTracing <- getSkeleton(a.actionTracingId) updated = a.applyOn(skeletonTracing) - } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Left(updated))) + } yield + AnnotationWithTracings(annotation, + tracingsById.updated(a.actionTracingId, Left(updated)), + editableMappingsByTracingId) def applyVolumeAction(a: ApplyableVolumeUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { volumeTracing <- getVolume(a.actionTracingId) updated = a.applyOn(volumeTracing) - } yield AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Right(updated))) + } yield + AnnotationWithTracings(annotation, + tracingsById.updated(a.actionTracingId, Right(updated)), + editableMappingsByTracingId) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 31ff46b3bb4..04dfc10194b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -3,10 +3,14 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.option2Fox -import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import 
com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingUpdateAction, + EditableMappingUpdater +} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ CreateNodeSkeletonAction, DeleteNodeSkeletonAction, @@ -22,7 +26,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ } import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} -import net.liftweb.common.{Box, Empty, Failure, Full} +import net.liftweb.common.{Empty, Full} import play.api.libs.json.{JsObject, JsValue, Json} import javax.inject.Inject @@ -46,7 +50,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl )) } yield () - def currentVersion(annotationId: String): Fox[Long] = ??? 
+ def currentMaterializableVersion(annotationId: String): Fox[Long] = + tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup, userToken: Option[String])( implicit ec: ExecutionContext): Fox[Unit] = @@ -107,6 +112,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl annotationWithTracings.applySkeletonAction(a) case a: ApplyableVolumeUpdateAction => annotationWithTracings.applyVolumeAction(a) + case a: EditableMappingUpdateAction => + Fox.failure("not yet implemented") case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") } } yield updated @@ -157,6 +164,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl case u: VolumeUpdateAction => Some(u.actionTracingId) case _ => None } + // TODO fetch editable mappings + instantiate editableMappingUpdaters/buffers if there are updates for them + val editableMappingsMap: Map[String, (EditableMappingInfo, EditableMappingUpdater)] = Map.empty for { skeletonTracings <- Fox.serialCombined(skeletonTracingIds)( id => @@ -172,7 +181,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl volumeTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = volumeTracingIds .zip(volumeTracings.map(versioned => Right[SkeletonTracing, VolumeTracing](versioned.value))) .toMap - } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap) + } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, editableMappingsMap) } private def applyUpdates(annotation: AnnotationWithTracings, From c4dc9577116c148d68fcc167375e2e84b1dc8119 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 21 Aug 2024 13:57:44 +0200 Subject: [PATCH 034/361] WIP: createTracing --- app/controllers/AnnotationController.scala | 19 ++----------------- 
app/models/annotation/AnnotationService.scala | 2 +- .../annotation/AnnotationUpdateActions.scala | 18 ++++++++++++++++-- .../annotation/AnnotationWithTracings.scala | 9 ++++++++- .../annotation/TSAnnotationService.scala | 10 ++++++++-- 5 files changed, 35 insertions(+), 23 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index a9497011191..b3160e1f665 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -7,13 +7,10 @@ import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType -import com.scalableminds.webknossos.datastore.models.annotation.{ - AnnotationLayer, - AnnotationLayerStatistics, - AnnotationLayerType -} +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.datastore.rpc.RPC +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions import com.scalableminds.webknossos.tracingstore.tracings.{TracingIds, TracingType} import mail.{MailchimpClient, MailchimpTag} @@ -40,18 +37,6 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -case class AnnotationLayerParameters(typ: AnnotationLayerType, - fallbackLayerName: Option[String], - autoFallbackLayer: Boolean = false, - mappingName: Option[String] = None, - resolutionRestrictions: Option[ResolutionRestrictions], - name: Option[String], - additionalAxes: Option[Seq[AdditionalAxis]]) -object AnnotationLayerParameters { - implicit val jsonFormat: 
OFormat[AnnotationLayerParameters] = - Json.using[WithDefaultValues].format[AnnotationLayerParameters] -} - class AnnotationController @Inject()( annotationDAO: AnnotationDAO, annotationLayerDAO: AnnotationLayerDAO, diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 651473ba19d..c84a095d99f 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -32,6 +32,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ SegmentationLayerLike => SegmentationLayer } import com.scalableminds.webknossos.datastore.rpc.RPC +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.tracings.volume.{ @@ -41,7 +42,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeTracingDownsampling } import com.typesafe.scalalogging.LazyLogging -import controllers.AnnotationLayerParameters import models.annotation.AnnotationState._ import models.annotation.AnnotationType.AnnotationType import models.annotation.handler.SavedTracingInformationHandler diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index e9fa3268163..860cb4f81b9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -1,13 +1,27 @@ package com.scalableminds.webknossos.tracingstore.annotation import 
com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType +import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis +import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions +import play.api.libs.json.Json.WithDefaultValues import play.api.libs.json.{Json, OFormat} +case class AnnotationLayerParameters(typ: AnnotationLayerType, + fallbackLayerName: Option[String], + autoFallbackLayer: Boolean = false, + mappingName: Option[String] = None, + resolutionRestrictions: Option[ResolutionRestrictions], + name: Option[String], + additionalAxes: Option[Seq[AdditionalAxis]]) +object AnnotationLayerParameters { + implicit val jsonFormat: OFormat[AnnotationLayerParameters] = + Json.using[WithDefaultValues].format[AnnotationLayerParameters] +} + trait AnnotationUpdateAction extends UpdateAction -case class AddLayerAnnotationUpdateAction(layerName: String, +case class AddLayerAnnotationUpdateAction(layerParameters: AnnotationLayerParameters, tracingId: String, - `type`: AnnotationLayerType, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 9469458df93..dcf07d1d5f8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -6,7 +6,10 @@ import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappin import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import 
com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingUpdater +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingUpdateAction, + EditableMappingUpdater +} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction import com.scalableminds.webknossos.tracingstore.tracings.volume.ApplyableVolumeUpdateAction import net.liftweb.common.{Box, Failure, Full} @@ -89,4 +92,8 @@ case class AnnotationWithTracings( AnnotationWithTracings(annotation, tracingsById.updated(a.actionTracingId, Right(updated)), editableMappingsByTracingId) + + def applyEditableMappingAction(a: EditableMappingUpdateAction)( + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + Fox.failure("not implemented yet") // TODO } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 04dfc10194b..c972d51358f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -100,7 +100,6 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl for { updated <- updateAction match { case a: AddLayerAnnotationUpdateAction => - // TODO create tracing object (ask wk for needed parameters e.g. fallback layer info?) 
Fox.successful(annotationWithTracings.addTracing(a)) case a: DeleteLayerAnnotationUpdateAction => Fox.successful(annotationWithTracings.deleteTracing(a)) @@ -113,11 +112,18 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl case a: ApplyableVolumeUpdateAction => annotationWithTracings.applyVolumeAction(a) case a: EditableMappingUpdateAction => - Fox.failure("not yet implemented") + annotationWithTracings.applyEditableMappingAction(a) + // TODO make Mapping Editable + // Note: UpdateBucketVolumeActions are not handled here, but instead eagerly on saving. case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") } } yield updated + def createTracing(a: AddLayerAnnotationUpdateAction)( + implicit ec: ExecutionContext): Fox[Either[SkeletonTracing, VolumeTracing]] = + Fox.failure("not implemented") + // TODO create tracing object (ask wk for needed parameters e.g. fallback layer info?) + def updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = Json.obj( From ff579f86639f8244ab341b78ced6ba5fd31afbd0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 22 Aug 2024 14:02:28 +0200 Subject: [PATCH 035/361] adapt tests, imports --- app/controllers/AnnotationController.scala | 10 ++-- app/controllers/LegacyApiController.scala | 1 + test/backend/Dummies.scala | 2 + .../SkeletonUpdateActionsUnitTestSuite.scala | 48 ++++++++++++------- .../VolumeUpdateActionsUnitTestSuite.scala | 23 ++++++--- .../annotation/AnnotationWithTracings.scala | 10 ++-- 6 files changed, 61 insertions(+), 33 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index b3160e1f665..222a3d3a21b 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -6,12 +6,13 @@ import 
com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContex import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType -import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType} -import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis +import com.scalableminds.webknossos.datastore.models.annotation.{ + AnnotationLayer, + AnnotationLayerStatistics, + AnnotationLayerType +} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters -import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions import com.scalableminds.webknossos.tracingstore.tracings.{TracingIds, TracingType} import mail.{MailchimpClient, MailchimpTag} import models.analytics.{AnalyticsService, CreateAnnotationEvent, OpenAnnotationEvent} @@ -26,7 +27,6 @@ import models.user.time._ import models.user.{User, UserDAO, UserService} import net.liftweb.common.Box import play.api.i18n.{Messages, MessagesProvider} -import play.api.libs.json.Json.WithDefaultValues import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{URLSharing, UserAwareRequestLogging, WkEnv} diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index 7a3c535ca88..2946ec1ddfa 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -5,6 +5,7 @@ import play.silhouette.api.actions.SecuredRequest import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} 
+import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions import models.dataset.DatasetService import models.organization.OrganizationDAO diff --git a/test/backend/Dummies.scala b/test/backend/Dummies.scala index 8c982c11b99..eb108f94210 100644 --- a/test/backend/Dummies.scala +++ b/test/backend/Dummies.scala @@ -53,6 +53,8 @@ object Dummies { Some(true)) val treeGroup2: TreeGroup = TreeGroup("Axon 2", 2, Seq.empty, Some(true)) + val tracingId: String = "dummyTracingId" + val skeletonTracing: SkeletonTracing = SkeletonTracing( "dummy_dataset", Seq(tree1, tree2), diff --git a/test/backend/SkeletonUpdateActionsUnitTestSuite.scala b/test/backend/SkeletonUpdateActionsUnitTestSuite.scala index 4799ecc342d..3905460e318 100644 --- a/test/backend/SkeletonUpdateActionsUnitTestSuite.scala +++ b/test/backend/SkeletonUpdateActionsUnitTestSuite.scala @@ -2,13 +2,12 @@ package backend import com.scalableminds.util.geometry.{Vec3Int, Vec3Double} import com.scalableminds.webknossos.datastore.SkeletonTracing._ -import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ import org.scalatestplus.play._ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { - private def applyUpdateAction(action: UpdateAction.SkeletonUpdateAction): SkeletonTracing = + private def applyUpdateAction(action: SkeletonUpdateAction): SkeletonTracing = action.applyOn(Dummies.skeletonTracing) def listConsistsOfLists[T](joinedList: Seq[T], sublist1: Seq[T], sublist2: Seq[T]): Boolean = @@ -29,7 +28,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { comments = List[UpdateActionComment](), groupId = None, isVisible = Option(true), - edgesAreVisible = Option(true) + edgesAreVisible = Option(true), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(createTreeAction) @@ -46,7 +46,7 @@ 
class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "DeleteTreeSkeletonAction" should { "delete the specified tree" in { - val deleteTreeAction = new DeleteTreeSkeletonAction(id = 1) + val deleteTreeAction = new DeleteTreeSkeletonAction(id = 1, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(deleteTreeAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length - 1) @@ -66,7 +66,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { name = "updated tree", branchPoints = List(UpdateActionBranchPoint(0, Dummies.timestamp)), comments = List[UpdateActionComment](), - groupId = None + groupId = None, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeAction) @@ -81,7 +82,7 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "MergeTreeSkeletonAction" should { "merge the specified trees" in { - val mergeTreeAction = new MergeTreeSkeletonAction(sourceId = 1, targetId = 2) + val mergeTreeAction = new MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId) val sourceTree = Dummies.tree1 val targetTree = Dummies.tree2 val result = applyUpdateAction(mergeTreeAction) @@ -102,7 +103,10 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "MoveTreeComponentSkeletonAction" should { "move the specified (separate) nodes" in { val moveTreeComponentSkeletonAction = - new MoveTreeComponentSkeletonAction(Dummies.comp1Nodes.map(_.id).toList, sourceId = 3, targetId = 4) + new MoveTreeComponentSkeletonAction(Dummies.comp1Nodes.map(_.id).toList, + sourceId = 3, + targetId = 4, + actionTracingId = Dummies.tracingId) val result = moveTreeComponentSkeletonAction.applyOn(Dummies.componentSkeletonTracing) assert(result.trees.length == Dummies.componentSkeletonTracing.trees.length) @@ -120,7 +124,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "CreateEdgeSkeletonAction" should { "create a new edge in the right tree" in { - val 
createEdgeSkeletonAction = new CreateEdgeSkeletonAction(source = 1, target = 7, treeId = 1) + val createEdgeSkeletonAction = + new CreateEdgeSkeletonAction(source = 1, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(createEdgeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -133,8 +138,10 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "DeleteEdgeSkeletonAction" should { "undo CreateEdgeSkeletonAction" in { - val createEdgeSkeletonAction = new CreateEdgeSkeletonAction(source = 0, target = 7, treeId = 1) - val deleteEdgeSkeletonAction = new DeleteEdgeSkeletonAction(source = 0, target = 7, treeId = 1) + val createEdgeSkeletonAction = + new CreateEdgeSkeletonAction(source = 0, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) + val deleteEdgeSkeletonAction = + new DeleteEdgeSkeletonAction(source = 0, target = 7, treeId = 1, actionTracingId = Dummies.tracingId) val result = deleteEdgeSkeletonAction.applyOn(createEdgeSkeletonAction.applyOn(Dummies.skeletonTracing)) assert(result == Dummies.skeletonTracing) } @@ -154,7 +161,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.interpolation), treeId = 1, Dummies.timestamp, - None + None, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(createNodeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -178,7 +186,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.bitDepth), Option(newNode.interpolation), treeId = 1, - Dummies.timestamp + Dummies.timestamp, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateNodeSkeletonAction) assert(result.trees.length == Dummies.skeletonTracing.trees.length) @@ -203,9 +212,11 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { Option(newNode.interpolation), treeId = 1, Dummies.timestamp, - None + None, + actionTracingId = Dummies.tracingId 
) - val deleteNodeSkeletonAction = new DeleteNodeSkeletonAction(newNode.id, treeId = 1) + val deleteNodeSkeletonAction = + new DeleteNodeSkeletonAction(newNode.id, treeId = 1, actionTracingId = Dummies.tracingId) val result = deleteNodeSkeletonAction.applyOn(createNodeSkeletonAction.applyOn(Dummies.skeletonTracing)) assert(result == Dummies.skeletonTracing) } @@ -215,7 +226,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { "update a top level tree group" in { val updatedName = "Axon 2 updated" val updateTreeGroupsSkeletonAction = new UpdateTreeGroupsSkeletonAction( - List(UpdateActionTreeGroup(updatedName, 2, Some(true), List())) + List(UpdateActionTreeGroup(updatedName, 2, Some(true), List())), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) @@ -230,7 +242,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { UpdateActionTreeGroup(updatedNameTop, 1, Some(true), - List(UpdateActionTreeGroup(updatedNameNested, 3, Some(false), List())))) + List(UpdateActionTreeGroup(updatedNameNested, 3, Some(false), List())))), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) @@ -253,7 +266,8 @@ class SkeletonUpdateActionsUnitTestSuite extends PlaySpec { editPosition, editRotation, zoomLevel, - userBoundingBox + userBoundingBox, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateTreeGroupsSkeletonAction) assert(result.trees == Dummies.skeletonTracing.trees) diff --git a/test/backend/VolumeUpdateActionsUnitTestSuite.scala b/test/backend/VolumeUpdateActionsUnitTestSuite.scala index 91459fc614b..35dd3f9b0b4 100644 --- a/test/backend/VolumeUpdateActionsUnitTestSuite.scala +++ b/test/backend/VolumeUpdateActionsUnitTestSuite.scala @@ -3,8 +3,8 @@ package backend import com.scalableminds.util.geometry.Vec3Int import 
com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.tracingstore.tracings.UpdateAction import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ApplyableVolumeUpdateAction, CreateSegmentVolumeAction, DeleteSegmentVolumeAction, UpdateActionSegmentGroup, @@ -15,7 +15,7 @@ import org.scalatestplus.play._ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplicits { - private def applyUpdateAction(action: UpdateAction.VolumeUpdateAction): VolumeTracing = + private def applyUpdateAction(action: ApplyableVolumeUpdateAction): VolumeTracing = action.applyOn(Dummies.volumeTracing) "CreateSegmentVolumeAction" should { @@ -26,7 +26,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic color = None, name = Some("aSegment"), groupId = Some(1), - creationTime = Some(Dummies.timestampLong) + creationTime = Some(Dummies.timestampLong), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(createSegmentAction) @@ -39,7 +40,7 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic "DeleteSegmentVolumeAction" should { "delete the specified segment" in { - val deleteSegmentAction = DeleteSegmentVolumeAction(id = 5) + val deleteSegmentAction = DeleteSegmentVolumeAction(id = 5, actionTracingId = Dummies.tracingId) val result = applyUpdateAction(deleteSegmentAction) assert(result.segments.length == Dummies.volumeTracing.segments.length - 1) @@ -58,7 +59,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic name = Some("aRenamedSegment"), color = None, creationTime = Some(Dummies.timestampLong), - groupId = None + groupId = None, + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentAction) @@ -76,7 +78,8 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic 
"update a top level segment group" in { val updatedName = "Segment Group 2 updated" val updateSegmentGroupsVolumeAction = new UpdateSegmentGroupsVolumeAction( - List(UpdateActionSegmentGroup(updatedName, 2, isExpanded = Some(true), List())) + List(UpdateActionSegmentGroup(updatedName, 2, isExpanded = Some(true), List())), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentGroupsVolumeAction) assert(result.segments == Dummies.volumeTracing.segments) @@ -87,7 +90,13 @@ class VolumeUpdateActionsUnitTestSuite extends PlaySpec with ProtoGeometryImplic val updatedNameTop = "Segment Group 1 updated" val updatedNameNested = "Segment Group 3 updated" val updateSegmentGroupsVolumeAction = new UpdateSegmentGroupsVolumeAction( - List(UpdateActionSegmentGroup(updatedNameTop, 1, isExpanded = Some(true), List(UpdateActionSegmentGroup(updatedNameNested, 3, isExpanded = Some(false), List())))) + List( + UpdateActionSegmentGroup( + updatedNameTop, + 1, + isExpanded = Some(true), + List(UpdateActionSegmentGroup(updatedNameNested, 3, isExpanded = Some(false), List())))), + actionTracingId = Dummies.tracingId ) val result = applyUpdateAction(updateSegmentGroupsVolumeAction) assert(result.segments == Dummies.volumeTracing.segments) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index dcf07d1d5f8..83ca1ac6ece 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -5,7 +5,7 @@ import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import 
com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingUpdateAction, EditableMappingUpdater @@ -44,9 +44,11 @@ case class AnnotationWithTracings( def addTracing(a: AddLayerAnnotationUpdateAction): AnnotationWithTracings = AnnotationWithTracings( annotation.copy( - layers = annotation.layers :+ AnnotationLayerProto(a.tracingId, - a.layerName, - `type` = AnnotationLayerType.toProto(a.`type`))), + layers = annotation.layers :+ AnnotationLayerProto( + a.tracingId, + a.layerParameters.name.getOrElse(AnnotationLayer.defaultNameForType(a.layerParameters.typ)), + `type` = AnnotationLayerType.toProto(a.layerParameters.typ) + )), tracingsById, editableMappingsByTracingId ) From 3e9265eadee8996b0f3bcfcee22702d0d6cfde19 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 27 Aug 2024 10:09:43 +0200 Subject: [PATCH 036/361] initialize annotationProto. 
wip: call with annotationId --- app/models/annotation/AnnotationService.scala | 24 ++- .../WKRemoteTracingStoreClient.scala | 12 +- .../webknossos/datastore/rpc/RPCRequest.scala | 6 + .../annotation/TSAnnotationService.scala | 30 ++-- .../SkeletonTracingController.scala | 8 +- .../controllers/TSAnnotationController.scala | 12 +- .../controllers/TracingController.scala | 139 ++--------------- .../controllers/VolumeTracingController.scala | 145 ++++++++++++------ .../tracings/TracingSelector.scala | 2 +- .../tracings/TracingService.scala | 35 +++-- .../skeleton/SkeletonTracingService.scala | 59 +------ .../volume/VolumeTracingService.scala | 114 +++++++++----- ...alableminds.webknossos.tracingstore.routes | 66 ++++---- 13 files changed, 313 insertions(+), 339 deletions(-) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index d84364b475a..9401d9a8b5e 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -7,6 +7,7 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils} +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.geometry.{ @@ -205,6 +206,7 @@ class AnnotationService @Inject()( newAnnotationLayers <- createTracingsForExplorational( dataset, dataSource, + annotation._id, List(annotationLayerParameters), organizationName, annotation.annotationLayers) ?~> "annotation.createTracings.failed" @@ -218,6 +220,7 @@ class AnnotationService @Inject()( private def 
createTracingsForExplorational(dataset: Dataset, dataSource: DataSource, + annotationId: ObjectId, allAnnotationLayerParameters: List[AnnotationLayerParameters], datasetOrganizationName: String, existingAnnotationLayers: List[AnnotationLayer] = List())( @@ -351,6 +354,22 @@ class AnnotationService @Inject()( ) } + def createAndSaveAnnotationProto(annotationId: ObjectId, annotationLayers: List[AnnotationLayer]): Fox[Unit] = { + val layersProto = annotationLayers.map { l => + AnnotationLayerProto( + l.tracingId, + l.name, + AnnotationLayerType.toProto(l.typ) + ) + } + // todo pass right name, description here + val annotationProto = AnnotationProto(name = None, description = None, version = 0L, layers = layersProto) + for { + tracingStoreClient <- tracingStoreService.clientFor(dataset) + _ <- tracingStoreClient.saveAnnotationProto(annotationId, annotationProto) + } yield () + } + for { /* Note that the tracings have redundant properties, with a precedence logic selecting a layer @@ -366,6 +385,7 @@ class AnnotationService @Inject()( precedenceProperties = oldPrecedenceLayer.map(extractPrecedenceProperties) newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters)(p => createAndSaveAnnotationLayer(p, precedenceProperties, dataStore)) + _ <- createAndSaveAnnotationProto(annotationId, newAnnotationLayers) } yield newAnnotationLayers } @@ -393,13 +413,15 @@ class AnnotationService @Inject()( dataSource <- datasetService.dataSourceFor(dataset) datasetOrganization <- organizationDAO.findOne(dataset._organization) usableDataSource <- dataSource.toUsable ?~> Messages("dataset.notImported", dataSource.id.name) + newAnnotationId = ObjectId.generate annotationLayers <- createTracingsForExplorational( dataset, usableDataSource, + newAnnotationId, annotationLayerParameters, datasetOrganization.name) ?~> "annotation.createTracings.failed" teamId <- selectSuitableTeam(user, dataset) ?~> "annotation.create.forbidden" - annotation = Annotation(ObjectId.generate, 
datasetId, None, teamId, user._id, annotationLayers) + annotation = Annotation(newAnnotationId, datasetId, None, teamId, user._id, annotationLayers) _ <- annotationDAO.insertOne(annotation) } yield annotation diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 95f2c054f02..442a0b1a2ab 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -6,6 +6,7 @@ import com.scalableminds.util.io.ZipIO import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracings} import com.scalableminds.webknossos.datastore.models.VoxelSize @@ -23,6 +24,7 @@ import com.typesafe.scalalogging.LazyLogging import controllers.RpcTokenHolder import models.dataset.Dataset import net.liftweb.common.Box +import utils.ObjectId import scala.concurrent.ExecutionContext @@ -33,7 +35,7 @@ class WKRemoteTracingStoreClient( tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore)(implicit ec: ExecutionContext) extends LazyLogging { - def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}" + private def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}" def getSkeletonTracing(annotationLayer: AnnotationLayer, version: Option[Long]): Fox[FetchedAnnotationLayer] = { logger.debug("Called to get SkeletonTracing." 
+ baseInfo) @@ -80,6 +82,14 @@ class WKRemoteTracingStoreClient( .postProtoWithJsonResponse[SkeletonTracings, List[Box[Option[String]]]](tracings) } + def saveAnnotationProto(annotationId: ObjectId, annotationProto: AnnotationProto): Fox[Unit] = { + logger.debug("Called to save AnnotationProto." + baseInfo) + rpc(s"${tracingStore.url}/annotations/save") + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("annotationId" -> annotationId.toString) + .postProto[AnnotationProto](annotationProto) + } + def duplicateSkeletonTracing(skeletonTracingId: String, versionString: Option[String] = None, isFromTask: Boolean = false, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index e1b36d40e42..1d99f8aab03 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -167,6 +167,12 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: performRequest } + def postProto[T <: GeneratedMessage](body: T): Fox[Unit] = { + request = + request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> protobufMimeType).withBody(body.toByteArray).withMethod("POST") + performRequest.map(_ => ()) + } + def postProtoWithJsonResponse[T <: GeneratedMessage, J: Reads](body: T): Fox[J] = { request = request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> protobufMimeType).withBody(body.toByteArray).withMethod("POST") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c972d51358f..dc333ee665f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -61,7 +61,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl preprocessActionsForStorage(updateActionGroup)) bucketMutatingActions = findBucketMutatingActions(updateActionGroup) _ <- Fox.runIf(bucketMutatingActions.nonEmpty)( - volumeTracingService.applyBucketMutatingActions(bucketMutatingActions, updateActionGroup.version, userToken)) + volumeTracingService + .applyBucketMutatingActions(annotationId, bucketMutatingActions, updateActionGroup.version, userToken)) } yield () private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = @@ -140,25 +141,34 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield Json.toJson(updateActionGroupsJs) } - def get(annotationId: String, version: Option[Long], applyUpdates: Boolean, userToken: Option[String])( + def get(annotationId: String, version: Option[Long], userToken: Option[String])( implicit ec: ExecutionContext): Fox[AnnotationProto] = + for { + withTracings <- getWithTracings(annotationId, version, List.empty, userToken) + } yield withTracings.annotation + + def getWithTracings(annotationId: String, + version: Option[Long], + requestedTracingIds: List[String], + userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { annotationWithVersion <- tracingDataStore.annotations.get(annotationId, version)(fromProtoBytes[AnnotationProto]) annotation = annotationWithVersion.value - updated <- if (applyUpdates) applyPendingUpdates(annotation, annotationId, version, userToken) - else Fox.successful(annotation) + updated <- applyPendingUpdates(annotation, annotationId, version, requestedTracingIds, userToken) } yield updated - private def applyPendingUpdates(annotation: AnnotationProto, - annotationId: String, - targetVersionOpt: Option[Long], - userToken: 
Option[String])(implicit ec: ExecutionContext): Fox[AnnotationProto] = + private def applyPendingUpdates( + annotation: AnnotationProto, + annotationId: String, + targetVersionOpt: Option[Long], + requestedTracingIds: List[String], + userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) - annotationWithTracings <- findTracingsForUpdates(annotation, updates) + annotationWithTracings <- findTracingsForUpdates(annotation, updates) // TODO pass requested tracing ids updated <- applyUpdates(annotationWithTracings, annotationId, updates, targetVersion, userToken) - } yield updated.annotation + } yield updated private def findTracingsForUpdates(annotation: AnnotationProto, updates: List[UpdateAction])( implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index b2d0640788f..6bb280fe104 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -50,6 +50,7 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer } def duplicate(token: Option[String], + annotationId: String, tracingId: String, version: Option[Long], fromTask: Option[Boolean], @@ -60,7 +61,12 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer log() { accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId, version, 
applyUpdates = true) ?~> Messages("tracing.notFound") + tracing <- tracingService.find(annotationId, + tracingId, + version, + applyUpdates = true, + userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 6739b3697d0..b6873f0fa73 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -25,12 +25,13 @@ class TSAnnotationController @Inject()( extends Controller with KeyValueStoreImplicits { - def initialize(token: Option[String], annotationId: String): Action[AnyContent] = - Action.async { implicit request => + def save(token: Option[String], annotationId: String): Action[AnnotationProto] = + Action.async(validateProto[AnnotationProto]) { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { for { - _ <- tracingDataStore.annotations.put(annotationId, 0L, AnnotationProto(version = 0L)) + // TODO assert id does not already exist + _ <- tracingDataStore.annotations.put(annotationId, 0L, request.body) } yield Ok } } @@ -84,10 +85,7 @@ class TSAnnotationController @Inject()( accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId), urlOrHeaderToken(token, request)) { for { - annotationProto <- annotationService.get(annotationId, - version, - applyUpdates = false, - urlOrHeaderToken(token, request)) + 
annotationProto <- annotationService.get(annotationId, version, urlOrHeaderToken(token, request)) } yield Ok(annotationProto.toByteArray).as(protobufMimeType) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index bad8308b096..580593fccd8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -72,18 +72,21 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def get(token: Option[String], tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { - implicit request => + def get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId, version, applyUpdates = true) ?~> Messages("tracing.notFound") - } yield { - Ok(tracing.toByteArray).as(protobufMimeType) - } + tracing <- tracingService.find(annotationId, + tracingId, + version, + applyUpdates = true, + userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") + } yield Ok(tracing.toByteArray).as(protobufMimeType) } } - } + } def getMultiple(token: Option[String]): Action[List[Option[TracingSelector]]] = Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => @@ -98,127 +101,13 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def newestVersion(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - log() { - 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), token) { - for { - newestVersion <- tracingService.currentVersion(tracingId) ?~> "annotation.getNewestVersion.failed" - } yield { - JsonOk(Json.obj("version" -> newestVersion)) - } - } - } - } - - def update(token: Option[String], tracingId: String): Action[List[UpdateActionGroup]] = - Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => + def newestVersion(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { + implicit request => log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { - val updateGroups = request.body - if (updateGroups.forall(_.transactionGroupCount == 1)) { - commitUpdates(tracingId, updateGroups, urlOrHeaderToken(token, request)).map(_ => Ok) - } else { - updateGroups - .foldLeft(tracingService.currentVersion(tracingId)) { (currentCommittedVersionFox, updateGroup) => - handleUpdateGroupForTransaction(tracingId, - currentCommittedVersionFox, - updateGroup, - urlOrHeaderToken(token, request)) - } - .map(_ => Ok) - } - } - } - } - } - - private val transactionGroupExpiry: FiniteDuration = 24 hours - - private def handleUpdateGroupForTransaction(tracingId: String, - previousVersionFox: Fox[Long], - updateGroup: UpdateActionGroup, - userToken: Option[String]): Fox[Long] = - for { - previousCommittedVersion: Long <- previousVersionFox - result <- if (previousCommittedVersion + 1 == updateGroup.version) { - if (updateGroup.transactionGroupCount == updateGroup.transactionGroupIndex + 1) { - // Received the last group of this transaction - commitWithPending(tracingId, updateGroup, userToken) - } else { - tracingService - .saveUncommitted(tracingId, - updateGroup.transactionId, - updateGroup.transactionGroupIndex, - updateGroup.version, - updateGroup, - transactionGroupExpiry) - .flatMap( - 
_ => - tracingService.saveToHandledGroupIdStore(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) - .map(_ => previousCommittedVersion) // no updates have been committed, do not yield version increase + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), token) { + Fox.successful(JsonOk(Json.obj("version" -> 0L))) // TODO remove in favor of annotation-wide } - } else { - failUnlessAlreadyHandled(updateGroup, tracingId, previousCommittedVersion) } - } yield result - - // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction - // and commit them all. - private def commitWithPending(tracingId: String, - updateGroup: UpdateActionGroup, - userToken: Option[String]): Fox[Long] = - for { - previousActionGroupsToCommit <- tracingService.getAllUncommittedFor(tracingId, updateGroup.transactionId) - _ <- bool2Fox( - previousActionGroupsToCommit - .exists(_.transactionGroupIndex == 0) || updateGroup.transactionGroupCount == 1) ?~> s"Trying to commit a transaction without a group that has transactionGroupIndex 0." 
- concatenatedGroup = concatenateUpdateGroupsOfTransaction(previousActionGroupsToCommit, updateGroup) - commitResult <- commitUpdates(tracingId, List(concatenatedGroup), userToken) - _ <- tracingService.removeAllUncommittedFor(tracingId, updateGroup.transactionId) - } yield commitResult - - private def concatenateUpdateGroupsOfTransaction(previousActionGroups: List[UpdateActionGroup], - lastActionGroup: UpdateActionGroup): UpdateActionGroup = - if (previousActionGroups.isEmpty) lastActionGroup - else { - val allActionGroups = previousActionGroups :+ lastActionGroup - UpdateActionGroup( - version = lastActionGroup.version, - timestamp = lastActionGroup.timestamp, - authorId = lastActionGroup.authorId, - actions = allActionGroups.flatMap(_.actions), - stats = lastActionGroup.stats, // the latest stats do count - info = lastActionGroup.info, // frontend sets this identically for all groups of transaction - transactionId = f"${lastActionGroup.transactionId}-concatenated", - transactionGroupCount = 1, - transactionGroupIndex = 0, - ) - } - - // Perform version check and commit the passed updates - private def commitUpdates(tracingId: String, - updateGroups: List[UpdateActionGroup], - userToken: Option[String]): Fox[Long] = ??? - - /* If this update group has already been “handled” (successfully saved as either committed or uncommitted), - * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful - * despite the backend receiving it just fine. - */ - private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup, - tracingId: String, - previousVersion: Long): Fox[Long] = { - val errorMessage = s"Incorrect version. 
Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" - for { - _ <- Fox.assertTrue( - tracingService.handledGroupIdStoreContains(tracingId, - updateGroup.transactionId, - updateGroup.version, - updateGroup.transactionGroupIndex)) ?~> errorMessage ~> CONFLICT - } yield updateGroup.version } def mergedFromIds(token: Option[String], persist: Boolean): Action[List[Option[TracingSelector]]] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 4f24fcdccb0..6b5671eddaf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -90,6 +90,7 @@ class VolumeTracingController @Inject()( tracings.tracings.toList.map(_.tracing) def initialData(token: Option[String], + annotationId: String, tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]): Action[AnyContent] = @@ -99,10 +100,11 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) resolutions <- tracingService - .initializeWithData(tracingId, tracing, initialData, resolutionRestrictions, token) + .initializeWithData(annotationId, tracingId, tracing, initialData, resolutionRestrictions, token) .toFox _ <- tracingService.updateResolutionList(tracingId, 
tracing, resolutions) } yield Ok(Json.toJson(tracingId)) @@ -130,23 +132,27 @@ class VolumeTracingController @Inject()( } } - def initialDataMultiple(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => + def initialDataMultiple(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = + Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - resolutions <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData, token).toFox + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") + resolutions <- tracingService + .initializeWithDataMultiple(annotationId, tracingId, tracing, initialData, token) + .toFox _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) } yield Ok(Json.toJson(tracingId)) } } } - } + } def allDataZip(token: Option[String], + annotationId: String, tracingId: String, volumeDataZipFormat: String, version: Option[Long], @@ -156,7 +162,11 @@ class VolumeTracingController @Inject()( log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId, version) ?~> Messages("tracing.notFound") + tracing <- tracingService.find(annotationId, + tracingId, + version, + userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- 
Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) @@ -173,12 +183,13 @@ class VolumeTracingController @Inject()( } } - def data(token: Option[String], tracingId: String): Action[List[WebknossosDataRequest]] = + def data(token: Option[String], annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") (data, indices) <- if (tracing.getHasEditableMapping) editableMappingService.volumeData(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) else tracingService.data(tracingId, tracing, request.body) @@ -194,6 +205,7 @@ class VolumeTracingController @Inject()( "[" + indices.mkString(", ") + "]" def duplicate(token: Option[String], + annotationId: String, tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], @@ -207,7 +219,8 @@ class VolumeTracingController @Inject()( val userToken = urlOrHeaderToken(token, request) accessTokenService.validateAccess(UserAccessRequest.webknossos, userToken) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId, userToken = userToken) ?~> Messages( + "tracing.notFound") _ = logger.info(s"Duplicating volume tracing $tracingId...") datasetBoundingBox = request.body.asJson.flatMap(_.validateOpt[BoundingBox].asOpt.flatten) resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) @@ -219,6 +232,7 @@ class VolumeTracingController @Inject()( newEditableMappingId <- Fox.runIf(tracing.getHasEditableMapping)( editableMappingService.duplicate(tracing.mappingName, version 
= None, remoteFallbackLayerOpt, userToken)) (newId, newTracing) <- tracingService.duplicate( + annotationId, tracingId, tracing, fromTask.getOrElse(false), @@ -237,15 +251,19 @@ class VolumeTracingController @Inject()( } } - def importVolumeData(token: Option[String], tracingId: String): Action[MultipartFormData[TemporaryFile]] = + def importVolumeData(token: Option[String], + annotationId: String, + tracingId: String): Action[MultipartFormData[TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox zipFile <- request.body.files.headOption.map(f => new File(f.ref.path.toString)).toFox - largestSegmentId <- tracingService.importVolumeData(tracingId, + largestSegmentId <- tracingService.importVolumeData(annotationId, + tracingId, tracing, zipFile, currentVersion, @@ -255,17 +273,22 @@ class VolumeTracingController @Inject()( } } - def addSegmentIndex(token: Option[String], tracingId: String, dryRun: Boolean): Action[AnyContent] = + def addSegmentIndex(token: Option[String], + annotationId: String, + tracingId: String, + dryRun: Boolean): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") currentVersion <- tracingService.currentVersion(tracingId) before = Instant.now canAddSegmentIndex <- 
tracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing, token) processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)( - tracingService.addSegmentIndex(tracingId, + tracingService.addSegmentIndex(annotationId, + tracingId, tracing, currentVersion, urlOrHeaderToken(token, request), @@ -296,17 +319,20 @@ class VolumeTracingController @Inject()( } } - def requestAdHocMesh(token: Option[String], tracingId: String): Action[WebknossosAdHocMeshRequest] = + def requestAdHocMesh(token: Option[String], + annotationId: String, + tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. 
- tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") (vertices, neighbors) <- if (tracing.getHasEditableMapping) editableMappingService.createAdHocMesh(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) - else tracingService.createAdHocMesh(tracingId, request.body, urlOrHeaderToken(token, request)) + else tracingService.createAdHocMesh(annotationId, tracingId, request.body, urlOrHeaderToken(token, request)) } yield { // We need four bytes for each float val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) @@ -331,21 +357,25 @@ class VolumeTracingController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", ") + "]" - def findData(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - for { - positionOpt <- tracingService.findData(tracingId) - } yield { - Ok(Json.obj("position" -> positionOpt, "resolution" -> positionOpt.map(_ => Vec3Int.ones))) + def findData(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { + implicit request => + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + for { + positionOpt <- tracingService.findData(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + } yield { + Ok(Json.obj("position" -> positionOpt, "resolution" -> positionOpt.map(_ => Vec3Int.ones))) + } } - } } - def agglomerateSkeleton(token: Option[String], tracingId: String, agglomerateId: Long): Action[AnyContent] = + def agglomerateSkeleton(token: Option[String], + annotationId: String, + tracingId: String, + 
agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" mappingName <- tracing.mappingName ?~> "annotation.agglomerateSkeleton.noMappingSet" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) @@ -358,12 +388,12 @@ class VolumeTracingController @Inject()( } } - def makeMappingEditable(token: Option[String], tracingId: String): Action[AnyContent] = + def makeMappingEditable(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" _ <- assertMappingIsNotLocked(tracing) _ <- bool2Fox(tracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" @@ -400,12 +430,12 @@ class VolumeTracingController @Inject()( private def assertMappingIsNotLocked(volumeTracing: VolumeTracing): Fox[Unit] = bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" - def agglomerateGraphMinCut(token: Option[String], tracingId: String): Action[MinCutParameters] = + def agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String): Action[MinCutParameters] = Action.async(validateJson[MinCutParameters]) { implicit request => log() { 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) edges <- editableMappingService.agglomerateGraphMinCut(request.body, remoteFallbackLayer, token) @@ -414,12 +444,14 @@ class VolumeTracingController @Inject()( } } - def agglomerateGraphNeighbors(token: Option[String], tracingId: String): Action[NeighborsParameters] = + def agglomerateGraphNeighbors(token: Option[String], + annotationId: String, + tracingId: String): Action[NeighborsParameters] = Action.async(validateJson[NeighborsParameters]) { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(request.body, @@ -430,11 +462,13 @@ class VolumeTracingController @Inject()( } } - def updateEditableMapping(token: Option[String], tracingId: String): Action[List[UpdateActionGroup]] = + def updateEditableMapping(token: Option[String], + annotationId: String, + tracingId: String): Action[List[UpdateActionGroup]] = Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- 
tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) mappingName <- tracing.mappingName.toFox _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) @@ -460,12 +494,15 @@ class VolumeTracingController @Inject()( } } - def editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long]): Action[AnyContent] = + def editableMappingInfo(token: Option[String], + annotationId: String, + tracingId: String, + version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) mappingName <- tracing.mappingName.toFox remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) editableMappingInfo <- editableMappingService.getInfo(mappingName, @@ -481,12 +518,14 @@ class VolumeTracingController @Inject()( } } - def editableMappingAgglomerateIdsForSegments(token: Option[String], tracingId: String): Action[ListOfLong] = + def editableMappingAgglomerateIdsForSegments(token: Option[String], + annotationId: String, + tracingId: String): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) editableMappingId <- tracing.mappingName.toFox remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) (editableMappingInfo, editableMappingVersion) <- 
editableMappingService.getInfoAndActualVersion( @@ -508,13 +547,14 @@ class VolumeTracingController @Inject()( } def editableMappingSegmentIdsForAgglomerate(token: Option[String], + annotationId: String, tracingId: String, agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) mappingName <- tracing.mappingName.toFox remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService @@ -534,11 +574,13 @@ class VolumeTracingController @Inject()( } } - def getSegmentVolume(token: Option[String], tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentVolume(token: Option[String], + annotationId: String, + tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) mappingName <- tracingService.baseMappingName(tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentVolume(tracingId, @@ -552,11 +594,13 @@ class VolumeTracingController @Inject()( } } - def getSegmentBoundingBox(token: Option[String], tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentBoundingBox(token: Option[String], + annotationId: String, + tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit 
request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) mappingName <- tracingService.baseMappingName(tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentBoundingBox(tracingId, @@ -570,12 +614,15 @@ class VolumeTracingController @Inject()( } } - def getSegmentIndex(token: Option[String], tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = + def getSegmentIndex(token: Option[String], + annotationId: String, + tracingId: String, + segmentId: Long): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - fallbackLayer <- tracingService.getFallbackLayer(tracingId) - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + fallbackLayer <- tracingService.getFallbackLayer(annotationId, tracingId, urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) mappingName <- tracingService.baseMappingName(tracing) _ <- bool2Fox(DataLayer.bucketSize <= request.body.cubeSize) ?~> "cubeSize must be at least one bucket (32³)" bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingSelector.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingSelector.scala index 0329c57e34a..14598c9d5aa 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingSelector.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingSelector.scala @@ -2,6 +2,6 @@ package com.scalableminds.webknossos.tracingstore.tracings import play.api.libs.json.{Json, OFormat} -case class TracingSelector(tracingId: String, version: Option[Long] = None) +case class TracingSelector(tracingId: String, version: Option[Long] = None) // TODO must pass annotation id object TracingSelector { implicit val jsonFormat: OFormat[TracingSelector] = Json.format[TracingSelector] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 7eacf130c2a..8bba698ee1b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -1,8 +1,13 @@ package com.scalableminds.webknossos.tracingstore.tracings import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup +import com.scalableminds.webknossos.tracingstore.annotation.{ + AnnotationWithTracings, + TSAnnotationService, + UpdateActionGroup +} import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.typesafe.scalalogging.LazyLogging @@ -39,6 +44,8 @@ trait TracingService[T <: GeneratedMessage] def tracingMigrationService: TracingMigrationService[T] + def annotationService: TSAnnotationService + def dummyTracing: T val handledGroupIdStore: TracingStoreRedisStore @@ -110,27 +117,21 @@ trait TracingService[T <: GeneratedMessage] 
def applyPendingUpdates(tracing: T, tracingId: String, targetVersion: Option[Long]): Fox[T] = Fox.successful(tracing) - def find(tracingId: String, + protected def takeTracing(annotation: AnnotationWithTracings, tracingId: String): Box[T] + + def find(annotationId: String, + tracingId: String, version: Option[Long] = None, useCache: Boolean = true, - applyUpdates: Boolean = false): Fox[T] = + applyUpdates: Boolean = false, + userToken: Option[String]): Fox[T] = if (tracingId == TracingIds.dummyTracingId) Fox.successful(dummyTracing) else { - val tracingFox = tracingStore.get(tracingId, version)(fromProtoBytes[T]).map(_.value) - tracingFox.flatMap { tracing => - val updatedTracing = if (applyUpdates) { - applyPendingUpdates(tracing, tracingId, version) - } else { - Fox.successful(tracing) - } - migrateTracing(updatedTracing, tracingId) - }.orElse { - if (useCache) - temporaryTracingStore.find(tracingId) - else - tracingFox - } + for { + annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), userToken) // TODO is applyUpdates still needed? 
+      for {
+        annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), userToken) // TODO is applyUpdates still needed?
+        tracing <- takeTracing(annotation, tracingId)
+      } yield tracing
    }

  def findMultiple(selectors: List[Option[TracingSelector]],
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala
index a70eb859f7d..15b77bf10f0 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala
@@ -8,7 +8,11 @@ import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto
 import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults}
 import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis
 import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore
-import com.scalableminds.webknossos.tracingstore.annotation.LayerUpdateAction
+import com.scalableminds.webknossos.tracingstore.annotation.{
+  AnnotationWithTracings,
+  LayerUpdateAction,
+  TSAnnotationService
+}
 import com.scalableminds.webknossos.tracingstore.tracings._
 import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._
 import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats
@@ -23,6 +27,7 @@ class SkeletonTracingService @Inject()(
     val handledGroupIdStore: TracingStoreRedisStore,
     val temporaryTracingIdStore: TracingStoreRedisStore,
     val uncommittedUpdatesStore: TracingStoreRedisStore,
+    val annotationService: TSAnnotationService,
     val tracingMigrationService: SkeletonTracingMigrationService)(implicit val ec: ExecutionContext)
     extends TracingService[SkeletonTracing]
     with KeyValueStoreImplicits
@@ -40,22 +45,8 @@ class SkeletonTracingService @Inject()(
 
   def currentVersion(tracing: SkeletonTracing): Long = tracing.version
- override def applyPendingUpdates(tracing: SkeletonTracing, - tracingId: String, - desiredVersion: Option[Long]): Fox[SkeletonTracing] = { - val existingVersion = tracing.version - findDesiredOrNewestPossibleVersion(tracing, tracingId, desiredVersion).flatMap { newVersion => - if (newVersion > existingVersion) { - for { - pendingUpdates <- findPendingUpdates(tracingId, existingVersion, newVersion) - updatedTracing <- update(tracing, tracingId, pendingUpdates, newVersion) - _ <- save(updatedTracing, Some(tracingId), newVersion) - } yield updatedTracing - } else { - Full(tracing) - } - } - } + protected def takeTracing(annotation: AnnotationWithTracings, tracingId: String): Box[SkeletonTracing] = + annotation.getSkeleton(tracingId) private def findDesiredOrNewestPossibleVersion(tracing: SkeletonTracing, tracingId: String, @@ -76,40 +67,6 @@ class SkeletonTracingService @Inject()( } } - private def findPendingUpdates(tracingId: String, - existingVersion: Long, - desiredVersion: Long): Fox[List[SkeletonUpdateAction]] = ??? - - private def applyUpdateOn(tracing: SkeletonTracing, update: LayerUpdateAction): SkeletonTracing = ??? 
- - private def update(tracing: SkeletonTracing, - tracingId: String, - updates: List[SkeletonUpdateAction], - newVersion: Long): Fox[SkeletonTracing] = { - def updateIter(tracingFox: Fox[SkeletonTracing], - remainingUpdates: List[SkeletonUpdateAction]): Fox[SkeletonTracing] = - tracingFox.futureBox.flatMap { - case Empty => Fox.empty - case Full(tracing) => - remainingUpdates match { - case List() => Fox.successful(tracing) - case RevertToVersionSkeletonAction(sourceVersion, tracingId, _, _, _) :: tail => - val sourceTracing = find(tracingId, Some(sourceVersion), useCache = false, applyUpdates = true) - updateIter(sourceTracing, tail) - case update :: tail => updateIter(Full(applyUpdateOn(tracing, update)), tail) - } - case _ => tracingFox - } - - updates match { - case List() => Full(tracing) - case _ :: _ => - for { - updated <- updateIter(Some(tracing), updates) - } yield updated.withVersion(newVersion) - } - } - def duplicate(tracing: SkeletonTracing, fromTask: Boolean, editPosition: Option[Vec3Int], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 95dae6636b7..efeef014b49 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -6,6 +6,7 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import 
com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto @@ -22,7 +23,11 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.services._ -import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup +import com.scalableminds.webknossos.tracingstore.annotation.{ + AnnotationWithTracings, + TSAnnotationService, + UpdateActionGroup +} import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService @@ -58,6 +63,7 @@ class VolumeTracingService @Inject()( editableMappingService: EditableMappingService, val temporaryTracingIdStore: TracingStoreRedisStore, val remoteDatastoreClient: TSRemoteDatastoreClient, + val annotationService: TSAnnotationService, val remoteWebknossosClient: TSRemoteWebknossosClient, val temporaryFileCreator: TemporaryFileCreator, val tracingMigrationService: VolumeTracingMigrationService, @@ -90,7 +96,8 @@ class VolumeTracingService @Inject()( adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService - private val fallbackLayerCache: AlfuCache[String, Option[RemoteFallbackLayer]] = AlfuCache(maxCapacity = 100) + private val fallbackLayerCache: AlfuCache[(String, String, Option[String]), Option[RemoteFallbackLayer]] = AlfuCache( + maxCapacity = 100) override def currentVersion(tracingId: String): Fox[Long] = tracingDataStore.volumes.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) @@ -113,15 +120,16 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" - def 
applyBucketMutatingActions(updateActions: List[BucketMutatingVolumeUpdateAction], + def applyBucketMutatingActions(annotationId: String, + updateActions: List[BucketMutatingVolumeUpdateAction], newVersion: Long, userToken: Option[String]): Fox[Unit] = for { // warning, may be called multiple times with the same version number (due to transaction management). // frontend ensures that each bucket is only updated once per transaction tracingId <- updateActions.headOption.map(_.actionTracingId).toFox - fallbackLayerOpt <- getFallbackLayer(tracingId) - tracing <- find(tracingId) ?~> "tracing.notFound" + fallbackLayerOpt <- getFallbackLayer(annotationId, tracingId, userToken) + tracing <- find(annotationId, tracingId, userToken = userToken) ?~> "tracing.notFound" segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -142,7 +150,13 @@ class VolumeTracingService @Inject()( if (!tracing.getHasSegmentIndex) { Fox.failure("Cannot delete segment data for annotations without segment index.") } else - deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, newVersion, userToken) ?~> "Failed to delete segment data." + deleteSegmentData(annotationId, + tracingId, + tracing, + a, + segmentIndexBuffer, + newVersion, + userToken = userToken) ?~> "Failed to delete segment data." 
case _ => Fox.failure("Unknown bucket-mutating action.") } _ <- segmentIndexBuffer.flush() @@ -180,6 +194,9 @@ class VolumeTracingService @Inject()( } } yield volumeTracing + protected def takeTracing(annotation: AnnotationWithTracings, tracingId: String): Box[VolumeTracing] = + annotation.getVolume(tracingId) + override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = if (tracing.getHasEditableMapping) Some(tracingId) else None @@ -188,7 +205,8 @@ class VolumeTracingService @Inject()( tracing.mappingName.map(editableMappingService.getBaseMappingName).getOrElse(Fox.successful(None)) else Fox.successful(tracing.mappingName) - private def deleteSegmentData(tracingId: String, + private def deleteSegmentData(annotationId: String, + tracingId: String, volumeTracing: VolumeTracing, a: DeleteSegmentDataVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, @@ -208,7 +226,7 @@ class VolumeTracingService @Inject()( Fox.serialCombined(additionalCoordinateList)(additionalCoordinates => { val mag = vec3IntFromProto(resolution) for { - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) bucketPositionsRaw <- volumeSegmentIndexService.getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( fallbackLayer, tracingId, @@ -258,7 +276,8 @@ class VolumeTracingService @Inject()( bool2Fox(mag.isIsotropic) } - private def revertToVolumeVersion(tracingId: String, + private def revertToVolumeVersion(annotationId: String, + tracingId: String, sourceVersion: Long, newVersion: Long, tracing: VolumeTracing, @@ -268,7 +287,7 @@ class VolumeTracingService @Inject()( val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion() for { - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, newVersion, @@ -276,7 +295,7 @@ class 
VolumeTracingService @Inject()( fallbackLayer, dataLayer.additionalAxes, userToken) - sourceTracing <- find(tracingId, Some(sourceVersion)) + sourceTracing <- find(annotationId, tracingId, Some(sourceVersion), userToken = userToken) mappingName <- baseMappingName(sourceTracing) _ <- Fox.serialCombined(bucketStream) { case (bucketPosition, dataBeforeRevert, version) => @@ -319,7 +338,8 @@ class VolumeTracingService @Inject()( } yield sourceTracing } - def initializeWithDataMultiple(tracingId: String, + def initializeWithDataMultiple(annotationId: String, + tracingId: String, tracing: VolumeTracing, initialData: File, userToken: Option[String])(implicit mp: MessagesProvider): Fox[Set[Vec3Int]] = @@ -359,7 +379,7 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) destinationDataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -389,7 +409,8 @@ class VolumeTracingService @Inject()( } yield resolutions } - def initializeWithData(tracingId: String, + def initializeWithData(annotationId: String, + tracingId: String, tracing: VolumeTracing, initialData: File, resolutionRestrictions: ResolutionRestrictions, @@ -400,7 +421,7 @@ class VolumeTracingService @Inject()( val dataLayer = volumeTracingLayer(tracingId, tracing) val savedResolutions = new mutable.HashSet[Vec3Int]() for { - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) mappingName <- baseMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, @@ -493,7 +514,8 @@ class VolumeTracingService @Inject()( data <- binaryDataService.handleDataRequests(requests) } yield data - def duplicate(tracingId: String, + def duplicate(annotationId: String, + tracingId: String, 
sourceTracing: VolumeTracing, fromTask: Boolean, datasetBoundingBox: Option[BoundingBox], @@ -506,7 +528,7 @@ class VolumeTracingService @Inject()( val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, fromTask, datasetBoundingBox) val tracingWithResolutionRestrictions = restrictMagList(tracingWithBB, resolutionRestrictions) for { - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer, userToken) newTracing = tracingWithResolutionRestrictions.copy( createdTimestamp = System.currentTimeMillis(), @@ -520,7 +542,7 @@ class VolumeTracingService @Inject()( ) _ <- bool2Fox(newTracing.resolutions.nonEmpty) ?~> "resolutionRestrictions.tooTight" newId <- save(newTracing, None, newTracing.version) - _ <- duplicateData(tracingId, sourceTracing, newId, newTracing, userToken) + _ <- duplicateData(annotationId, tracingId, sourceTracing, newId, newTracing, userToken) } yield (newId, newTracing) } @@ -540,7 +562,8 @@ class VolumeTracingService @Inject()( .withBoundingBox(datasetBoundingBox.get) } else tracing - private def duplicateData(sourceId: String, + private def duplicateData(annotationId: String, + sourceId: String, sourceTracing: VolumeTracing, destinationId: String, destinationTracing: VolumeTracing, @@ -550,7 +573,7 @@ class VolumeTracingService @Inject()( sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) - fallbackLayer <- getFallbackLayer(sourceId) + fallbackLayer <- getFallbackLayer(annotationId, sourceId, userToken) segmentIndexBuffer = new VolumeSegmentIndexBuffer( destinationId, volumeSegmentIndexClient, @@ -645,11 +668,12 @@ class VolumeTracingService @Inject()( def 
volumeBucketsAreEmpty(tracingId: String): Boolean = volumeDataStore.getMultipleKeys(None, Some(tracingId), limit = Some(1))(toBox).isEmpty - def createAdHocMesh(tracingId: String, + def createAdHocMesh(annotationId: String, + tracingId: String, request: WebknossosAdHocMeshRequest, userToken: Option[String]): Fox[(Array[Float], List[Int])] = for { - tracing <- find(tracingId) ?~> "tracing.notFound" + tracing <- find(annotationId: String, tracingId, userToken = userToken) ?~> "tracing.notFound" segmentationLayer = volumeTracingLayer(tracingId, tracing, includeFallbackDataIfAvailable = true, @@ -668,9 +692,9 @@ class VolumeTracingService @Inject()( result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield result - def findData(tracingId: String): Fox[Option[Vec3Int]] = + def findData(annotationId: String, tracingId: String, userToken: Option[String]): Fox[Option[Vec3Int]] = for { - tracing <- find(tracingId) ?~> "tracing.notFound" + tracing <- find(annotationId: String, tracingId, userToken = userToken) ?~> "tracing.notFound" volumeLayer = volumeTracingLayer(tracingId, tracing) bucketStream = volumeLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketPosOpt = if (bucketStream.hasNext) { @@ -816,7 +840,7 @@ class VolumeTracingService @Inject()( elementClass) mergedAdditionalAxes <- Fox.box2Fox(AdditionalAxis.mergeAndAssertSameAdditionalAxes(tracings.map(t => AdditionalAxis.fromProtosAsOpt(t.additionalAxes)))) - fallbackLayer <- getFallbackLayer(tracingSelectors.head.tracingId) + fallbackLayer <- getFallbackLayer("dummyAnnotationId", tracingSelectors.head.tracingId, userToken) // TODO annotation id from selectors segmentIndexBuffer = new VolumeSegmentIndexBuffer(newId, volumeSegmentIndexClient, newVersion, @@ -842,7 +866,8 @@ class VolumeTracingService @Inject()( } } - def addSegmentIndex(tracingId: String, + def addSegmentIndex(annotationId: String, + tracingId: String, tracing: VolumeTracing, currentVersion: Long, userToken: 
Option[String], @@ -852,7 +877,7 @@ class VolumeTracingService @Inject()( isTemporaryTracing <- isTemporaryTracing(tracingId) sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) mappingName <- baseMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, @@ -899,7 +924,8 @@ class VolumeTracingService @Inject()( alreadyHasSegmentIndex = tracing.hasSegmentIndex.getOrElse(false) } yield canHaveSegmentIndex && !alreadyHasSegmentIndex - def importVolumeData(tracingId: String, + def importVolumeData(annotationId: String, + tracingId: String, tracing: VolumeTracing, zipFile: File, currentVersion: Int, @@ -930,7 +956,7 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) dataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(tracingId) + fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) mappingName <- baseMappingName(tracing) segmentIndexBuffer <- Fox.successful( new VolumeSegmentIndexBuffer(tracingId, @@ -995,19 +1021,23 @@ class VolumeTracingService @Inject()( Fox.failure("Cannot merge tracings with and without editable mappings") } - def getFallbackLayer(tracingId: String): Fox[Option[RemoteFallbackLayer]] = - fallbackLayerCache.getOrLoad(tracingId, t => getFallbackLayerFromWebknossos(t)) + def getFallbackLayer(annotationId: String, + tracingId: String, + userToken: Option[String]): Fox[Option[RemoteFallbackLayer]] = + fallbackLayerCache.getOrLoad((annotationId, tracingId, userToken), + t => getFallbackLayerFromWebknossos(t._1, t._2, t._3)) - private def getFallbackLayerFromWebknossos(tracingId: String) = Fox[Option[RemoteFallbackLayer]] { - for { - tracing <- find(tracingId) - 
dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) - dataSourceId = dataSource.id - fallbackLayerName = tracing.fallbackLayer - fallbackLayer = dataSource.dataLayers - .find(_.name == fallbackLayerName.getOrElse("")) - .map(RemoteFallbackLayer.fromDataLayerAndDataSource(_, dataSourceId)) - } yield fallbackLayer - } + private def getFallbackLayerFromWebknossos(annotationId: String, tracingId: String, userToken: Option[String]) = + Fox[Option[RemoteFallbackLayer]] { + for { + tracing <- find(annotationId, tracingId, userToken = userToken) + dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) + dataSourceId = dataSource.id + fallbackLayerName = tracing.fallbackLayer + fallbackLayer = dataSource.dataLayers + .find(_.name == fallbackLayerName.getOrElse("")) + .map(RemoteFallbackLayer.fromDataLayerAndDataSource(_, dataSourceId)) + } yield fallbackLayer + } } diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 195cba40b8b..e033c9315af 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -5,43 +5,42 @@ # Health endpoint GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health -POST /annotation/initialize @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.initialize(token: Option[String], annotationId: String) +POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(token: Option[String], annotationId: String) GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(token: Option[String], annotationId: String, version: Option[Long]) POST /annotation/:annotationId/update 
@com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(token: Option[String], annotationId: String) POST /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(token: Option[String], annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(token: Option[String], annotationId: String) # Volume tracings -POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) -POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(token: Option[String], tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) -POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(token: Option[String], tracingId: String) -GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(token: Option[String], tracingId: String, version: Option[Long]) -GET /volume/:tracingId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.newestVersion(token: Option[String], tracingId: String) -POST /volume/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.update(token: Option[String], tracingId: String) -GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(token: Option[String], tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) -POST /volume/:tracingId/data 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(token: Option[String], tracingId: String) -POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(token: Option[String], tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) -POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(token: Option[String], tracingId: String) -POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(token: Option[String], tracingId: String) -POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(token: Option[String], tracingId: String, segmentId: Long) -POST /volume/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(token: Option[String], tracingId: String) -POST /volume/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(token: Option[String], tracingId: String, dryRun: Boolean) -GET /volume/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(token: Option[String], tracingId: String) -GET /volume/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateSkeleton(token: Option[String], tracingId: String, agglomerateId: Long) -POST /volume/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.makeMappingEditable(token: Option[String], tracingId: String) -POST 
/volume/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphMinCut(token: Option[String], tracingId: String) -POST /volume/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphNeighbors(token: Option[String], tracingId: String) -POST /volume/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(token: Option[String], tracingId: String) -POST /volume/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(token: Option[String], tracingId: String) -POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple(token: Option[String]) -POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(token: Option[String], persist: Boolean) -POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(token: Option[String], persist: Boolean) +POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) +POST /volume/:annotationId/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(token: Option[String], annotationId: String, tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) +POST /volume/:annotationId/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(token: Option[String], annotationId: String, tracingId: String) +GET /volume/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(token: Option[String], 
annotationId: String, tracingId: String, version: Option[Long]) +GET /volume/:annotationId/:tracingId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.newestVersion(token: Option[String], annotationId: String, tracingId: String) +GET /volume/:annotationId/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(token: Option[String], annotationId: String, tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) +POST /volume/:annotationId/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(token: Option[String], annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(token: Option[String], annotationId: String, tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /volume/:annotationId/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(token: Option[String], annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(token: Option[String], annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(token: Option[String], annotationId: String, tracingId: String, segmentId: Long) +POST /volume/:annotationId/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(token: 
Option[String], annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(token: Option[String], annotationId: String, tracingId: String, dryRun: Boolean) +GET /volume/:annotationId/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(token: Option[String], annotationId: String, tracingId: String) +GET /volume/:annotationId/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateSkeleton(token: Option[String], annotationId: String, tracingId: String, agglomerateId: Long) +POST /volume/:annotationId/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.makeMappingEditable(token: Option[String], annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphNeighbors(token: Option[String], annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(token: Option[String], annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(token: Option[String], annotationId: String, tracingId: String) +POST /volume/getMultiple 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple(token: Option[String]) +POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(token: Option[String], persist: Boolean) +POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(token: Option[String], persist: Boolean) # Editable Mappings -POST /mapping/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateEditableMapping(token: Option[String], tracingId: String) -GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long]) -GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingSegmentIdsForAgglomerate(token: Option[String], tracingId: String, agglomerateId: Long) -POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingAgglomerateIdsForSegments(token: Option[String], tracingId: String) +POST /mapping/:annotationId/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateEditableMapping(token: Option[String], annotationId: String, tracingId: String) +GET /mapping/:annotationId/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingInfo(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) +GET /mapping/:annotationId/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingSegmentIdsForAgglomerate(token: Option[String], annotationId: String, tracingId: String, agglomerateId: Long) +POST 
/mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingAgglomerateIdsForSegments(token: Option[String], annotationId: String, tracingId: String) # Zarr endpoints for volume annotations # Zarr version 2 @@ -76,9 +75,8 @@ POST /skeleton/saveMultiple @com.scalablemin POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(token: Option[String], persist: Boolean) POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(token: Option[String], persist: Boolean) -GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(token: Option[String], tracingId: String, version: Option[Long]) -GET /skeleton/:tracingId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.newestVersion(token: Option[String], tracingId: String) -POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String]) +GET /skeleton/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) +GET /skeleton/:annotationId/:tracingId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.newestVersion(token: Option[String], annotationId: String, tracingId: String) +POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String]) -POST /skeleton/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.update(token: Option[String], tracingId: String) -POST /skeleton/:tracingId/duplicate 
@com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /skeleton/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], annotationId: String, tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) From 0105e0e42e09bd4914bd7172039a4330d288d36e Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 27 Aug 2024 10:20:03 +0200 Subject: [PATCH 037/361] pass annotation id + token to more spots --- .../controllers/TracingController.scala | 8 +++- .../controllers/VolumeTracingController.scala | 13 +++--- .../tracings/TracingService.scala | 9 ++-- .../tracings/volume/TSFullMeshService.scala | 44 ++++++++++++------- .../VolumeSegmentStatisticsService.scala | 23 +++++----- .../volume/VolumeTracingDownsampling.scala | 7 +-- .../volume/VolumeTracingService.scala | 7 +-- 7 files changed, 69 insertions(+), 42 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 580593fccd8..a2db158e6a6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -93,7 +93,9 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C log() { accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { for { - tracings <- tracingService.findMultiple(request.body, 
applyUpdates = true) + tracings <- tracingService.findMultiple(request.body, + applyUpdates = true, + userToken = urlOrHeaderToken(token, request)) } yield { Ok(tracings.toByteArray).as(protobufMimeType) } @@ -115,7 +117,9 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C log() { accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { for { - tracingOpts <- tracingService.findMultiple(request.body, applyUpdates = true) ?~> Messages( + tracingOpts <- tracingService.findMultiple(request.body, + applyUpdates = true, + userToken = urlOrHeaderToken(token, request)) ?~> Messages( "tracing.notFound") tracingsWithIds = tracingOpts.zip(request.body).flatMap { case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 6b5671eddaf..477f70a2c66 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -244,7 +244,8 @@ class VolumeTracingController @Inject()( newEditableMappingId, userToken ) - _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(newId, tracingId, newTracing, userToken)) + _ <- Fox.runIfOptionTrue(downsample)( + tracingService.downsample(annotationId, newId, tracingId, newTracing, userToken)) } yield Ok(Json.toJson(newId)) } } @@ -342,11 +343,11 @@ class VolumeTracingController @Inject()( } } - def loadFullMeshStl(token: Option[String], tracingId: String): Action[FullMeshRequest] = + def loadFullMeshStl(token: Option[String], annotationId: String, tracingId: String): Action[FullMeshRequest] = 
Action.async(validateJson[FullMeshRequest]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], tracingId, request.body) ?~> "mesh.file.loadChunk.failed" + data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], annotationId, tracingId, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } } @@ -583,7 +584,8 @@ class VolumeTracingController @Inject()( tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) mappingName <- tracingService.baseMappingName(tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => - volumeSegmentStatisticsService.getSegmentVolume(tracingId, + volumeSegmentStatisticsService.getSegmentVolume(annotationId, + tracingId, segmentId, request.body.mag, mappingName, @@ -603,7 +605,8 @@ class VolumeTracingController @Inject()( tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) mappingName <- tracingService.baseMappingName(tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => - volumeSegmentStatisticsService.getSegmentBoundingBox(tracingId, + volumeSegmentStatisticsService.getSegmentBoundingBox(annotationId, + tracingId, segmentId, request.body.mag, mappingName, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 8bba698ee1b..bf59ca54551 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -136,11 +136,14 @@ trait TracingService[T <: GeneratedMessage] def 
findMultiple(selectors: List[Option[TracingSelector]], useCache: Boolean = true, - applyUpdates: Boolean = false): Fox[List[Option[T]]] = + applyUpdates: Boolean = false, + userToken: Option[String]): Fox[List[Option[T]]] = Fox.combined { selectors.map { - case Some(selector) => find(selector.tracingId, selector.version, useCache, applyUpdates).map(Some(_)) - case None => Fox.successful(None) + case Some(selector) => + find("dummyAnnotationid", selector.tracingId, selector.version, useCache, applyUpdates, userToken = userToken) + .map(Some(_)) + case None => Fox.successful(None) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index 8ccfcfc7033..726de0f8af9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -33,13 +33,13 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, with FullMeshHelper with LazyLogging { - def loadFor(token: Option[String], tracingId: String, fullMeshRequest: FullMeshRequest)( + def loadFor(token: Option[String], annotationId: String, tracingId: String, fullMeshRequest: FullMeshRequest)( implicit ec: ExecutionContext): Fox[Array[Byte]] = for { - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + tracing <- volumeTracingService.find(annotationId, tracingId, userToken = token) ?~> "tracing.notFound" data <- if (fullMeshRequest.meshFileName.isDefined) loadFullMeshFromMeshfile(token, tracing, tracingId, fullMeshRequest) - else loadFullMeshFromAdHoc(token, tracing, tracingId, fullMeshRequest) + else loadFullMeshFromAdHoc(token, tracing, annotationId, tracingId, fullMeshRequest) } yield data private def 
loadFullMeshFromMeshfile( @@ -60,6 +60,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, private def loadFullMeshFromAdHoc(token: Option[String], tracing: VolumeTracing, + annotationId: String, tracingId: String, fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = for { @@ -68,16 +69,19 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, before = Instant.now voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId, token) ?~> "voxelSize.failedToFetch" verticesForChunks <- if (tracing.hasSegmentIndex.getOrElse(false)) - getAllAdHocChunksWithSegmentIndex(token, tracing, tracingId, mag, voxelSize, fullMeshRequest) + getAllAdHocChunksWithSegmentIndex(token, annotationId, tracing, tracingId, mag, voxelSize, fullMeshRequest) else - getAllAdHocChunksWithNeighborLogic(token, - tracing, - tracingId, - mag, - voxelSize, - fullMeshRequest, - fullMeshRequest.seedPosition.map(sp => VoxelPosition(sp.x, sp.y, sp.z, mag)), - adHocChunkSize) + getAllAdHocChunksWithNeighborLogic( + token, + tracing, + annotationId, + tracingId, + mag, + voxelSize, + fullMeshRequest, + fullMeshRequest.seedPosition.map(sp => VoxelPosition(sp.x, sp.y, sp.z, mag)), + adHocChunkSize + ) encoded = verticesForChunks.map(adHocMeshToStl) array = combineEncodedChunksToStl(encoded) _ = logMeshingDuration(before, "ad-hoc meshing (tracingstore)", array.length) @@ -85,13 +89,14 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, private def getAllAdHocChunksWithSegmentIndex( token: Option[String], + annotationId: String, tracing: VolumeTracing, tracingId: String, mag: Vec3Int, voxelSize: VoxelSize, fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[List[Array[Float]]] = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId) + fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId, userToken = token) mappingName <- 
volumeTracingService.baseMappingName(tracing) bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( @@ -124,13 +129,14 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, fullMeshRequest.additionalCoordinates, findNeighbors = false ) - loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, tracingId) + loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, annotationId, tracingId) } allVertices = vertexChunksWithNeighbors.map(_._1) } yield allVertices private def getAllAdHocChunksWithNeighborLogic(token: Option[String], tracing: VolumeTracing, + annotationId: String, tracingId: String, mag: Vec3Int, voxelSize: VoxelSize, @@ -153,12 +159,17 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, fullMeshRequest.additionalCoordinates ) _ = visited += topLeft - (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, tracingId) + (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(token, + tracing, + adHocMeshRequest, + annotationId, + tracingId) nextPositions: List[VoxelPosition] = generateNextTopLeftsFromNeighbors(topLeft, neighbors, chunkSize, visited) _ = visited ++= nextPositions neighborVerticesNested <- Fox.serialCombined(nextPositions) { position: VoxelPosition => getAllAdHocChunksWithNeighborLogic(token, tracing, + annotationId, tracingId, mag, voxelSize, @@ -173,8 +184,9 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, private def loadMeshChunkFromAdHoc(token: Option[String], tracing: VolumeTracing, adHocMeshRequest: WebknossosAdHocMeshRequest, + annotationId: String, tracingId: String): Fox[(Array[Float], List[Int])] = if (tracing.getHasEditableMapping) editableMappingService.createAdHocMesh(tracing, tracingId, adHocMeshRequest, token) - else volumeTracingService.createAdHocMesh(tracingId, adHocMeshRequest, token) + else 
volumeTracingService.createAdHocMesh(annotationId, tracingId, adHocMeshRequest, token) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index cb12c273f53..a185e2b3735 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -21,7 +21,8 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci with SegmentStatistics { // Returns the segment volume (=number of voxels) in the target mag - def getSegmentVolume(tracingId: String, + def getSegmentVolume(annotationId: String, + tracingId: String, segmentId: Long, mag: Vec3Int, mappingName: Option[String], @@ -31,11 +32,12 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci segmentId, mag, additionalCoordinates, - getBucketPositions(tracingId, mappingName, additionalCoordinates, userToken), - getTypedDataForBucketPosition(tracingId, userToken) + getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates, userToken), + getTypedDataForBucketPosition(annotationId, tracingId, userToken) ) - def getSegmentBoundingBox(tracingId: String, + def getSegmentBoundingBox(annotationId: String, + tracingId: String, segmentId: Long, mag: Vec3Int, mappingName: Option[String], @@ -45,16 +47,16 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci segmentId, mag, additionalCoordinates, - getBucketPositions(tracingId, mappingName, additionalCoordinates, userToken), - getTypedDataForBucketPosition(tracingId, userToken) + getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates, userToken), + 
getTypedDataForBucketPosition(annotationId, tracingId, userToken) ) - private def getTypedDataForBucketPosition(tracingId: String, userToken: Option[String])( + private def getTypedDataForBucketPosition(annotationId: String, tracingId: String, userToken: Option[String])( bucketPosition: Vec3Int, mag: Vec3Int, additionalCoordinates: Option[Seq[AdditionalCoordinate]]) = for { - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + tracing <- volumeTracingService.find(annotationId, tracingId, userToken = userToken) ?~> "tracing.notFound" bucketData <- getVolumeDataForPositions(tracing, tracingId, mag, @@ -67,13 +69,14 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci } yield dataTyped private def getBucketPositions( + annotationId: String, tracingId: String, mappingName: Option[String], additionalCoordinates: Option[Seq[AdditionalCoordinate]], userToken: Option[String])(segmentId: Long, mag: Vec3Int)(implicit ec: ExecutionContext) = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId) - tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId, userToken) + tracing <- volumeTracingService.find(annotationId, tracingId, userToken = userToken) ?~> "tracing.notFound" additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) allBucketPositions: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index bb2dd9f18a1..25f59bb4bda 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -77,7 +77,8 @@ trait VolumeTracingDownsampling protected def volumeSegmentIndexClient: FossilDBClient - protected def downsampleWithLayer(tracingId: String, + protected def downsampleWithLayer(annotationId: String, + tracingId: String, oldTracingId: String, tracing: VolumeTracing, dataLayer: VolumeTracingLayer, @@ -105,8 +106,8 @@ trait VolumeTracingDownsampling dataLayer) requiredMag } - fallbackLayer <- tracingService.getFallbackLayer(oldTracingId) // remote wk does not know the new id yet - tracing <- tracingService.find(tracingId) ?~> "tracing.notFound" + fallbackLayer <- tracingService.getFallbackLayer(annotationId, oldTracingId, userToken) // remote wk does not know the new id yet + tracing <- tracingService.find(annotationId, tracingId, userToken = userToken) ?~> "tracing.notFound" segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, tracing.version, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index efeef014b49..f47bb54ba36 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -6,7 +6,6 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import 
com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto @@ -651,12 +650,14 @@ class VolumeTracingService @Inject()( toCache) } yield id - def downsample(tracingId: String, + def downsample(annotationId: String, + tracingId: String, oldTracingId: String, tracing: VolumeTracing, userToken: Option[String]): Fox[Unit] = for { - resultingResolutions <- downsampleWithLayer(tracingId, + resultingResolutions <- downsampleWithLayer(annotationId, + tracingId, oldTracingId, tracing, volumeTracingLayer(tracingId, tracing), From e42c07132acb98c7906589bb81d3d5cdb721fe4c Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 27 Aug 2024 10:24:43 +0200 Subject: [PATCH 038/361] use annotationId also in zarr streaming --- ...VolumeTracingZarrStreamingController.scala | 80 ++++++++++++------- ...alableminds.webknossos.tracingstore.routes | 42 +++++----- 2 files changed, 72 insertions(+), 50 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 0466da5c31c..ddc2b73a5f9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -56,11 +56,15 @@ class VolumeTracingZarrStreamingController @Inject()( override def defaultErrorCode: Int = NOT_FOUND - def volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContent(token: Option[String], + annotationId: String, + tracingId: String, + zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -75,11 +79,15 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContentJson(token: Option[String], + annotationId: String, + tracingId: String, + zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto(_).toMagLiteral(allowScalar = true)) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -89,13 +97,15 @@ class VolumeTracingZarrStreamingController @Inject()( } def volumeTracingMagFolderContent(token: Option[String], + annotationId: String, tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- 
tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -112,14 +122,15 @@ class VolumeTracingZarrStreamingController @Inject()( } def volumeTracingMagFolderContentJson(token: Option[String], + annotationId: String, tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -128,15 +139,14 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zArray(token: Option[String], tracingId: String, mag: String): Action[AnyContent] = Action.async { - implicit request => + def zArray(token: Option[String], annotationId: String, tracingId: String, mag: String): Action[AnyContent] = + Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = 
tracing.resolutions.map(vec3IntFromProto) - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND cubeLength = DataLayer.bucketLength @@ -162,17 +172,17 @@ class VolumeTracingZarrStreamingController @Inject()( order = ArrayOrder.F) } yield Ok(Json.toJson(zarrHeader)) } - } + } - def zarrJsonForMag(token: Option[String], tracingId: String, mag: String): Action[AnyContent] = Action.async { - implicit request => + def zarrJsonForMag(token: Option[String], annotationId: String, tracingId: String, mag: String): Action[AnyContent] = + Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND additionalAxes = AdditionalAxis.fromProtos(tracing.additionalAxes) @@ -209,12 +219,13 @@ class VolumeTracingZarrStreamingController @Inject()( ) } yield Ok(Json.toJson(zarrHeader)) } - } - - def zGroup(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { - Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) } + + def zGroup(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { + implicit request => + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) + } } /** @@ -224,11 +235,13 @@ class VolumeTracingZarrStreamingController @Inject()( */ def zAttrs( token: Option[String], + annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND @@ -241,11 +254,13 @@ class VolumeTracingZarrStreamingController @Inject()( def zarrJson( token: Option[String], + annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND @@ -259,13 +274,15 @@ class VolumeTracingZarrStreamingController 
@Inject()( } def zarrSource(token: Option[String], + annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND zarrLayer = ZarrSegmentationLayer( name = tracingName.getOrElse(tracingId), @@ -281,12 +298,17 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String): Action[AnyContent] = + def rawZarrCube(token: Option[String], + annotationId: String, + tracingId: String, + mag: String, + coordinates: String): Action[AnyContent] = Action.async { implicit request => { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( + "tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index e033c9315af..9058706cf74 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -44,29 +44,29 @@ POST 
/mapping/:annotationId/:tracingId/agglomeratesForSegments @c # Zarr endpoints for volume annotations # Zarr version 2 -GET /volume/zarr/json/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/json/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/.zgroup @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(token: Option[String], tracingId: String) -GET /volume/zarr/:tracingId/.zattrs @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(token: Option[String], tracingId: String) -GET /volume/zarr/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], tracingId: String, tracingName: Option[String], zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: 
Option[String], tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:tracingId/:mag/.zarray @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(token: Option[String], tracingId: String, mag: String) -GET /volume/zarr/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String) +GET /volume/zarr/json/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/json/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/.zgroup @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(token: Option[String], annotationId: String, tracingId: String) +GET /volume/zarr/:annotationId/:tracingId/.zattrs @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(token: Option[String], annotationId: String, tracingId: String) +GET 
/volume/zarr/:annotationId/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/:mag/.zarray @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(token: Option[String], annotationId: String, tracingId: String, mag: String) +GET /volume/zarr/:annotationId/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], annotationId: String, tracingId: String, mag: String, coordinates: String) # Zarr version 3 -GET /volume/zarr3_experimental/json/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/json/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], 
tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], tracingId: String, tracingName: Option[String], zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(token: Option[String], tracingId: String) -GET /volume/zarr3_experimental/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:tracingId/:mag/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(token: Option[String], tracingId: String, mag: String) -GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], tracingId: String, mag: String, coordinates: String) +GET /volume/zarr3_experimental/json/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 3) +GET 
/volume/zarr3_experimental/json/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(token: Option[String], annotationId: String, tracingId: String) +GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/zarr.json 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(token: Option[String], annotationId: String, tracingId: String, mag: String) +GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], annotationId: String, tracingId: String, mag: String, coordinates: String) # Skeleton tracings POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save(token: Option[String]) From 48628044d455dd3992dddc41a1de6a1dc7da367e Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 27 Aug 2024 10:29:35 +0200 Subject: [PATCH 039/361] remove some unused stuff --- .../skeleton/SkeletonTracingService.scala | 28 ++----------------- .../volume/VolumeTracingService.scala | 1 + 2 files changed, 3 insertions(+), 26 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 15b77bf10f0..8288aaa8016 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -8,15 +8,10 @@ import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.annotation.{ - AnnotationWithTracings, - LayerUpdateAction, - TSAnnotationService -} +import 
com.scalableminds.webknossos.tracingstore.annotation.{AnnotationWithTracings, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats -import net.liftweb.common.{Box, Empty, Full} +import net.liftweb.common.{Box, Full} import play.api.i18n.MessagesProvider import scala.concurrent.ExecutionContext @@ -48,25 +43,6 @@ class SkeletonTracingService @Inject()( protected def takeTracing(annotation: AnnotationWithTracings, tracingId: String): Box[SkeletonTracing] = annotation.getSkeleton(tracingId) - private def findDesiredOrNewestPossibleVersion(tracing: SkeletonTracing, - tracingId: String, - desiredVersion: Option[Long]): Fox[Long] = - /* - * Determines the newest saved version from the updates column. - * if there are no updates at all, assume tracing is brand new (possibly created from NML, - * hence the emptyFallbck tracing.version) - */ - for { - newestUpdateVersion <- tracingDataStore.skeletonUpdates.getVersion(tracingId, - mayBeEmpty = Some(true), - emptyFallback = Some(tracing.version)) - } yield { - desiredVersion match { - case None => newestUpdateVersion - case Some(desiredSome) => math.min(desiredSome, newestUpdateVersion) - } - } - def duplicate(tracing: SkeletonTracing, fromTask: Boolean, editPosition: Option[Vec3Int], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index f47bb54ba36..2e13a9ad3a1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -275,6 +275,7 @@ class VolumeTracingService 
@Inject()( bool2Fox(mag.isIsotropic) } + // TODO private def revertToVolumeVersion(annotationId: String, tracingId: String, sourceVersion: Long, From 1ce9e833dae70d9d5ab8586c7615803aacdc8e84 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 27 Aug 2024 11:38:55 +0200 Subject: [PATCH 040/361] fix cyclic injection, fix loading skeleton + volume tracing proto --- .../WKRemoteTracingStoreClient.scala | 4 +- .../AnnotationTransactionService.scala | 44 +++++++++++- .../annotation/TSAnnotationService.scala | 71 +++++++------------ .../controllers/TSAnnotationController.scala | 1 + .../tracings/TracingService.scala | 12 +--- .../skeleton/SkeletonTracingService.scala | 16 ++++- .../volume/VolumeTracingService.scala | 16 ++++- 7 files changed, 99 insertions(+), 65 deletions(-) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 442a0b1a2ab..d6bcf96bcdd 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -84,10 +84,10 @@ class WKRemoteTracingStoreClient( def saveAnnotationProto(annotationId: ObjectId, annotationProto: AnnotationProto): Fox[Unit] = { logger.debug("Called to save AnnotationProto." + baseInfo) - rpc(s"${tracingStore.url}/annotations/save") + rpc(s"${tracingStore.url}/tracings/annotation/save") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("annotationId" -> annotationId.toString) - .postProto[AnnotationProto](annotationProto) + .postProto[AnnotationProto](annotationProto) // TODO why didn’t the failure bubble up? 
} def duplicateSkeletonTracing(skeletonTracingId: String, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index e0b8fd651de..4301d4508d2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -3,6 +3,12 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + BucketMutatingVolumeUpdateAction, + UpdateBucketVolumeAction, + VolumeTracingService +} import play.api.http.Status.CONFLICT import play.api.libs.json.Json @@ -13,7 +19,10 @@ import scala.concurrent.duration._ class AnnotationTransactionService @Inject()( handledGroupIdStore: TracingStoreRedisStore, // TODO: instantiate here rather than with injection, give fix namespace prefix? 
uncommittedUpdatesStore: TracingStoreRedisStore, - annotationService: TSAnnotationService) { + volumeTracingService: VolumeTracingService, + tracingDataStore: TracingDataStore, + annotationService: TSAnnotationService) + extends KeyValueStoreImplicits { private val transactionGroupExpiry: FiniteDuration = 24 hours private val handledGroupCacheExpiry: FiniteDuration = 24 hours @@ -156,7 +165,7 @@ class AnnotationTransactionService @Inject()( previousVersion.flatMap { prevVersion: Long => if (prevVersion + 1 == updateGroup.version) { for { - _ <- annotationService.handleUpdateGroup(annotationId, updateGroup, userToken) + _ <- handleUpdateGroup(annotationId, updateGroup, userToken) _ <- saveToHandledGroupIdStore(annotationId, updateGroup.transactionId, updateGroup.version, @@ -167,6 +176,37 @@ class AnnotationTransactionService @Inject()( } } yield newVersion + def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup, userToken: Option[String])( + implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- tracingDataStore.annotationUpdates.put(annotationId, + updateActionGroup.version, + preprocessActionsForStorage(updateActionGroup)) + bucketMutatingActions = findBucketMutatingActions(updateActionGroup) + _ <- Fox.runIf(bucketMutatingActions.nonEmpty)( + volumeTracingService + .applyBucketMutatingActions(annotationId, bucketMutatingActions, updateActionGroup.version, userToken)) + } yield () + + private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = + updateActionGroup.actions.flatMap { + case a: BucketMutatingVolumeUpdateAction => Some(a) + case _ => None + } + + private def preprocessActionsForStorage(updateActionGroup: UpdateActionGroup): List[UpdateAction] = { + val actionsWithInfo = updateActionGroup.actions.map( + _.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { + case Nil => List[UpdateAction]() + //to the first action in the 
group, attach the group's info + case first :: rest => first.addInfo(updateActionGroup.info) :: rest + } + actionsWithInfo.map { + case a: UpdateBucketVolumeAction => a.transformToCompact // TODO or not? + case a => a + } + } + /* If this update group has already been “handled” (successfully saved as either committed or uncommitted), * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful * despite the backend receiving it just fine. diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index dc333ee665f..254a5e0ce40 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -19,8 +19,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ } import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ApplyableVolumeUpdateAction, - BucketMutatingVolumeUpdateAction, - UpdateBucketVolumeAction, VolumeTracingService, VolumeUpdateAction } @@ -33,8 +31,7 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, - tracingDataStore: TracingDataStore, - volumeTracingService: VolumeTracingService) + tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits { def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String]): Fox[Unit] = @@ -53,37 +50,6 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl def currentMaterializableVersion(annotationId: String): Fox[Long] = tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = 
Some(0L)) - def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup, userToken: Option[String])( - implicit ec: ExecutionContext): Fox[Unit] = - for { - _ <- tracingDataStore.annotationUpdates.put(annotationId, - updateActionGroup.version, - preprocessActionsForStorage(updateActionGroup)) - bucketMutatingActions = findBucketMutatingActions(updateActionGroup) - _ <- Fox.runIf(bucketMutatingActions.nonEmpty)( - volumeTracingService - .applyBucketMutatingActions(annotationId, bucketMutatingActions, updateActionGroup.version, userToken)) - } yield () - - private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = - updateActionGroup.actions.flatMap { - case a: BucketMutatingVolumeUpdateAction => Some(a) - case _ => None - } - - private def preprocessActionsForStorage(updateActionGroup: UpdateActionGroup): List[UpdateAction] = { - val actionsWithInfo = updateActionGroup.actions.map( - _.addTimestamp(updateActionGroup.timestamp).addAuthorId(updateActionGroup.authorId)) match { - case Nil => List[UpdateAction]() - //to the first action in the group, attach the group's info - case first :: rest => first.addInfo(updateActionGroup.info) :: rest - } - actionsWithInfo.map { - case a: UpdateBucketVolumeAction => a.transformToCompact // TODO or not? 
- case a => a - } - } - private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( implicit ec: ExecutionContext): Fox[List[UpdateAction]] = if (desiredVersion == existingVersion) Fox.successful(List()) @@ -144,42 +110,55 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl def get(annotationId: String, version: Option[Long], userToken: Option[String])( implicit ec: ExecutionContext): Fox[AnnotationProto] = for { - withTracings <- getWithTracings(annotationId, version, List.empty, userToken) + withTracings <- getWithTracings(annotationId, version, List.empty, List.empty, userToken) } yield withTracings.annotation def getWithTracings(annotationId: String, version: Option[Long], - requestedTracingIds: List[String], + requestedSkeletonTracingIds: List[String], + requestedVolumeTracingIds: List[String], userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { annotationWithVersion <- tracingDataStore.annotations.get(annotationId, version)(fromProtoBytes[AnnotationProto]) annotation = annotationWithVersion.value - updated <- applyPendingUpdates(annotation, annotationId, version, requestedTracingIds, userToken) + updated <- applyPendingUpdates(annotation, + annotationId, + version, + requestedSkeletonTracingIds, + requestedVolumeTracingIds, + userToken) } yield updated private def applyPendingUpdates( annotation: AnnotationProto, annotationId: String, targetVersionOpt: Option[Long], - requestedTracingIds: List[String], + requestedSkeletonTracingIds: List[String], + requestedVolumeTracingIds: List[String], userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) - annotationWithTracings <- findTracingsForUpdates(annotation, updates) // TODO pass requested tracing ids 
+ annotationWithTracings <- findTracingsForUpdates(annotation, + updates, + requestedSkeletonTracingIds, + requestedVolumeTracingIds) updated <- applyUpdates(annotationWithTracings, annotationId, updates, targetVersion, userToken) } yield updated - private def findTracingsForUpdates(annotation: AnnotationProto, updates: List[UpdateAction])( - implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { - val skeletonTracingIds = updates.flatMap { + private def findTracingsForUpdates( + annotation: AnnotationProto, + updates: List[UpdateAction], + requestedSkeletonTracingIds: List[String], + requestedVolumeTracingIds: List[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { + val skeletonTracingIds = (updates.flatMap { case u: SkeletonUpdateAction => Some(u.actionTracingId) case _ => None - } - val volumeTracingIds = updates.flatMap { + } ++ requestedSkeletonTracingIds).distinct + val volumeTracingIds = (updates.flatMap { case u: VolumeUpdateAction => Some(u.actionTracingId) case _ => None - } + } ++ requestedVolumeTracingIds).distinct // TODO fetch editable mappings + instantiate editableMappingUpdaters/buffers if there are updates for them val editableMappingsMap: Map[String, (EditableMappingInfo, EditableMappingUpdater)] = Map.empty for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index b6873f0fa73..2245180a963 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -32,6 +32,7 @@ class TSAnnotationController @Inject()( for { // TODO assert id does not already exist _ <- tracingDataStore.annotations.put(annotationId, 0L, request.body) + _ = logger.info(s"stored 
annotationProto for $annotationId") } yield Ok } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index bf59ca54551..7250b28eb75 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -117,22 +117,12 @@ trait TracingService[T <: GeneratedMessage] def applyPendingUpdates(tracing: T, tracingId: String, targetVersion: Option[Long]): Fox[T] = Fox.successful(tracing) - protected def takeTracing(annotation: AnnotationWithTracings, tracingId: String): Box[T] - def find(annotationId: String, tracingId: String, version: Option[Long] = None, useCache: Boolean = true, applyUpdates: Boolean = false, - userToken: Option[String]): Fox[T] = - if (tracingId == TracingIds.dummyTracingId) - Fox.successful(dummyTracing) - else { - for { - annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), userToken) // TODO is applyUpdates still needed? 
- tracing <- takeTracing(annotation, annotationId) - } yield tracing - } + userToken: Option[String]): Fox[T] def findMultiple(selectors: List[Option[TracingSelector]], useCache: Boolean = true, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 8288aaa8016..e07a99558da 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -40,8 +40,20 @@ class SkeletonTracingService @Inject()( def currentVersion(tracing: SkeletonTracing): Long = tracing.version - protected def takeTracing(annotation: AnnotationWithTracings, tracingId: String): Box[SkeletonTracing] = - annotation.getSkeleton(tracingId) + def find(annotationId: String, + tracingId: String, + version: Option[Long] = None, + useCache: Boolean = true, + applyUpdates: Boolean = false, + userToken: Option[String]): Fox[SkeletonTracing] = + if (tracingId == TracingIds.dummyTracingId) + Fox.successful(dummyTracing) + else { + for { + annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), List.empty, userToken) // TODO is applyUpdates still needed? 
+ tracing <- annotation.getSkeleton(tracingId) + } yield tracing + } def duplicate(tracing: SkeletonTracing, fromTask: Boolean, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 2e13a9ad3a1..b22c0acc604 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -193,8 +193,20 @@ class VolumeTracingService @Inject()( } } yield volumeTracing - protected def takeTracing(annotation: AnnotationWithTracings, tracingId: String): Box[VolumeTracing] = - annotation.getVolume(tracingId) + def find(annotationId: String, + tracingId: String, + version: Option[Long] = None, + useCache: Boolean = true, + applyUpdates: Boolean = false, + userToken: Option[String]): Fox[VolumeTracing] = + if (tracingId == TracingIds.dummyTracingId) + Fox.successful(dummyTracing) + else { + for { + annotation <- annotationService.getWithTracings(annotationId, version, List.empty, List(tracingId), userToken) // TODO is applyUpdates still needed? 
+ tracing <- annotation.getVolume(tracingId) + } yield tracing + } override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = if (tracing.getHasEditableMapping) Some(tracingId) else None From 1124262b634f43dda7b68aef6b7d3d4ee04a1be3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 27 Aug 2024 11:50:45 +0200 Subject: [PATCH 041/361] request tracing from new api --- frontend/javascripts/admin/admin_rest_api.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 74159a81cb1..da21b1f6016 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -896,7 +896,7 @@ export async function getTracingForAnnotationType( const possibleVersionString = version != null ? `&version=${version}` : ""; const tracingArrayBuffer = await doWithToken((token) => Request.receiveArraybuffer( - `${annotation.tracingStore.url}/tracings/${tracingType}/${tracingId}?token=${token}${possibleVersionString}`, + `${annotation.tracingStore.url}/tracings/${tracingType}/${annotation.id}/${tracingId}?token=${token}${possibleVersionString}`, { headers: { Accept: "application/x-protobuf", From 9975bf2575eca8b223b25d715d50018c4ec090dc Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 28 Aug 2024 11:48:35 +0200 Subject: [PATCH 042/361] fix report tracing updates --- app/controllers/AnnotationController.scala | 16 ++++++++++------ app/controllers/LegacyApiController.scala | 2 +- .../WKRemoteTracingStoreController.scala | 11 ++++++----- conf/webknossos.latest.routes | 4 ++-- conf/webknossos.versioned.routes | 4 ++-- .../tracingstore/TSRemoteWebknossosClient.scala | 3 ++- .../annotation/TSAnnotationService.scala | 2 +- .../controllers/VolumeTracingController.scala | 2 +- 8 files changed, 25 insertions(+), 19 deletions(-) diff --git a/app/controllers/AnnotationController.scala 
b/app/controllers/AnnotationController.scala index 222a3d3a21b..683022523a1 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -76,7 +76,7 @@ class AnnotationController @Inject()( // For Task and Explorational annotations, id is an annotation id. For CompoundTask, id is a task id. For CompoundProject, id is a project id. For CompoundTaskType, id is a task type id id: String, // Timestamp in milliseconds (time at which the request is sent) - timestamp: Long): Action[AnyContent] = sil.UserAwareAction.async { implicit request => + timestamp: Option[Long]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { val notFoundMessage = if (request.identity.isEmpty) "annotation.notFound.considerLoggingIn" else "annotation.notFound" @@ -89,10 +89,14 @@ class AnnotationController @Inject()( js <- annotationService .publicWrites(annotation, request.identity, Some(restrictions)) ?~> "annotation.write.failed" _ <- Fox.runOptional(request.identity) { user => - if (typedTyp == AnnotationType.Task || typedTyp == AnnotationType.Explorational) { - timeSpanService - .logUserInteractionIfTheyArePotentialContributor(Instant(timestamp), user, annotation) // log time when a user starts working - } else Fox.successful(()) + Fox.runOptional(timestamp) { timestampDefined => + if (typedTyp == AnnotationType.Task || typedTyp == AnnotationType.Explorational) { + timeSpanService.logUserInteractionIfTheyArePotentialContributor( + Instant(timestampDefined), + user, + annotation) // log time when a user starts working + } else Fox.successful(()) + } } _ = Fox.runOptional(request.identity)(user => userDAO.updateLastActivity(user._id)) _ = request.identity.foreach { user => @@ -104,7 +108,7 @@ class AnnotationController @Inject()( def infoWithoutType(id: String, // Timestamp in milliseconds (time at which the request is sent - timestamp: Long): Action[AnyContent] = sil.UserAwareAction.async { implicit request => + 
timestamp: Option[Long]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { for { annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index 2946ec1ddfa..725a0362f45 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -107,7 +107,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, } yield adaptedResult } - def annotationInfoV4(typ: String, id: String, timestamp: Long): Action[AnyContent] = sil.SecuredAction.async { + def annotationInfoV4(typ: String, id: String, timestamp: Option[Long]): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { _ <- Fox.successful(logVersioned(request)) diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 6d4ac54be1a..14b5df23000 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -25,7 +25,7 @@ import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{WebknossosBearerTokenAuthenticatorService, WkSilhouetteEnvironment} -import utils.WkConf +import utils.{ObjectId, WkConf} import scala.concurrent.ExecutionContext @@ -56,12 +56,13 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore tracingStoreService.validateAccess(name, key) { _ => val report = request.body for { - annotation <- annotationDAO.findOneByTracingId(report.tracingId) + annotationId <- ObjectId.fromString(report.annotationId) + annotation <- annotationDAO.findOne(annotationId) _ <- ensureAnnotationNotFinished(annotation) _ <- annotationDAO.updateModified(annotation._id, Instant.now) - _ <- Fox.runOptional(report.statistics) { statistics => - 
annotationLayerDAO.updateStatistics(annotation._id, report.tracingId, statistics) - } + /*_ <- Fox.runOptional(report.statistics) { statistics => + annotationLayerDAO.updateStatistics(annotation._id, annotationId, statistics) + }*/ // TODO stats per tracing id userBox <- bearerTokenService.userForTokenOpt(report.userToken).futureBox trackTime = (report.significantChangesCount > 0 || !wkConf.WebKnossos.User.timeTrackingOnlyWithSignificantChanges) _ <- Fox.runOptional(userBox)(user => diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 0786fb5ac2c..c7a1babaefc 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -142,7 +142,7 @@ PUT /annotations/:typ/:id/reset PATCH /annotations/:typ/:id/transfer controllers.AnnotationController.transfer(typ: String, id: String) PATCH /annotations/:typ/:id/editLockedState controllers.AnnotationController.editLockedState(typ: String, id: String, isLockedByOwner: Boolean) -GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Long) +GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Option[Long]) PATCH /annotations/:id/makeHybrid controllers.AnnotationController.makeHybridWithoutType(id: String, fallbackLayerName: Option[String]) PATCH /annotations/:id/downsample controllers.AnnotationController.downsampleWithoutType(id: String, tracingId: String) PATCH /annotations/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayerWithoutType(id: String) @@ -153,7 +153,7 @@ GET /annotations/:id/download POST /annotations/:id/acquireMutex controllers.AnnotationController.tryAcquiringAnnotationMutex(id: String) PATCH /annotations/addSegmentIndicesToAll controllers.AnnotationController.addSegmentIndicesToAll(parallelBatchCount: Int, dryRun: Boolean, skipTracings: Option[String]) -GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: 
Long) +GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Option[Long]) PATCH /annotations/:typ/:id/makeHybrid controllers.AnnotationController.makeHybrid(typ: String, id: String, fallbackLayerName: Option[String]) PATCH /annotations/:typ/:id/downsample controllers.AnnotationController.downsample(typ: String, id: String, tracingId: String) PATCH /annotations/:typ/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayer(typ: String, id: String) diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index 8447aa2a9e6..28c9ca77314 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -38,7 +38,7 @@ GET /v4/datasets/:organizationName/:datasetName co GET /v4/datasets/:organizationName/:datasetName/isValidNewName controllers.LegacyApiController.assertValidNewNameV5(organizationName: String, datasetName: String) # v4: support changes to v5 -GET /v4/annotations/:typ/:id/info controllers.LegacyApiController.annotationInfoV4(typ: String, id: String, timestamp: Long) +GET /v4/annotations/:typ/:id/info controllers.LegacyApiController.annotationInfoV4(typ: String, id: String, timestamp: Option[Long]) PATCH /v4/annotations/:typ/:id/finish controllers.LegacyApiController.annotationFinishV4(typ: String, id: String, timestamp: Long) POST /v4/annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.LegacyApiController.annotationMergeV4(typ: String, id: String, mergedTyp: String, mergedId: String) PATCH /v4/annotations/:typ/:id/edit controllers.LegacyApiController.annotationEditV4(typ: String, id: String) @@ -64,7 +64,7 @@ GET /v3/datasets/:organizationName/:datasetName/isValidNewName co # v3: support changes to v5 PATCH /v3/annotations/:typ/:id/finish controllers.LegacyApiController.annotationFinishV4(typ: String, id: String, timestamp: Long) -GET /v3/annotations/:typ/:id/info controllers.LegacyApiController.annotationInfoV4(typ: String, id: String, 
timestamp: Long) +GET /v3/annotations/:typ/:id/info controllers.LegacyApiController.annotationInfoV4(typ: String, id: String, timestamp: Option[Long]) POST /v3/annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.LegacyApiController.annotationMergeV4(typ: String, id: String, mergedTyp: String, mergedId: String) PATCH /v3/annotations/:typ/:id/edit controllers.LegacyApiController.annotationEditV4(typ: String, id: String) POST /v3/annotations/:typ/:id/duplicate controllers.LegacyApiController.annotationDuplicateV4(typ: String, id: String) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 6174fb4be91..b229f2a6cbd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -19,7 +19,8 @@ import play.api.libs.ws.WSResponse import scala.concurrent.ExecutionContext -case class TracingUpdatesReport(tracingId: String, +case class TracingUpdatesReport(annotationId: String, + // TODO stats per tracing id? 
timestamps: List[Instant], statistics: Option[JsObject], significantChangesCount: Int, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 254a5e0ce40..e51feb2b819 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -40,7 +40,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl TracingUpdatesReport( annotationId, timestamps = updateGroups.map(g => Instant(g.timestamp)), - statistics = updateGroups.flatMap(_.stats).lastOption, + statistics = updateGroups.flatMap(_.stats).lastOption, // TODO statistics per tracing/layer significantChangesCount = updateGroups.map(_.significantChangesCount).sum, viewChangesCount = updateGroups.map(_.viewChangesCount).sum, userToken diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 477f70a2c66..8621dbada90 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -477,7 +477,7 @@ class VolumeTracingController @Inject()( updateGroup <- request.body.headOption.toFox _ <- bool2Fox(updateGroup.version == currentVersion + 1) ?~> "version mismatch" report = TracingUpdatesReport( - tracingId, + annotationId, // TODO integrate all of this into annotation update timestamps = List(Instant(updateGroup.timestamp)), statistics = None, significantChangesCount = 
updateGroup.actions.length, From 29a2f47c45511589af431369f876404da5adbdfc Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 28 Aug 2024 13:59:53 +0200 Subject: [PATCH 043/361] fix ambiguous update action keys --- .../annotation/TSAnnotationService.scala | 20 +++++++++++-------- .../annotation/UpdateActions.scala | 11 +++++----- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index e51feb2b819..b5102985d98 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -24,6 +24,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ } import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} +import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Full} import play.api.libs.json.{JsObject, JsValue, Json} @@ -32,7 +33,8 @@ import scala.concurrent.ExecutionContext class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, tracingDataStore: TracingDataStore) - extends KeyValueStoreImplicits { + extends KeyValueStoreImplicits + with LazyLogging { def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String]): Fox[Unit] = for { @@ -75,7 +77,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl case a: UpdateMetadataAnnotationUpdateAction => Fox.successful(annotationWithTracings.updateMetadata(a)) case a: SkeletonUpdateAction => - annotationWithTracings.applySkeletonAction(a) + 
annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" case a: ApplyableVolumeUpdateAction => annotationWithTracings.applyVolumeAction(a) case a: EditableMappingUpdateAction => @@ -119,14 +121,14 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl requestedVolumeTracingIds: List[String], userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { - annotationWithVersion <- tracingDataStore.annotations.get(annotationId, version)(fromProtoBytes[AnnotationProto]) + annotationWithVersion <- tracingDataStore.annotations.get(annotationId, version)(fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" annotation = annotationWithVersion.value updated <- applyPendingUpdates(annotation, annotationId, version, requestedSkeletonTracingIds, requestedVolumeTracingIds, - userToken) + userToken) ?~> "applyUpdates.failed" } yield updated private def applyPendingUpdates( @@ -137,13 +139,13 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl requestedVolumeTracingIds: List[String], userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { - targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) - updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) + targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) ?~> "determineTargetVersion.failed" + updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" annotationWithTracings <- findTracingsForUpdates(annotation, updates, requestedSkeletonTracingIds, - requestedVolumeTracingIds) - updated <- applyUpdates(annotationWithTracings, annotationId, updates, targetVersion, userToken) + requestedVolumeTracingIds) ?~> "findTracingsForUpdates.failed" + updated <- applyUpdates(annotationWithTracings, annotationId, updates, targetVersion, userToken) ?~> 
"applyUpdates.inner.failed" } yield updated private def findTracingsForUpdates( @@ -161,6 +163,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } ++ requestedVolumeTracingIds).distinct // TODO fetch editable mappings + instantiate editableMappingUpdaters/buffers if there are updates for them val editableMappingsMap: Map[String, (EditableMappingInfo, EditableMappingUpdater)] = Map.empty + logger.info(s"fetching volumes ${volumeTracingIds} and skeletons $skeletonTracingIds") for { skeletonTracings <- Fox.serialCombined(skeletonTracingIds)( id => @@ -170,6 +173,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl id => tracingDataStore.volumes .get[VolumeTracing](id, Some(annotation.version), mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing])) + _ = logger.info(s"fetched ${skeletonTracings.length} skeletons and ${volumeTracings.length} volumes") skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) .toMap diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index ee0cfaf4820..d0e00c49819 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -93,7 +93,8 @@ object UpdateAction { case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) - case "updateTdCamera" => deserialize[UpdateTdCameraVolumeAction](jsonValue) + case 
"updateTdCameraSkeleton" => deserialize[UpdateTdCameraSkeletonAction](jsonValue) // TODO deduplicate? + case "updateTdCameraVolume" => deserialize[UpdateTdCameraVolumeAction](jsonValue) case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) @@ -150,7 +151,7 @@ object UpdateAction { case s: UpdateTreeGroupsSkeletonAction => Json.obj("name" -> "updateTreeGroups", "value" -> Json.toJson(s)(UpdateTreeGroupsSkeletonAction.jsonFormat)) case s: UpdateTracingSkeletonAction => - Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) + Json.obj("name" -> "updateSkeletonTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) case s: RevertToVersionSkeletonAction => Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionSkeletonAction.jsonFormat)) case s: UpdateTreeVisibilitySkeletonAction => @@ -169,13 +170,13 @@ object UpdateAction { Json.obj("name" -> "updateUserBoundingBoxVisibility", "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilitySkeletonAction.jsonFormat)) case s: UpdateTdCameraSkeletonAction => - Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraSkeletonAction.jsonFormat)) + Json.obj("name" -> "updateTdCameraSkeleton", "value" -> Json.toJson(s)(UpdateTdCameraSkeletonAction.jsonFormat)) // Volume case s: UpdateBucketVolumeAction => Json.obj("name" -> "updateBucket", "value" -> Json.toJson(s)(UpdateBucketVolumeAction.jsonFormat)) case s: UpdateTracingVolumeAction => - Json.obj("name" -> "updateTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) + Json.obj("name" -> "updateVolumeTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) case s: UpdateUserBoundingBoxesVolumeAction => Json.obj("name" -> "updateUserBoundingBoxes", 
"value" -> Json.toJson(s)(UpdateUserBoundingBoxesVolumeAction.jsonFormat)) @@ -187,7 +188,7 @@ object UpdateAction { case s: ImportVolumeDataVolumeAction => Json.obj("name" -> "importVolumeTracing", "value" -> Json.toJson(s)(ImportVolumeDataVolumeAction.jsonFormat)) case s: UpdateTdCameraVolumeAction => - Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraVolumeAction.jsonFormat)) + Json.obj("name" -> "updateTdCameraVolume", "value" -> Json.toJson(s)(UpdateTdCameraVolumeAction.jsonFormat)) case s: CreateSegmentVolumeAction => Json.obj("name" -> "createSegment", "value" -> Json.toJson(s)(CreateSegmentVolumeAction.jsonFormat)) case s: UpdateSegmentVolumeAction => From b26e3b29d52dd9802bd84d2509936e53f5d7afd1 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 29 Aug 2024 16:49:05 +0200 Subject: [PATCH 044/361] adapt frontend to changed save route (only for skeleton; still buggy) --- .../oxalis/model/reducers/save_reducer.ts | 12 +++++++++++- .../javascripts/oxalis/model/sagas/save_saga.ts | 4 +++- .../oxalis/model/sagas/update_actions.ts | 13 +++++++++++-- frontend/javascripts/oxalis/store.ts | 4 ++-- 4 files changed, 27 insertions(+), 6 deletions(-) diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 7460f0e8de9..7239f5e3426 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -18,6 +18,7 @@ import { } from "oxalis/model/reducers/volumetracing_reducer_helpers"; import Date from "libs/date"; import * as Utils from "libs/utils"; +import { UpdateActionWithTracingId } from "../sagas/update_actions"; // These update actions are not idempotent. Having them // twice in the save queue causes a corruption of the current annotation. 
@@ -164,7 +165,16 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { transactionGroupIndex, timestamp: Date.now(), authorId: activeUser.id, - actions, + actions: actions.map( + (innerAction) => + ({ + ...innerAction, + value: { + ...innerAction.value, + actionTracingId: action.tracingId, + }, + }) as UpdateActionWithTracingId, + ), stats, // Redux Action Log context for debugging purposes. info: actionLogInfo, diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 4a85d54f119..79b60aedc6f 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -178,6 +178,7 @@ export function* sendRequestToServer( const { version, type } = yield* select((state) => selectTracing(state, saveQueueType, tracingId), ); + const annotationId = yield* select((state) => state.tracing.annotationId); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); let versionIncrement; [compactedSaveQueue, versionIncrement] = addVersionNumbers(compactedSaveQueue, version); @@ -191,7 +192,8 @@ export function* sendRequestToServer( const startTime = Date.now(); yield* call( sendRequestWithToken, - `${tracingStoreUrl}/tracings/${type}/${tracingId}/update?token=`, + + `${tracingStoreUrl}/tracings/annotation/${annotationId}/update?token=`, { method: "POST", data: compactedSaveQueue, diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 7df4a0179e2..5de4a15e402 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -78,6 +78,13 @@ export type UpdateAction = | UpdateMappingNameUpdateAction | SplitAgglomerateUpdateAction | MergeAgglomerateUpdateAction; + +export type UpdateActionWithTracingId = UpdateAction & { + value: UpdateAction["value"] & { + 
actionTracingId: string; + }; +}; + // This update action is only created in the frontend for display purposes type CreateTracingUpdateAction = { name: "createTracing"; @@ -107,6 +114,8 @@ type AddServerValuesFn = (arg0: T) => T & { type AsServerAction = ReturnType>; +// When the server delivers update actions (e.g., when requesting the version history +// of an annotation), ServerUpdateActions are sent which include some additional information. export type ServerUpdateAction = AsServerAction< | UpdateAction // These two actions are never sent by the frontend and, therefore, don't exist in the UpdateAction type @@ -260,7 +269,7 @@ export function updateSkeletonTracing( zoomLevel: number, ) { return { - name: "updateTracing", + name: "updateSkeletonTracing", value: { activeNode: tracing.activeNodeId, editPosition, @@ -411,7 +420,7 @@ export function removeFallbackLayer() { } export function updateTdCamera() { return { - name: "updateTdCamera", + name: "updateTdCameraSkeleton", value: {}, } as const; } diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index f449ef51634..56489f14126 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -49,7 +49,7 @@ import type { } from "oxalis/constants"; import type { BLEND_MODES, ControlModeEnum } from "oxalis/constants"; import type { Matrix4x4 } from "libs/mjs"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateAction, UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; import AnnotationReducer from "oxalis/model/reducers/annotation_reducer"; import DatasetReducer from "oxalis/model/reducers/dataset_reducer"; import type DiffableMap from "libs/diffable_map"; @@ -439,7 +439,7 @@ export type SaveQueueEntry = { version: number; timestamp: number; authorId: string; - actions: Array; + actions: Array; transactionId: string; transactionGroupCount: number; transactionGroupIndex: number; From 
44ca6129090f507f22e3c71b60a0376f415a7703 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 12 Sep 2024 11:38:19 +0200 Subject: [PATCH 045/361] use annotationUpdates to get materializableVersion --- frontend/javascripts/oxalis/model/sagas/update_actions.ts | 2 +- .../tracingstore/annotation/TSAnnotationService.scala | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 209f399fc15..702a918260a 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -301,7 +301,7 @@ export function updateVolumeTracing( zoomLevel: number, ) { return { - name: "updateTracing", + name: "updateVolumeTracing", value: { activeSegmentId: tracing.activeCellId, editPosition: position, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index b5102985d98..f41824d1dfa 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -50,6 +50,9 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield () def currentMaterializableVersion(annotationId: String): Fox[Long] = + tracingDataStore.annotationUpdates.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + + def currentMaterializedVersion(annotationId: String): Fox[Long] = tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( From 821377a624bf2a9d73737e70ef9f87ce70a8969e Mon 
Sep 17 00:00:00 2001 From: Florian M Date: Thu, 12 Sep 2024 13:25:24 +0200 Subject: [PATCH 046/361] update tracing proto version --- .../annotation/AnnotationTransactionService.scala | 7 ++++++- .../tracingstore/annotation/AnnotationWithTracings.scala | 9 +++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 4301d4508d2..3e0efeac89a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -9,6 +9,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ UpdateBucketVolumeAction, VolumeTracingService } +import com.typesafe.scalalogging.LazyLogging import play.api.http.Status.CONFLICT import play.api.libs.json.Json @@ -22,7 +23,8 @@ class AnnotationTransactionService @Inject()( volumeTracingService: VolumeTracingService, tracingDataStore: TracingDataStore, annotationService: TSAnnotationService) - extends KeyValueStoreImplicits { + extends KeyValueStoreImplicits + with LazyLogging { private val transactionGroupExpiry: FiniteDuration = 24 hours private val handledGroupCacheExpiry: FiniteDuration = 24 hours @@ -161,6 +163,9 @@ class AnnotationTransactionService @Inject()( for { _ <- annotationService.reportUpdates(annotationId, updateGroups, userToken) currentCommittedVersion: Fox[Long] = annotationService.currentMaterializableVersion(annotationId) + _ = logger.info(s"trying to commit ${updateGroups + .map(_.actions.length) + .sum} actions in ${updateGroups.length} groups (versions ${updateGroups.map(_.version).mkString(",")}") newVersion <- updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, 
updateGroup) => previousVersion.flatMap { prevVersion: Long => if (prevVersion + 1 == updateGroup.version) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 83ca1ac6ece..d1dd18cf044 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -74,8 +74,13 @@ case class AnnotationWithTracings( tracingsById, editableMappingsByTracingId) - def withVersion(newVersion: Long): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(version = newVersion), tracingsById, editableMappingsByTracingId) // TODO also update version in tracings? + def withVersion(newVersion: Long): AnnotationWithTracings = { + val tracingsUpdated = tracingsById.view.mapValues { + case Left(t: SkeletonTracing) => Left(t.withVersion(newVersion)) + case Right(t: VolumeTracing) => Right(t.withVersion(newVersion)) + } + AnnotationWithTracings(annotation.copy(version = newVersion), tracingsUpdated.toMap, editableMappingsByTracingId) + } def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { From 6ea6d654337e777411ea39b11ba99da731f9dfc6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Sep 2024 09:22:11 +0200 Subject: [PATCH 047/361] fix volume saving (single, layer, brush+move only) --- .../oxalis/model/bucket_data_handling/wkstore_adapter.ts | 3 ++- .../model/helpers/compaction/compact_save_queue.ts | 5 ++++- .../javascripts/oxalis/model/reducers/save_reducer.ts | 7 +++++-- frontend/javascripts/oxalis/model/sagas/save_saga.ts | 3 +++ .../javascripts/oxalis/model/sagas/update_actions.ts | 2 +- frontend/javascripts/test/helpers/saveHelpers.ts | 4 +++- 
.../annotation/AnnotationTransactionService.scala | 9 ++++----- .../tracingstore/annotation/TSAnnotationService.scala | 5 ++++- .../tracingstore/annotation/UpdateActions.scala | 2 ++ .../tracings/volume/VolumeTracingService.scala | 6 ++++-- .../tracings/volume/VolumeUpdateActions.scala | 7 +++---- 11 files changed, 35 insertions(+), 18 deletions(-) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index 4db8385e75b..963ca797e1c 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -110,7 +110,8 @@ export async function requestWithFallback( optLayerName || layerInfo.name }`; - const getTracingStoreUrl = () => `${tracingStoreHost}/tracings/volume/${layerInfo.name}`; + const getTracingStoreUrl = () => + `${tracingStoreHost}/tracings/volume/${state.tracing.annotationId}/${layerInfo.name}`; const maybeVolumeTracing = "tracingId" in layerInfo && layerInfo.tracingId != null diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts index 6ab8bab4525..710ce9f273a 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts @@ -5,7 +5,10 @@ function removeAllButLastUpdateTracingAction(updateActionsBatches: Array batch.actions.length === 1 && batch.actions[0].name === "updateTracing", + (batch) => + batch.actions.length === 1 && + (batch.actions[0].name === "updateSkeletonTracing" || + batch.actions[0].name === "updateVolumeTracing"), ); return _.without(updateActionsBatches, ...updateTracingOnlyBatches.slice(0, -1)); } diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts 
b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 7239f5e3426..a1d1ccf668e 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -18,7 +18,7 @@ import { } from "oxalis/model/reducers/volumetracing_reducer_helpers"; import Date from "libs/date"; import * as Utils from "libs/utils"; -import { UpdateActionWithTracingId } from "../sagas/update_actions"; +import type { UpdateActionWithTracingId } from "../sagas/update_actions"; // These update actions are not idempotent. Having them // twice in the save queue causes a corruption of the current annotation. @@ -140,7 +140,10 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { return state; } // Only report tracing statistics, if a "real" update to the tracing happened - const stats = _.some(action.items, (ua) => ua.name !== "updateTracing") + const stats = _.some( + action.items, + (ua) => ua.name !== "updateSkeletonTracing" && ua.name !== "updateVolumeTracing", + ) ? getStats(state.tracing, action.saveQueueType, action.tracingId) : null; const { activeUser } = state; diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 3382afb0b45..b51dd0152dc 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -371,9 +371,12 @@ export function performDiffTracing( ); } + /* + TODO: restore this update action (decide how to handle it, does it belong to skeleton or volume or something else?) 
if (prevTdCamera !== tdCamera) { actions = actions.concat(updateTdCamera()); } + */ return actions; } diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 702a918260a..5f95cd263ad 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -420,7 +420,7 @@ export function removeFallbackLayer() { } export function updateTdCamera() { return { - name: "updateTdCameraSkeleton", + name: "updateTdCamera", value: {}, } as const; } diff --git a/frontend/javascripts/test/helpers/saveHelpers.ts b/frontend/javascripts/test/helpers/saveHelpers.ts index 53ba1f35865..2672cb89f34 100644 --- a/frontend/javascripts/test/helpers/saveHelpers.ts +++ b/frontend/javascripts/test/helpers/saveHelpers.ts @@ -21,7 +21,9 @@ export function createSaveQueueFromUpdateActions( })); } export function withoutUpdateTracing(items: Array): Array { - return items.filter((item) => item.name !== "updateTracing"); + return items.filter( + (item) => item.name !== "updateSkeletonTracing" && item.name !== "updateVolumeTracing", + ); } export function withoutUpdateTree(items: Array): Array { return items.filter((item) => item.name !== "updateTree"); diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 3e0efeac89a..0738dd3c8c3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -181,12 +181,11 @@ class AnnotationTransactionService @Inject()( } } yield newVersion - def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup, userToken: 
Option[String])( + private def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup, userToken: Option[String])( implicit ec: ExecutionContext): Fox[Unit] = for { - _ <- tracingDataStore.annotationUpdates.put(annotationId, - updateActionGroup.version, - preprocessActionsForStorage(updateActionGroup)) + updateActionsJson <- Fox.successful(Json.toJson(preprocessActionsForStorage(updateActionGroup))) + _ <- tracingDataStore.annotationUpdates.put(annotationId, updateActionGroup.version, updateActionsJson) bucketMutatingActions = findBucketMutatingActions(updateActionGroup) _ <- Fox.runIf(bucketMutatingActions.nonEmpty)( volumeTracingService @@ -207,7 +206,7 @@ class AnnotationTransactionService @Inject()( case first :: rest => first.addInfo(updateActionGroup.info) :: rest } actionsWithInfo.map { - case a: UpdateBucketVolumeAction => a.transformToCompact // TODO or not? + case a: UpdateBucketVolumeAction => a.withoutBase64Data case a => a } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index f41824d1dfa..eb46397ca28 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -19,6 +19,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ } import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ApplyableVolumeUpdateAction, + BucketMutatingVolumeUpdateAction, VolumeTracingService, VolumeUpdateAction } @@ -85,9 +86,11 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl annotationWithTracings.applyVolumeAction(a) case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) + case _: 
BucketMutatingVolumeUpdateAction => + Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. // TODO make Mapping Editable // Note: UpdateBucketVolumeActions are not handled here, but instead eagerly on saving. - case _ => Fox.failure("Received unsupported AnnotationUpdateAction action") + case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") } } yield updated diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index d0e00c49819..8c7d0e34cb8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -198,6 +198,8 @@ object UpdateAction { case s: UpdateSegmentGroupsVolumeAction => Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) case s: CompactVolumeUpdateAction => Json.toJson(s)(CompactVolumeUpdateAction.compactVolumeUpdateActionFormat) + case s: UpdateMappingNameVolumeAction => + Json.obj("name" -> "updateMappingName", "value" -> Json.toJson(s)(UpdateMappingNameVolumeAction.jsonFormat)) // Editable Mapping case s: SplitAgglomerateUpdateAction => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index b22c0acc604..b20bf1525ff 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -45,6 +45,7 @@ 
import play.api.libs.json.{JsObject, JsValue, Json} import java.io._ import java.nio.file.Paths +import java.util.Base64 import java.util.zip.Deflater import scala.collection.mutable import scala.concurrent.ExecutionContext @@ -175,7 +176,8 @@ class VolumeTracingService @Inject()( action.additionalCoordinates) _ <- bool2Fox(!bucketPosition.hasNegativeComponent) ?~> s"Received a bucket at negative position ($bucketPosition), must be positive" dataLayer = volumeTracingLayer(tracingId, volumeTracing) - _ <- saveBucket(dataLayer, bucketPosition, action.data, updateGroupVersion) ?~> "failed to save bucket" + actionBucketData <- action.base64Data.map(Base64.getDecoder.decode).toFox + _ <- saveBucket(dataLayer, bucketPosition, actionBucketData, updateGroupVersion) ?~> "failed to save bucket" mappingName <- baseMappingName(volumeTracing) _ <- Fox.runIfOptionTrue(volumeTracing.hasSegmentIndex) { for { @@ -183,7 +185,7 @@ class VolumeTracingService @Inject()( _ <- updateSegmentIndex( segmentIndexBuffer, bucketPosition, - action.data, + actionBucketData, previousBucketBytes, volumeTracing.elementClass, mappingName, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 303c8e0969e..d00a7707d10 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -37,22 +37,21 @@ trait BucketMutatingVolumeUpdateAction extends VolumeUpdateAction case class UpdateBucketVolumeAction(position: Vec3Int, cubeSize: Int, mag: Vec3Int, - base64Data: String, + base64Data: Option[String], additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None, actionTracingId: String, actionTimestamp: Option[Long] = None, 
actionAuthorId: Option[String] = None, info: Option[String] = None) extends BucketMutatingVolumeUpdateAction { - lazy val data: Array[Byte] = Base64.getDecoder.decode(base64Data) override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - def transformToCompact: CompactVolumeUpdateAction = - CompactVolumeUpdateAction("updateBucket", Json.obj(), actionTracingId, actionTimestamp, actionAuthorId, info) + def withoutBase64Data: UpdateBucketVolumeAction = + this.copy(base64Data = None) } case class UpdateTracingVolumeAction( From fee3368c6c8c48e2fd8c2c1d303256007280ae14 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 13:24:33 +0200 Subject: [PATCH 048/361] fix typing --- .../compaction/compact_toggle_actions.ts | 1 + .../compaction/compact_update_actions.ts | 1 + .../oxalis/model/reducers/save_reducer.ts | 50 +++++++++------ .../oxalis/model/sagas/save_saga.ts | 14 ++--- .../javascripts/oxalis/view/version_entry.tsx | 9 +-- .../backend-snapshot-tests/annotations.e2e.ts | 7 ++- .../javascripts/test/helpers/saveHelpers.ts | 4 +- .../test/reducers/save_reducer.spec.ts | 18 +++--- .../test/sagas/saga_integration.spec.ts | 1 + .../javascripts/test/sagas/save_saga.spec.ts | 14 ++++- .../test/sagas/skeletontracing_saga.spec.ts | 62 ++++++++++++++++--- 11 files changed, 129 insertions(+), 52 deletions(-) diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts index cc7ee5af199..fd989003fab 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts @@ -7,6 +7,7 @@ import _ from "lodash"; 
import type { SkeletonTracing, Tree, TreeGroup, TreeMap, VolumeTracing } from "oxalis/store"; import type { UpdateAction, + UpdateActionWithTracingId, UpdateTreeVisibilityUpdateAction, } from "oxalis/model/sagas/update_actions"; import { updateTreeGroupVisibility, updateTreeVisibility } from "oxalis/model/sagas/update_actions"; diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts index b16e490e5e8..c9e714a8229 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts @@ -7,6 +7,7 @@ import type { DeleteNodeUpdateAction, DeleteTreeUpdateAction, UpdateAction, + UpdateActionWithTracingId, } from "oxalis/model/sagas/update_actions"; import { moveTreeComponent } from "oxalis/model/sagas/update_actions"; import compactToggleActions from "oxalis/model/helpers/compaction/compact_toggle_actions"; diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index a1d1ccf668e..13d9e7c38f3 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -18,7 +18,7 @@ import { } from "oxalis/model/reducers/volumetracing_reducer_helpers"; import Date from "libs/date"; import * as Utils from "libs/utils"; -import type { UpdateActionWithTracingId } from "../sagas/update_actions"; +import type { UpdateAction, UpdateActionWithTracingId } from "../sagas/update_actions"; // These update actions are not idempotent. Having them // twice in the save queue causes a corruption of the current annotation. 
@@ -135,16 +135,19 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } case "PUSH_SAVE_QUEUE_TRANSACTION": { - const { items, transactionId } = action; + // Use `dispatchedAction` to better distinguish this variable from + // update actions. + const dispatchedAction = action; + const { items, transactionId } = dispatchedAction; if (items.length === 0) { return state; } // Only report tracing statistics, if a "real" update to the tracing happened const stats = _.some( - action.items, + dispatchedAction.items, (ua) => ua.name !== "updateSkeletonTracing" && ua.name !== "updateVolumeTracing", ) - ? getStats(state.tracing, action.saveQueueType, action.tracingId) + ? getStats(state.tracing, dispatchedAction.saveQueueType, dispatchedAction.tracingId) : null; const { activeUser } = state; if (activeUser == null) { @@ -153,12 +156,16 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { const updateActionChunks = _.chunk( items, - MAXIMUM_ACTION_COUNT_PER_BATCH[action.saveQueueType], + MAXIMUM_ACTION_COUNT_PER_BATCH[dispatchedAction.saveQueueType], ); const transactionGroupCount = updateActionChunks.length; const actionLogInfo = JSON.stringify(getActionLog().slice(-10)); - const oldQueue = selectQueue(state, action.saveQueueType, action.tracingId); + const oldQueue = selectQueue( + state, + dispatchedAction.saveQueueType, + dispatchedAction.tracingId, + ); const newQueue = oldQueue.concat( updateActionChunks.map((actions, transactionGroupIndex) => ({ // Placeholder, the version number will be updated before sending to the server @@ -168,16 +175,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { transactionGroupIndex, timestamp: Date.now(), authorId: activeUser.id, - actions: actions.map( - (innerAction) => - ({ - ...innerAction, - value: { - ...innerAction.value, - actionTracingId: action.tracingId, - }, - }) as UpdateActionWithTracingId, - ), + actions: addTracingIdToActions(actions, 
dispatchedAction.tracingId), stats, // Redux Action Log context for debugging purposes. info: actionLogInfo, @@ -189,7 +187,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { // caught by the following check. If the bug appears again, we can investigate with more // details thanks to airbrake. if ( - action.saveQueueType === "skeleton" && + dispatchedAction.saveQueueType === "skeleton" && oldQueue.length > 0 && newQueue.length > 0 && newQueue.at(-1)?.actions.some((action) => NOT_IDEMPOTENT_ACTIONS.includes(action.name)) && @@ -205,7 +203,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { ); } - const newQueueObj = updateTracingDict(action, state.save.queue, newQueue); + const newQueueObj = updateTracingDict(dispatchedAction, state.save.queue, newQueue); return update(state, { save: { queue: { @@ -314,4 +312,20 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } } +export function addTracingIdToActions( + actions: UpdateAction[], + tracingId: string, +): UpdateActionWithTracingId[] { + return actions.map( + (innerAction) => + ({ + ...innerAction, + value: { + ...innerAction.value, + actionTracingId: tracingId, + }, + }) as UpdateActionWithTracingId, + ); +} + export default SaveReducer; diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index b51dd0152dc..e12165012f1 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -105,7 +105,7 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str // ignored (they will be picked up in the next iteration of this loop). // Otherwise, the risk of a high number of save-requests (see case 1) // would be present here, too (note the risk would be greater, because the - // user didn't use the save button which is usually accompanied a small pause). 
+ // user didn't use the save button which is usually accompanied by a small pause). const itemCountToSave = forcePush ? Number.POSITIVE_INFINITY : yield* select((state) => selectQueue(state, saveQueueType, tracingId).length); @@ -175,9 +175,7 @@ export function* sendRequestToServer( const fullSaveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId)); const saveQueue = sliceAppropriateBatchCount(fullSaveQueue, saveQueueType); let compactedSaveQueue = compactSaveQueue(saveQueue); - const { version, type } = yield* select((state) => - selectTracing(state, saveQueueType, tracingId), - ); + const { version } = yield* select((state) => selectTracing(state, saveQueueType, tracingId)); const annotationId = yield* select((state) => state.tracing.annotationId); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); let versionIncrement; @@ -354,8 +352,8 @@ export function performDiffTracing( tracing: SkeletonTracing | VolumeTracing, prevFlycam: Flycam, flycam: Flycam, - prevTdCamera: CameraData, - tdCamera: CameraData, + _prevTdCamera: CameraData, + _tdCamera: CameraData, ): Array { let actions: Array = []; @@ -372,7 +370,7 @@ export function performDiffTracing( } /* - TODO: restore this update action (decide how to handle it, does it belong to skeleton or volume or something else?) + TODOp: restore this update action (decide how to handle it, does it belong to skeleton or volume or something else?) if (prevTdCamera !== tdCamera) { actions = actions.concat(updateTdCamera()); } @@ -401,7 +399,7 @@ export function* setupSavingForTracingType( /* Listen to changes to the annotation and derive UpdateActions from the old and new state. - The actual push to the server is done by the forked pushSaveQueueAsync saga. + The actual push to the server is done by the forked pushSaveQueueAsync saga. */ const saveQueueType = initializeAction.type === "INITIALIZE_SKELETONTRACING" ? 
"skeleton" : "volume"; diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index c792f7171d6..4302438ded2 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -188,8 +188,9 @@ const descriptionFns: Record Descr description: `Merged the trees with id ${action.value.sourceId} and ${action.value.targetId}.`, icon: , }), - updateTracing: (): Description => updateTracingDescription, -}; + updateSkeletonTracing: (): Description => updateTracingDescription, + updateVolumeTracing: (): Description => updateTracingDescription, +} as const; function getDescriptionForSpecificBatch( actions: Array, @@ -200,8 +201,8 @@ function getDescriptionForSpecificBatch( if (firstAction.name !== type) { throw new Error("Type constraint violated"); } - - return descriptionFns[type](firstAction, actions.length); + const fn = descriptionFns[type]; + return fn(firstAction, actions.length); } // An update action batch can consist of more than one update action as a single user action diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index b698c52ae54..780739fc832 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -20,6 +20,8 @@ import * as api from "admin/admin_rest_api"; import generateDummyTrees from "oxalis/model/helpers/generate_dummy_trees"; import test from "ava"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; +import type { SaveQueueEntry } from "oxalis/store"; + const datasetId = { name: "confocal-multi_knossos", owningOrganization: "Organization_X", @@ -146,8 +148,7 @@ test.serial("getTracingsForAnnotation() for hybrid", async (t) => { }); }); -// @ts-expect-error ts-migrate(7006) FIXME: Parameter 'queue' implicitly has 
an 'any' type. -async function sendUpdateActionsForSkeleton(explorational: APIAnnotation, queue) { +async function sendUpdateActionsForSkeleton(explorational: APIAnnotation, queue: SaveQueueEntry[]) { const skeletonTracing = getSkeletonDescriptor(explorational); if (skeletonTracing == null) throw new Error("No skeleton annotation present."); return sendRequestWithToken( @@ -173,6 +174,7 @@ test.serial("Send update actions and compare resulting tracing", async (t) => { [UpdateActions.updateSkeletonTracing(initialSkeleton, [2, 3, 4], null, [1, 2, 3], 2)], ], 123456789, + createdExplorational.annotationLayers[0].tracingId, ), 0, ); @@ -207,6 +209,7 @@ test("Send complex update actions and compare resulting tracing", async (t) => { createSaveQueueFromUpdateActions( [createTreesUpdateActions, [updateTreeGroupsUpdateAction]], 123456789, + createdExplorational.annotationLayers[0].tracingId, ), 0, ); diff --git a/frontend/javascripts/test/helpers/saveHelpers.ts b/frontend/javascripts/test/helpers/saveHelpers.ts index 2672cb89f34..09703d25e29 100644 --- a/frontend/javascripts/test/helpers/saveHelpers.ts +++ b/frontend/javascripts/test/helpers/saveHelpers.ts @@ -1,4 +1,5 @@ import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; +import { addTracingIdToActions } from "oxalis/model/reducers/save_reducer"; import type { UpdateAction } from "oxalis/model/sagas/update_actions"; import type { SaveQueueEntry } from "oxalis/store"; import dummyUser from "test/fixtures/dummy_user"; @@ -6,13 +7,14 @@ import dummyUser from "test/fixtures/dummy_user"; export function createSaveQueueFromUpdateActions( updateActions: UpdateAction[][], timestamp: number, + tracingId: string, stats: TracingStats | null = null, ): SaveQueueEntry[] { return updateActions.map((ua) => ({ version: -1, timestamp, stats, - actions: ua.slice(), + actions: addTracingIdToActions(ua, tracingId), info: "[]", transactionGroupCount: 1, authorId: dummyUser.id, diff --git 
a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 0a4b398f274..1ae173ee944 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -6,6 +6,7 @@ import type { SaveState } from "oxalis/store"; import type { APIUser } from "types/api_flow_types"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; import type { EmptyObject } from "types/globals"; + const TIMESTAMP = 1494695001688; const DateMock = { now: () => TIMESTAMP, @@ -19,6 +20,7 @@ const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions"); const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer").default; const { createEdge } = mockRequire.reRequire("oxalis/model/sagas/update_actions"); +const tracingId = "1234567890"; const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject } = { activeUser: dummyUser, save: { @@ -46,14 +48,18 @@ const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject }; test("Save should add update actions to the queue", (t) => { const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; - const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); const newState = SaveReducer(initialState, pushAction); t.deepEqual(newState.save.queue.skeleton, saveQueue); }); test("Save should add more update actions to the queue", (t) => { const getItems = (treeId: number) => [createEdge(treeId, 1, 2), createEdge(treeId, 2, 3)]; - const saveQueue = createSaveQueueFromUpdateActions([getItems(0), getItems(1)], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + [getItems(0), getItems(1)], + TIMESTAMP, + tracingId, + ); const testState = SaveReducer( initialState, 
SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton"), @@ -65,9 +71,7 @@ test("Save should add more update actions to the queue", (t) => { t.deepEqual(newState.save.queue.skeleton, saveQueue); }); test("Save should add zero update actions to the queue", (t) => { - // @ts-expect-error ts-migrate(7034) FIXME: Variable 'items' implicitly has type 'any[]' in so... Remove this comment to see the full error message - const items = []; - // @ts-expect-error ts-migrate(7005) FIXME: Variable 'items' implicitly has an 'any[]' type. + const items = [] as const; const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); const newState = SaveReducer(initialState, pushAction); t.deepEqual(newState.save.queue.skeleton, []); @@ -75,7 +79,7 @@ test("Save should add zero update actions to the queue", (t) => { test("Save should remove one update actions from the queue", (t) => { const firstItem = [createEdge(0, 1, 2)]; const secondItem = [createEdge(1, 2, 3)]; - const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP, tracingId); const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton"); const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton"); const popAction = SaveActions.shiftSaveQueueAction(1, "skeleton"); @@ -86,7 +90,7 @@ test("Save should remove one update actions from the queue", (t) => { }); test("Save should remove zero update actions from the queue", (t) => { const items = [createEdge(0, 1, 2), createEdge(1, 2, 3)]; - const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); const popAction = SaveActions.shiftSaveQueueAction(0, "skeleton"); let newState = SaveReducer(initialState, pushAction); diff --git 
a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 771fb88b194..c74623c26d9 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -67,6 +67,7 @@ test.serial( ], ], TIMESTAMP, + "tracingId", getStats(state.tracing, "skeleton", "irrelevant_in_skeleton_case") || undefined, ); // Reset the info field which is just for debugging purposes diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 677568ef279..ed9f3146fa8 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -75,12 +75,13 @@ test("SaveSaga should compact multiple updateTracing update actions", (t) => { [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], ], TIMESTAMP, + tracingId, ); t.deepEqual(compactSaveQueue(saveQueue), [saveQueue[1]]); }); test("SaveSaga should send update actions", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); // setLastSaveTimestampAction @@ -113,6 +114,7 @@ test("SaveSaga should send request to server", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], TIMESTAMP, + tracingId, ); const saga = sendRequestToServer(TRACING_TYPE, tracingId); saga.next(); @@ -137,6 +139,7 @@ test("SaveSaga should retry update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [[UpdateActions.createEdge(1, 0, 1)], 
[UpdateActions.createEdge(1, 1, 2)]], TIMESTAMP, + tracingId, ); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); @@ -170,6 +173,7 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => const saveQueue = createSaveQueueFromUpdateActions( [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], TIMESTAMP, + tracingId, ); const saga = sendRequestToServer(TRACING_TYPE, tracingId); saga.next(); @@ -206,7 +210,7 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => }); test("SaveSaga should send update actions right away and try to reach a state where all updates are saved", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); @@ -229,7 +233,7 @@ test("SaveSaga should send update actions right away and try to reach a state wh }); test("SaveSaga should not try to reach state with all actions being saved when saving is triggered by a timeout", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); @@ -253,6 +257,7 @@ test("SaveSaga should remove the correct update actions", (t) => { [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], ], TIMESTAMP, + tracingId, ); const saga = 
sendRequestToServer(TRACING_TYPE, tracingId); saga.next(); @@ -286,6 +291,7 @@ test("SaveSaga should set the correct version numbers", (t) => { [UpdateActions.createEdge(2, 3, 4)], ], TIMESTAMP, + tracingId, ); const saga = sendRequestToServer(TRACING_TYPE, tracingId); saga.next(); @@ -319,6 +325,7 @@ test("SaveSaga should set the correct version numbers if the save queue was comp [UpdateActions.updateSkeletonTracing(initialState, [3, 4, 5], [0, 0, 1], 3)], ], TIMESTAMP, + tracingId, ); const saga = sendRequestToServer(TRACING_TYPE, tracingId); saga.next(); @@ -354,6 +361,7 @@ test("SaveSaga addVersionNumbers should set the correct version numbers", (t) => ], TIMESTAMP, + tracingId, ); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 3); diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index 619047cdaca..f612805bcc7 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -27,6 +27,7 @@ import { TreeTypeEnum } from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; import type { ServerSkeletonTracing } from "types/api_flow_types"; import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; +import { addTracingIdToActions } from "oxalis/model/reducers/save_reducer"; const TIMESTAMP = 1494347146379; const DateMock = { @@ -82,6 +83,13 @@ function compactSaveQueueWithUpdateActions( tracing: SkeletonTracing, ): Array { return compactSaveQueue( + // todop + // Do we really need compactSaveQueueWithUpdateActions? actually, compactUpdateActions + // is never called with a save queue in prod (instead, the function is called before + // filling the save queue). 
one could probably combine compactUpdateActions and + // createSaveQueueFromUpdateActions to have a createCompactedSaveQueueFromUpdateActions + // helper function and use that in this spec. + // @ts-expect-error queue.map((batch) => ({ ...batch, actions: compactUpdateActions(batch.actions, tracing) })), ); } @@ -634,7 +642,11 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState.tracing), @@ -694,7 +706,11 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { testDiffing(newState1.tracing, newState2.tracing, newState1.flycam, newState2.flycam), ); // compactUpdateActions is triggered by the saving, it can therefore contain the results of more than one diffing - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState2.tracing), @@ -796,7 +812,11 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { ), ); // compactUpdateActions is triggered by the saving, it can therefore contain the results of more than one diffing - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState.tracing), @@ -878,7 +898,11 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { 
testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState.tracing), @@ -936,7 +960,11 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState.tracing), @@ -1008,7 +1036,11 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { updateActions.push( testDiffing(newState1.tracing, newState2.tracing, newState1.flycam, newState2.flycam), ); - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + updateActions, + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState2.tracing), @@ -1095,7 +1127,11 @@ test("compactUpdateActions should do nothing if it cannot compact", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState.tracing), @@ -1124,7 +1160,11 @@ test("compactUpdateActions should detect a deleted tree", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], 
TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState.tracing), @@ -1156,7 +1196,11 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); + const saveQueue = createSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing.tracingId, + ); const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( saveQueue, enforceSkeletonTracing(newState.tracing), From 4e54e85787f79e3275461fb8c375440bd9ebaff3 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 13:42:00 +0200 Subject: [PATCH 049/361] only have one save queue for all tracings in an annotation --- frontend/javascripts/oxalis/default_state.ts | 6 +-- .../oxalis/model/accessors/save_accessor.ts | 16 ------ .../oxalis/model/reducers/save_reducer.ts | 49 +++---------------- .../oxalis/model/sagas/save_saga.ts | 13 ++--- frontend/javascripts/oxalis/store.ts | 6 +-- .../oxalis/view/action-bar/save_button.tsx | 20 +++----- .../test/reducers/save_reducer.spec.ts | 6 +-- .../test/sagas/saga_integration.spec.ts | 21 ++++---- 8 files changed, 32 insertions(+), 105 deletions(-) diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 298aa359a17..090a3384aa6 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -179,11 +179,7 @@ const defaultState: OxalisState = { annotationLayers: [], }, save: { - queue: { - skeleton: [], - volumes: {}, - mappings: {}, - }, + queue: [], isBusyInfo: { skeleton: false, volumes: {}, diff --git a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts 
b/frontend/javascripts/oxalis/model/accessors/save_accessor.ts index e35ed69805d..d9dd9d65aa0 100644 --- a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/save_accessor.ts @@ -9,19 +9,3 @@ export function isBusy(isBusyInfo: IsBusyInfo): boolean { Utils.values(isBusyInfo.mappings).some((el) => el) ); } -export function selectQueue( - state: OxalisState, - saveQueueType: SaveQueueType, - tracingId: string, -): Array { - switch (saveQueueType) { - case "skeleton": - return state.save.queue.skeleton; - case "volume": - return state.save.queue.volumes[tracingId]; - case "mapping": - return state.save.queue.mappings[tracingId]; - default: - throw new Error(`Unknown save queue type: ${saveQueueType}`); - } -} diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 13d9e7c38f3..b8d75ff86dc 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -10,7 +10,6 @@ import type { import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; import { getStats } from "oxalis/model/accessors/annotation_accessor"; import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; -import { selectQueue } from "oxalis/model/accessors/save_accessor"; import { updateKey2 } from "oxalis/model/helpers/deep_update"; import { updateEditableMapping, @@ -61,15 +60,7 @@ function updateTracingDict( } export function getTotalSaveQueueLength(queueObj: SaveState["queue"]) { - return ( - queueObj.skeleton.length + - _.sum( - Utils.values(queueObj.volumes).map((volumeQueue: SaveQueueEntry[]) => volumeQueue.length), - ) + - _.sum( - Utils.values(queueObj.mappings).map((mappingQueue: SaveQueueEntry[]) => mappingQueue.length), - ) - ); + return queueObj.length; } function updateVersion(state: OxalisState, action: SetVersionNumberAction) { 
@@ -118,22 +109,6 @@ function updateLastSaveTimestamp(state: OxalisState, action: SetLastSaveTimestam function SaveReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { - case "INITIALIZE_VOLUMETRACING": { - // Set up empty save queue array for volume tracing - const newVolumesQueue = { ...state.save.queue.volumes, [action.tracing.id]: [] }; - return updateKey2(state, "save", "queue", { - volumes: newVolumesQueue, - }); - } - - case "INITIALIZE_EDITABLE_MAPPING": { - // Set up empty save queue array for editable mapping - const newMappingsQueue = { ...state.save.queue.mappings, [action.mapping.tracingId]: [] }; - return updateKey2(state, "save", "queue", { - mappings: newMappingsQueue, - }); - } - case "PUSH_SAVE_QUEUE_TRANSACTION": { // Use `dispatchedAction` to better distinguish this variable from // update actions. @@ -161,11 +136,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { const transactionGroupCount = updateActionChunks.length; const actionLogInfo = JSON.stringify(getActionLog().slice(-10)); - const oldQueue = selectQueue( - state, - dispatchedAction.saveQueueType, - dispatchedAction.tracingId, - ); + const oldQueue = state.save.queue; const newQueue = oldQueue.concat( updateActionChunks.map((actions, transactionGroupIndex) => ({ // Placeholder, the version number will be updated before sending to the server @@ -203,11 +174,10 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { ); } - const newQueueObj = updateTracingDict(dispatchedAction, state.save.queue, newQueue); return update(state, { save: { queue: { - $set: newQueueObj, + $set: newQueue, }, progressInfo: { totalActionCount: { @@ -222,7 +192,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { const { count } = action; if (count > 0) { - const queue = selectQueue(state, action.saveQueueType, action.tracingId); + const queue = state.save.queue; const processedQueueActionCount = _.sumBy( 
queue.slice(0, count), @@ -230,13 +200,12 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { ); const remainingQueue = queue.slice(count); - const newQueueObj = updateTracingDict(action, state.save.queue, remainingQueue); - const remainingQueueLength = getTotalSaveQueueLength(newQueueObj); + const remainingQueueLength = getTotalSaveQueueLength(remainingQueue); const resetCounter = remainingQueueLength === 0; return update(state, { save: { queue: { - $set: newQueueObj, + $set: remainingQueue, }, progressInfo: { // Reset progress counters if the queue is empty. Otherwise, @@ -259,11 +228,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { return update(state, { save: { queue: { - $set: { - skeleton: [], - volumes: _.mapValues(state.save.queue.volumes, () => []), - mappings: _.mapValues(state.save.queue.mappings, () => []), - }, + $set: [], }, progressInfo: { processedActionCount: { diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index e12165012f1..4cc5d6e6f5b 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -10,7 +10,6 @@ import _ from "lodash"; import messages from "messages"; import { ControlModeEnum } from "oxalis/constants"; import { getResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; -import { selectQueue } from "oxalis/model/accessors/save_accessor"; import { selectTracing } from "oxalis/model/accessors/tracing_accessor"; import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; import { FlycamActions } from "oxalis/model/actions/flycam_actions"; @@ -69,7 +68,7 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str let saveQueue; // Check whether the save queue is actually empty, the PUSH_SAVE_QUEUE_TRANSACTION action // could have been triggered during the call to sendRequestToServer - 
saveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId)); + saveQueue = yield* select((state) => state.save.queue); if (saveQueue.length === 0) { if (loopCounter % 100 === 0) { @@ -108,10 +107,10 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str // user didn't use the save button which is usually accompanied by a small pause). const itemCountToSave = forcePush ? Number.POSITIVE_INFINITY - : yield* select((state) => selectQueue(state, saveQueueType, tracingId).length); + : yield* select((state) => state.save.queue.length); let savedItemCount = 0; while (savedItemCount < itemCountToSave) { - saveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId)); + saveQueue = yield* select((state) => state.save.queue); if (saveQueue.length > 0) { savedItemCount += yield* call(sendRequestToServer, saveQueueType, tracingId); @@ -172,7 +171,7 @@ export function* sendRequestToServer( * The saga returns the number of save queue items that were saved. */ - const fullSaveQueue = yield* select((state) => selectQueue(state, saveQueueType, tracingId)); + const fullSaveQueue = yield* select((state) => state.save.queue); const saveQueue = sliceAppropriateBatchCount(fullSaveQueue, saveQueueType); let compactedSaveQueue = compactSaveQueue(saveQueue); const { version } = yield* select((state) => selectTracing(state, saveQueueType, tracingId)); @@ -531,9 +530,7 @@ function* watchForSaveConflicts() { // The latest version on the server is greater than the most-recently // stored version. 
- const saveQueue = yield* select((state) => - selectQueue(state, tracing.type, tracing.tracingId), - ); + const saveQueue = yield* select((state) => state.save.queue); let msg = ""; if (!allowSave) { diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 57ff2032def..24aca44ec1f 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -457,11 +457,7 @@ export type IsBusyInfo = { }; export type SaveState = { readonly isBusyInfo: IsBusyInfo; - readonly queue: { - readonly skeleton: Array; - readonly volumes: Record>; - readonly mappings: Record>; - }; + readonly queue: Array; readonly lastSaveTimestamp: { readonly skeleton: number; readonly volumes: Record; diff --git a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx index e577c802e94..87ad0d413fb 100644 --- a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx @@ -176,18 +176,14 @@ class SaveButton extends React.PureComponent { function getOldestUnsavedTimestamp(saveQueue: SaveState["queue"]): number | null | undefined { let oldestUnsavedTimestamp; - if (saveQueue.skeleton.length > 0) { - oldestUnsavedTimestamp = saveQueue.skeleton[0].timestamp; - } - - for (const volumeQueue of Utils.values(saveQueue.volumes)) { - if (volumeQueue.length > 0) { - const oldestVolumeTimestamp = volumeQueue[0].timestamp; - oldestUnsavedTimestamp = Math.min( - oldestUnsavedTimestamp != null ? oldestUnsavedTimestamp : Number.POSITIVE_INFINITY, - oldestVolumeTimestamp, - ); - } + if (saveQueue.length > 0) { + // todop: theoretically, could this be not the oldest one? 
+ // e.g., items are added to the queue like that: + // SkelT=1, SkelT=2, SkelT=3, VolT=1 + // now the first action is saved and the following remains: + // SkelT=2, SkelT=3, VolT=1 + // even if it could happen, probably not critical for the current context? + oldestUnsavedTimestamp = saveQueue[0].timestamp; } return oldestUnsavedTimestamp; diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 1ae173ee944..b7a83326ca1 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -29,11 +29,7 @@ const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject volumes: {}, mappings: {}, }, - queue: { - skeleton: [], - volumes: {}, - mappings: {}, - }, + queue: [], lastSaveTimestamp: { skeleton: 0, volumes: {}, diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index c74623c26d9..33d818926c9 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -71,7 +71,7 @@ test.serial( getStats(state.tracing, "skeleton", "irrelevant_in_skeleton_case") || undefined, ); // Reset the info field which is just for debugging purposes - const actualSaveQueue = state.save.queue.skeleton.map((entry) => { + const actualSaveQueue = state.save.queue.map((entry) => { return { ...omit(entry, "info"), info: "[]" }; }); // Once the updateTree update action is in the save queue, we're good. 
@@ -82,24 +82,21 @@ test.serial( test.serial("Save actions should not be chunked below the chunk limit (1/3)", (t) => { Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); + t.deepEqual(Store.getState().save.queue, []); const trees = generateDummyTrees(1000, 1); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); - t.is(Store.getState().save.queue.skeleton.length, 1); - t.true( - Store.getState().save.queue.skeleton[0].actions.length < - MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton, - ); + t.is(Store.getState().save.queue.length, 1); + t.true(Store.getState().save.queue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); }); test.serial("Save actions should be chunked above the chunk limit (2/3)", (t) => { Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); + t.deepEqual(Store.getState().save.queue, []); const trees = generateDummyTrees(5000, 1); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); const state = Store.getState(); - t.true(state.save.queue.skeleton.length > 1); - t.is(state.save.queue.skeleton[0].actions.length, MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); + t.true(state.save.queue.length > 1); + t.is(state.save.queue[0].actions.length, MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); }); test.serial("Save actions should be chunked after compacting (3/3)", (t) => { @@ -108,12 +105,12 @@ test.serial("Save actions should be chunked after compacting (3/3)", (t) => { const trees = generateDummyTrees(1, nodeCount); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); Store.dispatch(discardSaveQueuesAction()); - t.deepEqual(Store.getState().save.queue.skeleton, []); + t.deepEqual(Store.getState().save.queue, []); // Delete some node, NOTE that this is not the node in the middle of the tree! // The addTreesAndGroupsAction gives new ids to nodes and edges in a non-deterministic way. 
const middleNodeId = trees[0].nodes[nodeCount / 2].id; Store.dispatch(deleteNodeAction(middleNodeId)); - const { skeleton: skeletonSaveQueue } = Store.getState().save.queue; + const skeletonSaveQueue = Store.getState().save.queue; // There should only be one chunk t.is(skeletonSaveQueue.length, 1); t.true(skeletonSaveQueue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); From b15405ef511d334dd1d76c1f80f565aef042ca96 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 14:06:34 +0200 Subject: [PATCH 050/361] also store only one lastSaved and isBusy info for whole annotation --- frontend/javascripts/oxalis/default_state.ts | 12 +-- frontend/javascripts/oxalis/model.ts | 6 +- .../oxalis/model/accessors/save_accessor.ts | 11 --- .../oxalis/model/actions/save_actions.ts | 12 +-- .../oxalis/model/reducers/save_reducer.ts | 33 +------ .../oxalis/model/sagas/save_saga.ts | 95 +++++++++---------- frontend/javascripts/oxalis/store.ts | 13 +-- .../oxalis/view/action-bar/save_button.tsx | 13 ++- .../test/reducers/save_reducer.spec.ts | 12 +-- .../javascripts/test/sagas/save_saga.spec.ts | 26 ++--- 10 files changed, 70 insertions(+), 163 deletions(-) delete mode 100644 frontend/javascripts/oxalis/model/accessors/save_accessor.ts diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 090a3384aa6..6124b8cdfa7 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -180,16 +180,8 @@ const defaultState: OxalisState = { }, save: { queue: [], - isBusyInfo: { - skeleton: false, - volumes: {}, - mappings: {}, - }, - lastSaveTimestamp: { - skeleton: 0, - volumes: {}, - mappings: {}, - }, + isBusy: false, + lastSaveTimestamp: 0, progressInfo: { processedActionCount: 0, totalActionCount: 0, diff --git a/frontend/javascripts/oxalis/model.ts b/frontend/javascripts/oxalis/model.ts index f2d9d5db78b..f3c7e63e7bf 100644 --- 
a/frontend/javascripts/oxalis/model.ts +++ b/frontend/javascripts/oxalis/model.ts @@ -9,7 +9,6 @@ import { isLayerVisible, } from "oxalis/model/accessors/dataset_accessor"; import { getTotalSaveQueueLength } from "oxalis/model/reducers/save_reducer"; -import { isBusy } from "oxalis/model/accessors/save_accessor"; import { isDatasetAccessibleBySwitching } from "admin/admin_rest_api"; import { saveNowAction } from "oxalis/model/actions/save_actions"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; @@ -283,8 +282,7 @@ export class OxalisModel { stateSaved() { const state = Store.getState(); - const storeStateSaved = - !isBusy(state.save.isBusyInfo) && getTotalSaveQueueLength(state.save.queue) === 0; + const storeStateSaved = !state.save.isBusy && getTotalSaveQueueLength(state.save.queue) === 0; const pushQueuesSaved = _.reduce( this.dataLayers, @@ -341,7 +339,7 @@ export class OxalisModel { // The dispatch of the saveNowAction IN the while loop is deliberate. // Otherwise if an update action is pushed to the save queue during the Utils.sleep, // the while loop would continue running until the next save would be triggered. 
- if (!isBusy(Store.getState().save.isBusyInfo)) { + if (!Store.getState().save.isBusy) { Store.dispatch(saveNowAction()); } diff --git a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts b/frontend/javascripts/oxalis/model/accessors/save_accessor.ts deleted file mode 100644 index d9dd9d65aa0..00000000000 --- a/frontend/javascripts/oxalis/model/accessors/save_accessor.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { IsBusyInfo, OxalisState, SaveQueueEntry } from "oxalis/store"; -import type { SaveQueueType } from "oxalis/model/actions/save_actions"; -import * as Utils from "libs/utils"; - -export function isBusy(isBusyInfo: IsBusyInfo): boolean { - return ( - isBusyInfo.skeleton || - Utils.values(isBusyInfo.volumes).some((el) => el) || - Utils.values(isBusyInfo.mappings).some((el) => el) - ); -} diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index dca4997b9f6..05f0a1680f9 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -64,24 +64,16 @@ export const discardSaveQueuesAction = () => type: "DISCARD_SAVE_QUEUES", }) as const; -export const setSaveBusyAction = ( - isBusy: boolean, - saveQueueType: SaveQueueType, - tracingId: string, -) => +export const setSaveBusyAction = (isBusy: boolean) => ({ type: "SET_SAVE_BUSY", isBusy, - saveQueueType, - tracingId, }) as const; -export const setLastSaveTimestampAction = (saveQueueType: SaveQueueType, tracingId: string) => +export const setLastSaveTimestampAction = () => ({ type: "SET_LAST_SAVE_TIMESTAMP", timestamp: Date.now(), - saveQueueType, - tracingId, }) as const; export const setVersionNumberAction = ( diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index b8d75ff86dc..9b022ce96c5 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ 
b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -81,32 +81,6 @@ function updateVersion(state: OxalisState, action: SetVersionNumberAction) { return state; } -function updateLastSaveTimestamp(state: OxalisState, action: SetLastSaveTimestampAction) { - if (action.saveQueueType === "skeleton") { - return updateKey2(state, "save", "lastSaveTimestamp", { - skeleton: action.timestamp, - }); - } else if (action.saveQueueType === "volume") { - const newVolumesDict = { - ...state.save.lastSaveTimestamp.volumes, - [action.tracingId]: action.timestamp, - }; - return updateKey2(state, "save", "lastSaveTimestamp", { - volumes: newVolumesDict, - }); - } else if (action.saveQueueType === "mapping") { - const newMappingsDict = { - ...state.save.lastSaveTimestamp.mappings, - [action.tracingId]: action.timestamp, - }; - return updateKey2(state, "save", "lastSaveTimestamp", { - mappings: newMappingsDict, - }); - } - - return state; -} - function SaveReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { case "PUSH_SAVE_QUEUE_TRANSACTION": { @@ -243,18 +217,17 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } case "SET_SAVE_BUSY": { - const newIsBusyInfo = updateTracingDict(action, state.save.isBusyInfo, action.isBusy); return update(state, { save: { - isBusyInfo: { - $set: newIsBusyInfo, + isBusy: { + $set: action.isBusy, }, }, }); } case "SET_LAST_SAVE_TIMESTAMP": { - return updateLastSaveTimestamp(state, action); + return updateKey2(state, "save", "lastSaveTimestamp", action.timestamp); } case "SET_VERSION_NUMBER": { diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 4cc5d6e6f5b..b6aa7eab05f 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -57,10 +57,10 @@ import { call, delay, fork, put, race, take, takeEvery } from "typed-redux-saga" const ONE_YEAR_MS = 365 * 
24 * 3600 * 1000; -export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: string): Saga { +export function* pushSaveQueueAsync(): Saga { yield* call(ensureWkReady); - yield* put(setLastSaveTimestampAction(saveQueueType, tracingId)); + yield* put(setLastSaveTimestampAction()); let loopCounter = 0; while (true) { @@ -85,7 +85,7 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str timeout: delay(PUSH_THROTTLE_TIME), forcePush: take("SAVE_NOW"), }); - yield* put(setSaveBusyAction(true, saveQueueType, tracingId)); + yield* put(setSaveBusyAction(true)); // Send (parts) of the save queue to the server. // There are two main cases: @@ -119,7 +119,7 @@ export function* pushSaveQueueAsync(saveQueueType: SaveQueueType, tracingId: str } } - yield* put(setSaveBusyAction(false, saveQueueType, tracingId)); + yield* put(setSaveBusyAction(false)); } } export function sendRequestWithToken( @@ -209,18 +209,16 @@ export function* sendRequestToServer( } yield* put(setVersionNumberAction(version + versionIncrement, saveQueueType, tracingId)); - yield* put(setLastSaveTimestampAction(saveQueueType, tracingId)); + yield* put(setLastSaveTimestampAction()); yield* put(shiftSaveQueueAction(saveQueue.length, saveQueueType, tracingId)); - if (saveQueueType === "volume") { - try { - yield* call(markBucketsAsNotDirty, compactedSaveQueue, tracingId); - } catch (error) { - // If markBucketsAsNotDirty fails some reason, wk cannot recover from this error. - console.warn("Error when marking buckets as clean. No retry possible. Error:", error); - exceptionDuringMarkBucketsAsNotDirty = true; - throw error; - } + try { + yield* call(markBucketsAsNotDirty, compactedSaveQueue); + } catch (error) { + // If markBucketsAsNotDirty fails some reason, wk cannot recover from this error. + console.warn("Error when marking buckets as clean. No retry possible. 
Error:", error); + exceptionDuringMarkBucketsAsNotDirty = true; + throw error; } yield* call(toggleErrorHighlighting, false); @@ -285,33 +283,37 @@ export function* sendRequestToServer( } } -function* markBucketsAsNotDirty(saveQueue: Array, tracingId: string) { - const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); - const segmentationResolutionInfo = yield* call(getResolutionInfo, segmentationLayer.resolutions); - - if (segmentationLayer != null) { - for (const saveEntry of saveQueue) { - for (const updateAction of saveEntry.actions) { - if (updateAction.name === "updateBucket") { - const { position, mag, additionalCoordinates } = updateAction.value; - const resolutionIndex = segmentationResolutionInfo.getIndexByResolution(mag); - const zoomedBucketAddress = globalPositionToBucketPosition( - position, - segmentationResolutionInfo.getDenseResolutions(), - resolutionIndex, - additionalCoordinates, - ); - const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); - - if (bucket.type === "null") { - continue; - } - - bucket.dirtyCount--; - - if (bucket.dirtyCount === 0) { - bucket.markAsPushed(); - } +function* markBucketsAsNotDirty(saveQueue: Array) { + for (const saveEntry of saveQueue) { + for (const updateAction of saveEntry.actions) { + if (updateAction.name === "updateBucket") { + // The ID must belong to a segmentation layer because we are handling + // an updateBucket action. 
+ const { actionTracingId: tracingId } = updateAction.value; + const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); + const segmentationResolutionInfo = yield* call( + getResolutionInfo, + segmentationLayer.resolutions, + ); + + const { position, mag, additionalCoordinates } = updateAction.value; + const resolutionIndex = segmentationResolutionInfo.getIndexByResolution(mag); + const zoomedBucketAddress = globalPositionToBucketPosition( + position, + segmentationResolutionInfo.getDenseResolutions(), + resolutionIndex, + additionalCoordinates, + ); + const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); + + if (bucket.type === "null") { + continue; + } + + bucket.dirtyCount--; + + if (bucket.dirtyCount === 0) { + bucket.markAsPushed(); } } } @@ -379,19 +381,11 @@ export function performDiffTracing( } export function* saveTracingAsync(): Saga { + yield* fork(pushSaveQueueAsync); yield* takeEvery("INITIALIZE_SKELETONTRACING", setupSavingForTracingType); yield* takeEvery("INITIALIZE_VOLUMETRACING", setupSavingForTracingType); - yield* takeEvery("INITIALIZE_EDITABLE_MAPPING", setupSavingForEditableMapping); } -export function* setupSavingForEditableMapping( - initializeAction: InitializeEditableMappingAction, -): Saga { - // No diffing needs to be done for editable mappings as the saga pushes update actions - // to the respective save queues, itself - const volumeTracingId = initializeAction.mapping.tracingId; - yield* fork(pushSaveQueueAsync, "mapping", volumeTracingId); -} export function* setupSavingForTracingType( initializeAction: InitializeSkeletonTracingAction | InitializeVolumeTracingAction, ): Saga { @@ -403,7 +397,6 @@ export function* setupSavingForTracingType( const saveQueueType = initializeAction.type === "INITIALIZE_SKELETONTRACING" ? 
"skeleton" : "volume"; const tracingId = initializeAction.tracing.id; - yield* fork(pushSaveQueueAsync, saveQueueType, tracingId); let prevTracing = (yield* select((state) => selectTracing(state, saveQueueType, tracingId))) as | VolumeTracing | SkeletonTracing; diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 24aca44ec1f..720989e8abc 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -450,19 +450,10 @@ export type ProgressInfo = { readonly processedActionCount: number; readonly totalActionCount: number; }; -export type IsBusyInfo = { - readonly skeleton: boolean; - readonly volumes: Record; - readonly mappings: Record; -}; export type SaveState = { - readonly isBusyInfo: IsBusyInfo; + readonly isBusy: boolean; readonly queue: Array; - readonly lastSaveTimestamp: { - readonly skeleton: number; - readonly volumes: Record; - readonly mappings: Record; - }; + readonly lastSaveTimestamp: number; readonly progressInfo: ProgressInfo; }; export type Flycam = { diff --git a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx index 87ad0d413fb..ab12ab6fd61 100644 --- a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx @@ -2,8 +2,7 @@ import { connect } from "react-redux"; import React from "react"; import _ from "lodash"; import Store, { type SaveState } from "oxalis/store"; -import type { OxalisState, IsBusyInfo } from "oxalis/store"; -import { isBusy } from "oxalis/model/accessors/save_accessor"; +import type { OxalisState } from "oxalis/store"; import ButtonComponent from "oxalis/view/components/button_component"; import { Model } from "oxalis/singletons"; import window from "libs/window"; @@ -25,7 +24,7 @@ type OwnProps = { }; type StateProps = { progressFraction: number | null | undefined; - isBusyInfo: IsBusyInfo; + isBusy: boolean; }; 
type Props = OwnProps & StateProps; type State = { @@ -101,7 +100,7 @@ class SaveButton extends React.PureComponent { getSaveButtonIcon() { if (this.state.isStateSaved) { return ; - } else if (isBusy(this.props.isBusyInfo)) { + } else if (this.props.isBusy) { return ; } else { return ; @@ -109,7 +108,7 @@ class SaveButton extends React.PureComponent { } shouldShowProgress(): boolean { - return isBusy(this.props.isBusyInfo) && this.props.progressFraction != null; + return this.props.isBusy && this.props.progressFraction != null; } render() { @@ -190,9 +189,9 @@ function getOldestUnsavedTimestamp(saveQueue: SaveState["queue"]): number | null } function mapStateToProps(state: OxalisState): StateProps { - const { progressInfo, isBusyInfo } = state.save; + const { progressInfo, isBusy } = state.save; return { - isBusyInfo, + isBusy, // For a low action count, the progress info would show only for a very short amount of time. // Therefore, the progressFraction is set to null, if the count is low. 
progressFraction: diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index b7a83326ca1..ff5882e2cf6 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -24,17 +24,9 @@ const tracingId = "1234567890"; const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject } = { activeUser: dummyUser, save: { - isBusyInfo: { - skeleton: false, - volumes: {}, - mappings: {}, - }, + isBusy: false, queue: [], - lastSaveTimestamp: { - skeleton: 0, - volumes: {}, - mappings: {}, - }, + lastSaveTimestamp: 0, progressInfo: { processedActionCount: 0, totalActionCount: 0, diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index ed9f3146fa8..3c8dd47fe3b 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -96,7 +96,7 @@ test("SaveSaga should send update actions", (t) => { saga.next({ forcePush: SaveActions.saveNowAction(), }), - put(setSaveBusyAction(true, TRACING_TYPE, tracingId)), + put(setSaveBusyAction(true)), ); saga.next(); // advance to next select state @@ -104,7 +104,7 @@ test("SaveSaga should send update actions", (t) => { expectValueDeepEqual(t, saga.next(saveQueue), call(sendRequestToServer, TRACING_TYPE, tracingId)); saga.next(saveQueue.length); // select state - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); // Test that loop repeats saga.next(); // select state @@ -229,7 +229,7 @@ test("SaveSaga should send update actions right away and try to reach a state wh saga.next(1); // advance to select state - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), 
put(setSaveBusyAction(false))); }); test("SaveSaga should not try to reach state with all actions being saved when saving is triggered by a timeout", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; @@ -248,7 +248,7 @@ test("SaveSaga should not try to reach state with all actions being saved when s saga.next(saveQueue); // call sendRequestToServer - expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false, TRACING_TYPE, tracingId))); + expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); }); test("SaveSaga should remove the correct update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( @@ -272,11 +272,7 @@ test("SaveSaga should remove the correct update actions", (t) => { saga.next(), put(SaveActions.setVersionNumberAction(3, TRACING_TYPE, tracingId)), ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); expectValueDeepEqual( t, saga.next(), @@ -306,11 +302,7 @@ test("SaveSaga should set the correct version numbers", (t) => { saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 3, TRACING_TYPE, tracingId)), ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); expectValueDeepEqual( t, saga.next(), @@ -341,11 +333,7 @@ test("SaveSaga should set the correct version numbers if the save queue was comp saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 1, TRACING_TYPE, tracingId)), ); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setLastSaveTimestampAction(TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); expectValueDeepEqual( t, 
saga.next(), From 3952474e3f1f1d272101095a87a0a930833a3194 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 14:17:55 +0200 Subject: [PATCH 051/361] make sendRequestToServer independent of tracing --- .../oxalis/model/actions/save_actions.ts | 8 +--- .../oxalis/model/reducers/save_reducer.ts | 37 +------------------ .../oxalis/model/sagas/save_saga.ts | 29 ++++++++------- .../oxalis/model/sagas/save_saga_constants.ts | 12 +++--- 4 files changed, 25 insertions(+), 61 deletions(-) diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index 05f0a1680f9..42d8e47747c 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -47,16 +47,10 @@ export const saveNowAction = () => type: "SAVE_NOW", }) as const; -export const shiftSaveQueueAction = ( - count: number, - saveQueueType: SaveQueueType, - tracingId: string, -) => +export const shiftSaveQueueAction = (count: number) => ({ type: "SHIFT_SAVE_QUEUE", count, - saveQueueType, - tracingId, }) as const; export const discardSaveQueuesAction = () => diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 9b022ce96c5..26a9f0e19ce 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -1,12 +1,8 @@ import _ from "lodash"; import update from "immutability-helper"; import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState, SaveState, SaveQueueEntry } from "oxalis/store"; -import type { - SetVersionNumberAction, - SetLastSaveTimestampAction, - SaveQueueType, -} from "oxalis/model/actions/save_actions"; +import type { OxalisState, SaveState } from "oxalis/store"; +import type { SetVersionNumberAction } from "oxalis/model/actions/save_actions"; import { getActionLog } 
from "oxalis/model/helpers/action_logger_middleware"; import { getStats } from "oxalis/model/accessors/annotation_accessor"; import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; @@ -16,7 +12,6 @@ import { updateVolumeTracing, } from "oxalis/model/reducers/volumetracing_reducer_helpers"; import Date from "libs/date"; -import * as Utils from "libs/utils"; import type { UpdateAction, UpdateActionWithTracingId } from "../sagas/update_actions"; // These update actions are not idempotent. Having them @@ -31,34 +26,6 @@ const NOT_IDEMPOTENT_ACTIONS = [ "deleteNode", ]; -type TracingDict = { - skeleton: V; - volumes: Record; - mappings: Record; -}; - -function updateTracingDict( - action: { saveQueueType: SaveQueueType; tracingId: string }, - oldDict: TracingDict, - newValue: V, -): TracingDict { - if (action.saveQueueType === "skeleton") { - return { ...oldDict, skeleton: newValue }; - } else if (action.saveQueueType === "volume") { - return { - ...oldDict, - volumes: { ...oldDict.volumes, [action.tracingId]: newValue }, - }; - } else if (action.saveQueueType === "mapping") { - return { - ...oldDict, - mappings: { ...oldDict.mappings, [action.tracingId]: newValue }, - }; - } - - return oldDict; -} - export function getTotalSaveQueueLength(queueObj: SaveState["queue"]) { return queueObj.length; } diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index b6aa7eab05f..0916eca859c 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -113,7 +113,7 @@ export function* pushSaveQueueAsync(): Saga { saveQueue = yield* select((state) => state.save.queue); if (saveQueue.length > 0) { - savedItemCount += yield* call(sendRequestToServer, saveQueueType, tracingId); + savedItemCount += yield* call(sendRequestToServer); } else { break; } @@ -131,17 +131,14 @@ export function sendRequestWithToken( // This 
function returns the first n batches of the provided array, so that the count of // all actions in these n batches does not exceed MAXIMUM_ACTION_COUNT_PER_SAVE -function sliceAppropriateBatchCount( - batches: Array, - saveQueueType: SaveQueueType, -): Array { +function sliceAppropriateBatchCount(batches: Array): Array { const slicedBatches = []; let actionCount = 0; for (const batch of batches) { const newActionCount = actionCount + batch.actions.length; - if (newActionCount <= MAXIMUM_ACTION_COUNT_PER_SAVE[saveQueueType]) { + if (newActionCount <= MAXIMUM_ACTION_COUNT_PER_SAVE) { actionCount = newActionCount; slicedBatches.push(batch); } else { @@ -161,10 +158,7 @@ function getRetryWaitTime(retryCount: number) { // at any time, because the browser page is reloaded after the message is shown, anyway. let didShowFailedSimultaneousTracingError = false; -export function* sendRequestToServer( - saveQueueType: SaveQueueType, - tracingId: string, -): Saga { +export function* sendRequestToServer(): Saga { /* * Saves a reasonably-sized part of the save queue (that corresponds to the * tracingId) to the server (plus retry-mechanism). 
@@ -172,9 +166,12 @@ export function* sendRequestToServer( */ const fullSaveQueue = yield* select((state) => state.save.queue); - const saveQueue = sliceAppropriateBatchCount(fullSaveQueue, saveQueueType); + const saveQueue = sliceAppropriateBatchCount(fullSaveQueue); let compactedSaveQueue = compactSaveQueue(saveQueue); - const { version } = yield* select((state) => selectTracing(state, saveQueueType, tracingId)); + const tracings = yield* select((state) => + _.compact([state.tracing.skeleton, ...state.tracing.volumes, ...state.tracing.mappings]), + ); + const version = _.max(tracings.map((t) => t.version)) || 0; const annotationId = yield* select((state) => state.tracing.annotationId); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); let versionIncrement; @@ -208,9 +205,13 @@ export function* sendRequestToServer( ); } - yield* put(setVersionNumberAction(version + versionIncrement, saveQueueType, tracingId)); + for (const tracing of tracings) { + yield* put( + setVersionNumberAction(version + versionIncrement, tracing.type, tracing.tracingId), + ); + } yield* put(setLastSaveTimestampAction()); - yield* put(shiftSaveQueueAction(saveQueue.length, saveQueueType, tracingId)); + yield* put(shiftSaveQueueAction(saveQueue.length)); try { yield* call(markBucketsAsNotDirty, compactedSaveQueue); diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts index 0fdc776eb2c..87ace921de4 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts @@ -17,8 +17,10 @@ export const MAXIMUM_ACTION_COUNT_PER_BATCH = { mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. 
} as const; -export const MAXIMUM_ACTION_COUNT_PER_SAVE = { - skeleton: 15000, - volume: 3000, - mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. -} as const; +// todop: should this be smarter? +// export const MAXIMUM_ACTION_COUNT_PER_SAVE = { +// skeleton: 15000, +// volume: 3000, +// mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. +// } as const; +export const MAXIMUM_ACTION_COUNT_PER_SAVE = 3000; From 48bf1b7f6f2a4694c69ca271a703b5db8a2beb2d Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 14:18:54 +0200 Subject: [PATCH 052/361] rename sendRequestToServer --- .../oxalis/model/sagas/save_saga.ts | 7 +++--- .../javascripts/test/sagas/save_saga.spec.ts | 24 +++++++++++-------- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 0916eca859c..44d3eae1b5e 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -42,7 +42,6 @@ import { } from "oxalis/model/sagas/save_saga_constants"; import { diffSkeletonTracing } from "oxalis/model/sagas/skeletontracing_saga"; import type { UpdateAction } from "oxalis/model/sagas/update_actions"; -import { updateTdCamera } from "oxalis/model/sagas/update_actions"; import { diffVolumeTracing } from "oxalis/model/sagas/volumetracing_saga"; import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga"; import { Model } from "oxalis/singletons"; @@ -67,7 +66,7 @@ export function* pushSaveQueueAsync(): Saga { loopCounter++; let saveQueue; // Check whether the save queue is actually empty, the PUSH_SAVE_QUEUE_TRANSACTION action - // could have been triggered during the call to sendRequestToServer + // could have been triggered during the call to sendSaveRequestToServer saveQueue = yield* select((state) => state.save.queue); if 
(saveQueue.length === 0) { @@ -113,7 +112,7 @@ export function* pushSaveQueueAsync(): Saga { saveQueue = yield* select((state) => state.save.queue); if (saveQueue.length > 0) { - savedItemCount += yield* call(sendRequestToServer); + savedItemCount += yield* call(sendSaveRequestToServer); } else { break; } @@ -158,7 +157,7 @@ function getRetryWaitTime(retryCount: number) { // at any time, because the browser page is reloaded after the message is shown, anyway. let didShowFailedSimultaneousTracingError = false; -export function* sendRequestToServer(): Saga { +export function* sendSaveRequestToServer(): Saga { /* * Saves a reasonably-sized part of the save queue (that corresponds to the * tracingId) to the server (plus retry-mechanism). diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 3c8dd47fe3b..c85580c719d 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -23,7 +23,7 @@ const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions"); const { take, call, put } = mockRequire.reRequire("redux-saga/effects"); const { pushSaveQueueAsync, - sendRequestToServer, + sendSaveRequestToServer, toggleErrorHighlighting, addVersionNumbers, sendRequestWithToken, @@ -101,7 +101,11 @@ test("SaveSaga should send update actions", (t) => { saga.next(); // advance to next select state - expectValueDeepEqual(t, saga.next(saveQueue), call(sendRequestToServer, TRACING_TYPE, tracingId)); + expectValueDeepEqual( + t, + saga.next(saveQueue), + call(sendSaveRequestToServer, TRACING_TYPE, tracingId), + ); saga.next(saveQueue.length); // select state expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); @@ -116,7 +120,7 @@ test("SaveSaga should send request to server", (t) => { TIMESTAMP, tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(TRACING_TYPE, 
tracingId); saga.next(); saga.next(saveQueue); saga.next({ @@ -152,7 +156,7 @@ test("SaveSaga should retry update actions", (t) => { compress: false, }, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); saga.next(); saga.next(saveQueue); saga.next({ @@ -175,7 +179,7 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => TIMESTAMP, tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); saga.next(); saga.next(saveQueue); saga.next({ @@ -225,7 +229,7 @@ test("SaveSaga should send update actions right away and try to reach a state wh saga.next(); // select state - saga.next(saveQueue); // call sendRequestToServer + saga.next(saveQueue); // call sendSaveRequestToServer saga.next(1); // advance to select state @@ -246,7 +250,7 @@ test("SaveSaga should not try to reach state with all actions being saved when s timeout: "a placeholder", }); // put setSaveBusyAction - saga.next(saveQueue); // call sendRequestToServer + saga.next(saveQueue); // call sendSaveRequestToServer expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); }); @@ -259,7 +263,7 @@ test("SaveSaga should remove the correct update actions", (t) => { TIMESTAMP, tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); saga.next(); saga.next(saveQueue); saga.next({ @@ -289,7 +293,7 @@ test("SaveSaga should set the correct version numbers", (t) => { TIMESTAMP, tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); saga.next(); saga.next(saveQueue); saga.next({ @@ -319,7 +323,7 @@ test("SaveSaga should set the correct version numbers if the save queue was comp TIMESTAMP, tracingId, ); - const saga = sendRequestToServer(TRACING_TYPE, 
tracingId); + const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); saga.next(); saga.next(saveQueue); saga.next({ From cd101bda3f8a90f7fc7bd1d0fbbe316162ea76f8 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 14:51:01 +0200 Subject: [PATCH 053/361] fix save_reducer.spec --- .../oxalis/model/sagas/mapping_saga.ts | 4 +- .../oxalis/model/sagas/save_saga.ts | 2 - .../test/reducers/save_reducer.spec.ts | 63 ++++++++++--------- 3 files changed, 37 insertions(+), 32 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index 0ec9d30ab66..1a0044c4370 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -96,7 +96,9 @@ const takeLatestMappingChange = ( ); const mapping = getMappingInfo(activeMappingByLayer, layerName); - console.log("Changed from", lastBucketRetrievalSource, "to", bucketRetrievalSource); + if (process.env.NODE_ENV === "production") { + console.log("Changed from", lastBucketRetrievalSource, "to", bucketRetrievalSource); + } if (lastWatcherTask) { console.log("Cancel old bucket watcher"); diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 44d3eae1b5e..d9253fc6606 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -13,7 +13,6 @@ import { getResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; import { selectTracing } from "oxalis/model/accessors/tracing_accessor"; import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; import { FlycamActions } from "oxalis/model/actions/flycam_actions"; -import type { SaveQueueType } from "oxalis/model/actions/save_actions"; import { pushSaveQueueTransaction, setLastSaveTimestampAction, @@ -25,7 +24,6 @@ import type { 
InitializeSkeletonTracingAction } from "oxalis/model/actions/skele import { SkeletonTracingSaveRelevantActions } from "oxalis/model/actions/skeletontracing_actions"; import { ViewModeSaveRelevantActions } from "oxalis/model/actions/view_mode_actions"; import { - type InitializeEditableMappingAction, type InitializeVolumeTracingAction, VolumeTracingSaveRelevantActions, } from "oxalis/model/actions/volumetracing_actions"; diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index ff5882e2cf6..6428fb94bd8 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -2,10 +2,9 @@ import mockRequire from "mock-require"; import test from "ava"; import "test/reducers/save_reducer.mock"; import dummyUser from "test/fixtures/dummy_user"; -import type { SaveState } from "oxalis/store"; -import type { APIUser } from "types/api_flow_types"; +import type { OxalisState } from "oxalis/store"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; -import type { EmptyObject } from "types/globals"; +import type { UpdateAction } from "oxalis/model/sagas/update_actions"; const TIMESTAMP = 1494695001688; const DateMock = { @@ -16,12 +15,18 @@ const AccessorMock = { }; mockRequire("libs/date", DateMock); mockRequire("oxalis/model/accessors/skeletontracing_accessor", AccessorMock); -const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer").default; -const { createEdge } = mockRequire.reRequire("oxalis/model/sagas/update_actions"); + +const SaveActions = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof import("oxalis/model/actions/save_actions"); +const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer") + .default as typeof 
import("oxalis/model/reducers/save_reducer")["default"]; +const { createEdge } = mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); const tracingId = "1234567890"; -const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject } = { +const initialState = { activeUser: dummyUser, save: { isBusy: false, @@ -33,13 +38,13 @@ const initialState: { save: SaveState; activeUser: APIUser; tracing: EmptyObject }, }, tracing: {}, -}; +} as any as OxalisState; test("Save should add update actions to the queue", (t) => { const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const newState = SaveReducer(initialState, pushAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add more update actions to the queue", (t) => { const getItems = (treeId: number) => [createEdge(treeId, 1, 2), createEdge(treeId, 2, 3)]; @@ -50,54 +55,54 @@ test("Save should add more update actions to the queue", (t) => { ); const testState = SaveReducer( initialState, - SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton"), + SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton", tracingId), ); const newState = SaveReducer( testState, - SaveActions.pushSaveQueueTransaction(getItems(1), "skeleton"), + SaveActions.pushSaveQueueTransaction(getItems(1), "skeleton", tracingId), ); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add zero update actions to the queue", (t) => { - const items = [] as const; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const items: 
UpdateAction[] = []; + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const newState = SaveReducer(initialState, pushAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); test("Save should remove one update actions from the queue", (t) => { const firstItem = [createEdge(0, 1, 2)]; const secondItem = [createEdge(1, 2, 3)]; const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP, tracingId); - const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton"); - const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(1, "skeleton"); + const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton", tracingId); + const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton", tracingId); + const popAction = SaveActions.shiftSaveQueueAction(1); let newState = SaveReducer(initialState, firstPushAction); newState = SaveReducer(newState, secondPushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should remove zero update actions from the queue", (t) => { const items = [createEdge(0, 1, 2), createEdge(1, 2, 3)]; const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(0, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const popAction = SaveActions.shiftSaveQueueAction(0); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, saveQueue); + t.deepEqual(newState.save.queue, saveQueue); }); test("Save should remove 
all update actions from the queue (1/2)", (t) => { const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(2, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const popAction = SaveActions.shiftSaveQueueAction(2); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); test("Save should remove all update actions from the queue (2/2)", (t) => { const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); - const popAction = SaveActions.shiftSaveQueueAction(5, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const popAction = SaveActions.shiftSaveQueueAction(5); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); - t.deepEqual(newState.save.queue.skeleton, []); + t.deepEqual(newState.save.queue, []); }); From b3fb1193df1bf3a4a42480de1558e909c97f2920 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 15:25:18 +0200 Subject: [PATCH 054/361] fix save saga spec --- .../javascripts/test/sagas/save_saga.spec.ts | 178 ++++++++++-------- 1 file changed, 102 insertions(+), 76 deletions(-) diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index c85580c719d..8036007eb96 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -18,17 +18,27 @@ mockRequire("libs/date", DateMock); mockRequire("oxalis/model/sagas/root_saga", function* () { yield; }); -const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions"); -const 
SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const { take, call, put } = mockRequire.reRequire("redux-saga/effects"); +const UpdateActions = mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); +const SaveActions = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof import("oxalis/model/actions/save_actions"); +const { take, call, put } = mockRequire.reRequire( + "redux-saga/effects", +) as typeof import("redux-saga/effects"); const { pushSaveQueueAsync, sendSaveRequestToServer, toggleErrorHighlighting, addVersionNumbers, sendRequestWithToken, -} = mockRequire.reRequire("oxalis/model/sagas/save_saga"); -const tracingId = "1234567890"; +} = mockRequire.reRequire( + "oxalis/model/sagas/save_saga", +) as typeof import("oxalis/model/sagas/save_saga"); + +const annotationId = "annotation-abcdefgh"; +const tracingId = "tracing-1234567890"; const initialState = { dataset: { dataSource: { @@ -71,8 +81,8 @@ const TRACING_TYPE = "skeleton"; test("SaveSaga should compact multiple updateTracing update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], ], TIMESTAMP, tracingId, @@ -82,7 +92,7 @@ test("SaveSaga should compact multiple updateTracing update actions", (t) => { test("SaveSaga should send update actions", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saga = 
pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); // setLastSaveTimestampAction @@ -101,11 +111,7 @@ test("SaveSaga should send update actions", (t) => { saga.next(); // advance to next select state - expectValueDeepEqual( - t, - saga.next(saveQueue), - call(sendSaveRequestToServer, TRACING_TYPE, tracingId), - ); + expectValueDeepEqual(t, saga.next(saveQueue), call(sendSaveRequestToServer)); saga.next(saveQueue.length); // select state expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); @@ -120,23 +126,31 @@ test("SaveSaga should send request to server", (t) => { TIMESTAMP, tracingId, ); - const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); expectValueDeepEqual( t, saga.next(TRACINGSTORE_URL), - call(sendRequestWithToken, `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, { - method: "POST", - data: saveQueueWithVersions, - compress: false, - }), + call( + sendRequestWithToken, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, + { + method: "POST", + data: saveQueueWithVersions, + compress: false, + }, + ), ); }); test("SaveSaga should retry update actions", (t) => { @@ -149,20 +163,24 @@ test("SaveSaga should retry update actions", (t) => { t.is(versionIncrement, 2); const requestWithTokenCall = call( sendRequestWithToken, - `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, { method: "POST", data: saveQueueWithVersions, compress: false, }, ); - const saga = 
sendSaveRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); expectValueDeepEqual(t, saga.next(TRACINGSTORE_URL), requestWithTokenCall); saga.throw("Timeout"); expectValueDeepEqual(t, saga.next("Explorational"), call(toggleErrorHighlighting, true)); @@ -179,23 +197,31 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => TIMESTAMP, tracingId, ); - const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); expectValueDeepEqual( t, saga.next(TRACINGSTORE_URL), - call(sendRequestWithToken, `${TRACINGSTORE_URL}/tracings/skeleton/1234567890/update?token=`, { - method: "POST", - data: saveQueueWithVersions, - compress: false, - }), + call( + sendRequestWithToken, + `${TRACINGSTORE_URL}/tracings/annotation/${annotationId}/update?token=`, + { + method: "POST", + data: saveQueueWithVersions, + compress: false, + }, + ), ); saga.throw({ status: 409, @@ -215,7 +241,7 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => test("SaveSaga should send update actions right away and try to reach a state where all updates are saved", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saga = 
pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); saga.next(); // select state @@ -238,7 +264,7 @@ test("SaveSaga should send update actions right away and try to reach a state wh test("SaveSaga should not try to reach state with all actions being saved when saving is triggered by a timeout", (t) => { const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); - const saga = pushSaveQueueAsync(TRACING_TYPE, tracingId); + const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); saga.next(); // select state @@ -257,19 +283,23 @@ test("SaveSaga should not try to reach state with all actions being saved when s test("SaveSaga should remove the correct update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], ], TIMESTAMP, tracingId, ); - const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); saga.next(TRACINGSTORE_URL); expectValueDeepEqual( t, @@ -277,11 +307,7 @@ test("SaveSaga should remove the correct update actions", (t) => { put(SaveActions.setVersionNumberAction(3, TRACING_TYPE, tracingId)), ); expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); - expectValueDeepEqual( - t, - saga.next(), - 
put(SaveActions.shiftSaveQueueAction(2, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(2))); }); test("SaveSaga should set the correct version numbers", (t) => { const saveQueue = createSaveQueueFromUpdateActions( @@ -293,13 +319,17 @@ test("SaveSaga should set the correct version numbers", (t) => { TIMESTAMP, tracingId, ); - const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); + const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); saga.next(TRACINGSTORE_URL); expectValueDeepEqual( t, @@ -307,29 +337,29 @@ test("SaveSaga should set the correct version numbers", (t) => { put(SaveActions.setVersionNumberAction(LAST_VERSION + 3, TRACING_TYPE, tracingId)), ); expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.shiftSaveQueueAction(3, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(3))); }); test("SaveSaga should set the correct version numbers if the save queue was compacted", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.updateSkeletonTracing(initialState, [1, 2, 3], [0, 0, 1], 1)], - [UpdateActions.updateSkeletonTracing(initialState, [2, 3, 4], [0, 0, 1], 2)], - [UpdateActions.updateSkeletonTracing(initialState, [3, 4, 5], [0, 0, 1], 3)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [1, 2, 3], [], [0, 0, 1], 1)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], + [UpdateActions.updateSkeletonTracing(initialState.tracing, [3, 4, 5], [], [0, 0, 1], 3)], ], TIMESTAMP, tracingId, ); - const saga = sendSaveRequestToServer(TRACING_TYPE, tracingId); + const 
saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next({ - version: LAST_VERSION, - type: TRACING_TYPE, - }); + saga.next([ + { + version: LAST_VERSION, + type: TRACING_TYPE, + tracingId, + }, + ]); + saga.next(annotationId); saga.next(TRACINGSTORE_URL); // two of the updateTracing update actions are removed by compactSaveQueue expectValueDeepEqual( @@ -338,11 +368,7 @@ test("SaveSaga should set the correct version numbers if the save queue was comp put(SaveActions.setVersionNumberAction(LAST_VERSION + 1, TRACING_TYPE, tracingId)), ); expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.shiftSaveQueueAction(3, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(3))); }); test("SaveSaga addVersionNumbers should set the correct version numbers", (t) => { const saveQueue = createSaveQueueFromUpdateActions( From f5bda6d115482ee54e529caa316b36c0a181e9c1 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 15:50:21 +0200 Subject: [PATCH 055/361] fix volumetracing_saga.spec.ts --- .../test/sagas/volumetracing/volumetracing_saga.spec.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts index d7b16773e50..f30a50ea181 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts @@ -144,7 +144,6 @@ test("VolumeTracingSaga shouldn't do anything if unchanged (saga test)", (t) => const saga = setupSavingForTracingType( VolumeTracingActions.initializeVolumeTracingAction(serverVolumeTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.volumes[0]); @@ -165,7 +164,6 @@ test("VolumeTracingSaga should 
do something if changed (saga test)", (t) => { const saga = setupSavingForTracingType( VolumeTracingActions.initializeVolumeTracingAction(serverVolumeTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.volumes[0]); From 23f64d9ff11bc82925a4afd703d1e6baf88c6e35 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 15:56:46 +0200 Subject: [PATCH 056/361] fix skeletontracing saga spec --- .../test/sagas/skeletontracing_saga.spec.ts | 133 ++++++++++-------- 1 file changed, 76 insertions(+), 57 deletions(-) diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index f612805bcc7..b19c883f296 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -27,7 +27,8 @@ import { TreeTypeEnum } from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; import type { ServerSkeletonTracing } from "types/api_flow_types"; import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; -import { addTracingIdToActions } from "oxalis/model/reducers/save_reducer"; +import { UpdateAction } from "oxalis/model/sagas/update_actions"; +import { TracingStats } from "oxalis/model/accessors/annotation_accessor"; const TIMESTAMP = 1494347146379; const DateMock = { @@ -94,6 +95,22 @@ function compactSaveQueueWithUpdateActions( ); } +function createCompactedSaveQueueFromUpdateActions( + updateActions: UpdateAction[][], + timestamp: number, + tracing: SkeletonTracing, + stats: TracingStats | null = null, +) { + return compactSaveQueue( + createSaveQueueFromUpdateActions( + updateActions.map((batch) => compactUpdateActions(batch, tracing)), + timestamp, + tracing.tracingId, + stats, + ), + ); +} + const skeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, @@ -187,7 +204,6 @@ 
test("SkeletonTracingSaga shouldn't do anything if unchanged (saga test)", (t) = const saga = setupSavingForTracingType( SkeletonTracingActions.initializeSkeletonTracingAction(serverSkeletonTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.skeleton); @@ -207,7 +223,6 @@ test("SkeletonTracingSaga should do something if changed (saga test)", (t) => { const saga = setupSavingForTracingType( SkeletonTracingActions.initializeSkeletonTracingAction(serverSkeletonTracing), ); - saga.next(); // forking pushSaveQueueAsync saga.next(); saga.next(initialState.tracing.skeleton); @@ -642,20 +657,18 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions( + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing.tracingId, - ); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + skeletonTracing, ); + const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; // This should result in a moved treeComponent of size three t.deepEqual(simplifiedFirstBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [1, 2, 3], @@ -665,6 +678,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 1, }, }); @@ -672,6 +686,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 4, target: 1, @@ -706,20 +721,18 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { testDiffing(newState1.tracing, newState2.tracing, newState1.flycam, newState2.flycam), ); // 
compactUpdateActions is triggered by the saving, it can therefore contain the results of more than one diffing - const saveQueue = createSaveQueueFromUpdateActions( + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( updateActions, TIMESTAMP, - skeletonTracing.tracingId, - ); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState2.tracing), + skeletonTracing, ); + // This should result in one created node and its edge (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createNode", value: { + actionTracingId: "tracingId", id: 5, treeId: 2, }, @@ -727,6 +740,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.like(simplifiedFirstBatch[1], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 4, target: 5, @@ -738,6 +752,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [1, 2, 3], @@ -747,6 +762,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 1, }, }); @@ -757,6 +773,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[4], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 5, target: 1, @@ -812,20 +829,17 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { ), ); // compactUpdateActions is triggered by the saving, it can therefore contain the results of more than one diffing - const saveQueue = createSaveQueueFromUpdateActions( + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( updateActions, TIMESTAMP, - skeletonTracing.tracingId, - ); - const 
simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + skeletonTracing, ); // This should result in a moved treeComponent of size one (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 1, nodeIds: [4], @@ -835,6 +849,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -842,6 +857,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 4, @@ -860,6 +876,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedThirdBatch[0], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 1, nodeIds: [5, 6], @@ -868,12 +885,14 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedThirdBatch[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); t.deepEqual(simplifiedThirdBatch[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 6, @@ -898,20 +917,19 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions( + + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing.tracingId, - ); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + skeletonTracing, ); + // This should result in a new tree const simplifiedFirstBatch = 
simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -919,6 +937,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4], @@ -928,6 +947,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -960,26 +980,24 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions( + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing.tracingId, - ); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + skeletonTracing, ); // This should result in two new trees and two moved treeComponents of size three and two const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 2, }, }); t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4], @@ -988,12 +1006,14 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.like(simplifiedFirstBatch[2], { name: "createTree", value: { + actionTracingId: "tracingId", id: 3, }, }); t.deepEqual(simplifiedFirstBatch[3], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 3, nodeIds: [5, 6, 7], @@ -1003,6 +1023,7 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.deepEqual(simplifiedFirstBatch[4], { name: "deleteNode", 
value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -1036,20 +1057,17 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { updateActions.push( testDiffing(newState1.tracing, newState2.tracing, newState1.flycam, newState2.flycam), ); - const saveQueue = createSaveQueueFromUpdateActions( + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( updateActions, TIMESTAMP, - skeletonTracing.tracingId, - ); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState2.tracing), + skeletonTracing, ); // This should result in the creation of a new tree (a) const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.like(simplifiedFirstBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -1057,6 +1075,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 1, targetId: 2, nodeIds: [3, 4, 5, 6], @@ -1066,6 +1085,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -1078,6 +1098,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.like(simplifiedSecondBatch[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 3, }, }); @@ -1085,6 +1106,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedSecondBatch[1], { name: "moveTreeComponent", value: { + actionTracingId: "tracingId", sourceId: 2, targetId: 3, nodeIds: [5, 6], @@ -1094,6 +1116,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedSecondBatch[2], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 4, treeId: 2, }, @@ -1127,21 +1150,22 @@ 
test("compactUpdateActions should do nothing if it cannot compact", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions( + const saveQueueOriginal = createSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, skeletonTracing.tracingId, ); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( + [updateActions], + TIMESTAMP, + skeletonTracing, ); // The deleteTree optimization in compactUpdateActions (that is unrelated to this test) // will remove the first deleteNode update action as the first tree is deleted because of the merge, // therefore remove it here as well - saveQueue[0].actions.shift(); + saveQueueOriginal[0].actions.shift(); // Nothing should be changed as the moveTreeComponent update action cannot be inserted - t.deepEqual(simplifiedUpdateActions, saveQueue); + t.deepEqual(simplifiedUpdateActions, saveQueueOriginal); }); test("compactUpdateActions should detect a deleted tree", (t) => { const testState = ChainReducer(initialState) @@ -1160,19 +1184,16 @@ test("compactUpdateActions should detect a deleted tree", (t) => { testState.flycam, newState.flycam, ); - const saveQueue = createSaveQueueFromUpdateActions( + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing.tracingId, - ); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + skeletonTracing, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 2, }, }); @@ -1196,19 +1217,16 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet testState.flycam, newState.flycam, ); - const saveQueue = 
createSaveQueueFromUpdateActions( + const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, - skeletonTracing.tracingId, - ); - const simplifiedUpdateActions = compactSaveQueueWithUpdateActions( - saveQueue, - enforceSkeletonTracing(newState.tracing), + skeletonTracing, ); const simplifiedFirstBatch = simplifiedUpdateActions[0].actions; t.deepEqual(simplifiedFirstBatch[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 2, }, @@ -1216,6 +1234,7 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet t.deepEqual(simplifiedFirstBatch[1], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 3, treeId: 2, }, From 93420bf4788af950d8a4f2d6e2351fa8140304b1 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 16 Sep 2024 16:01:38 +0200 Subject: [PATCH 057/361] fix saga_integration spec --- frontend/javascripts/test/sagas/saga_integration.spec.ts | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 33d818926c9..1d137bd72b7 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -14,6 +14,7 @@ import { setActiveUserAction } from "oxalis/model/actions/user_actions"; import dummyUser from "test/fixtures/dummy_user"; import { hasRootSagaCrashed } from "oxalis/model/sagas/root_saga"; import { omit } from "lodash"; +import { tracing as TaskTracing } from "test/fixtures/tasktracing_server_objects"; const { createTreeMapFromTreeArray, @@ -67,7 +68,7 @@ test.serial( ], ], TIMESTAMP, - "tracingId", + TaskTracing.id, getStats(state.tracing, "skeleton", "irrelevant_in_skeleton_case") || undefined, ); // Reset the info field which is just for debugging purposes From d96a001843c719fea1149784f496f6e2336ddb21 Mon Sep 17 00:00:00 2001 From: Philipp 
Otto Date: Mon, 16 Sep 2024 16:13:52 +0200 Subject: [PATCH 058/361] improve typing in saga integration spec --- .../test/sagas/saga_integration.spec.ts | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 1d137bd72b7..62031c20d51 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -16,16 +16,18 @@ import { hasRootSagaCrashed } from "oxalis/model/sagas/root_saga"; import { omit } from "lodash"; import { tracing as TaskTracing } from "test/fixtures/tasktracing_server_objects"; -const { - createTreeMapFromTreeArray, - generateTreeName, -} = require("oxalis/model/reducers/skeletontracing_reducer_helpers"); +const { createTreeMapFromTreeArray, generateTreeName } = + require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import("oxalis/model/reducers/skeletontracing_reducer_helpers"); const { addTreesAndGroupsAction, deleteNodeAction } = mockRequire.reRequire( "oxalis/model/actions/skeletontracing_actions", -); -const { discardSaveQueuesAction } = mockRequire.reRequire("oxalis/model/actions/save_actions"); -const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions"); +) as typeof import("oxalis/model/actions/skeletontracing_actions"); +const { discardSaveQueuesAction } = mockRequire.reRequire( + "oxalis/model/actions/save_actions", +) as typeof import("oxalis/model/actions/save_actions"); +const UpdateActions = mockRequire.reRequire( + "oxalis/model/sagas/update_actions", +) as typeof import("oxalis/model/sagas/update_actions"); test.beforeEach(async (t) => { // Setup oxalis, this will execute model.fetch(...) and initialize the store with the tracing, etc. 
@@ -59,7 +61,7 @@ test.serial( [ UpdateActions.updateTree(treeWithCorrectName), UpdateActions.updateSkeletonTracing( - Store.getState().tracing.skeleton, + enforceSkeletonTracing(Store.getState().tracing), [1, 2, 3], [], [0, 0, 0], From 9234507156aedcdc483baa0e155817b8ce66a76b Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 17 Sep 2024 09:35:32 +0200 Subject: [PATCH 059/361] update action log --- frontend/javascripts/admin/admin_rest_api.ts | 10 ++++---- .../oxalis/model/sagas/save_saga.ts | 10 +++----- .../javascripts/oxalis/view/version_list.tsx | 20 +++++----------- .../test/sagas/skeletontracing_saga.spec.ts | 4 ++-- .../controllers/TSAnnotationController.scala | 13 ++++++++++ .../controllers/TracingController.scala | 9 ------- ...alableminds.webknossos.tracingstore.routes | 24 ++++++++----------- 7 files changed, 38 insertions(+), 52 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 274ca537bb1..e3d69154408 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -927,8 +927,7 @@ export async function getTracingForAnnotationType( export function getUpdateActionLog( tracingStoreUrl: string, - tracingId: string, - versionedObjectType: SaveQueueType, + annotationId: string, oldestVersion?: number, newestVersion?: number, ): Promise> { @@ -942,19 +941,18 @@ export function getUpdateActionLog( params.append("newestVersion", newestVersion.toString()); } return Request.receiveJSON( - `${tracingStoreUrl}/tracings/${versionedObjectType}/${tracingId}/updateActionLog?${params}`, + `${tracingStoreUrl}/tracings/annotation/${annotationId}/updateActionLog?${params}`, ); }); } export function getNewestVersionForTracing( tracingStoreUrl: string, - tracingId: string, - tracingType: SaveQueueType, + annotationId: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - 
`${tracingStoreUrl}/tracings/${tracingType}/${tracingId}/newestVersion?token=${token}`, + `${tracingStoreUrl}/tracings/annotation/${annotationId}/newestVersion?token=${token}`, ).then((obj) => obj.version), ); } diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index d9253fc6606..508a62a8e99 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -488,20 +488,16 @@ function* watchForSaveConflicts() { const maybeSkeletonTracing = yield* select((state) => state.tracing.skeleton); const volumeTracings = yield* select((state) => state.tracing.volumes); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); + const annotationId = yield* select((state) => state.tracing.annotationId); const tracings: Array = _.compact([ ...volumeTracings, maybeSkeletonTracing, ]); - for (const tracing of tracings) { - const versionOnServer = yield* call( - getNewestVersionForTracing, - tracingStoreUrl, - tracing.tracingId, - tracing.type, - ); + const versionOnServer = yield* call(getNewestVersionForTracing, tracingStoreUrl, annotationId); + for (const tracing of tracings) { // Read the tracing version again from the store, since the // old reference to tracing might be outdated now due to the // immutability. 
diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index 9ccdaac3bf5..cb5d79183cf 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -148,8 +148,7 @@ const getGroupedAndChunkedVersions = _.memoize( async function getUpdateActionLogPage( props: Props, tracingStoreUrl: string, - tracingId: string, - versionedObjectType: SaveQueueType, + annotationId: string, newestVersion: number, // 0 is the "newest" page (i.e., the page in which the newest version is) relativePageNumber: number, @@ -177,8 +176,7 @@ async function getUpdateActionLogPage( const updateActionLog = await getUpdateActionLog( tracingStoreUrl, - tracingId, - versionedObjectType, + annotationId, oldestVersionInPage, newestVersionInPage, ); @@ -203,9 +201,10 @@ async function getUpdateActionLogPage( function VersionList(props: Props) { const { tracing } = props; const tracingStoreUrl = useSelector((state: OxalisState) => state.tracing.tracingStore.url); + const annotationId = useSelector((state: OxalisState) => state.tracing.annotationId); const newestVersion = useFetch( - () => getNewestVersionForTracing(tracingStoreUrl, tracing.tracingId, props.versionedObjectType), + () => getNewestVersionForTracing(tracingStoreUrl, annotationId), null, [tracing], ); @@ -233,17 +232,10 @@ function InnerVersionList(props: Props & { newestVersion: number }) { if (pageParam == null) { pageParam = Math.floor((newestVersion - initialVersion) / ENTRIES_PER_PAGE); } - const { tracingId } = props.tracing; const { url: tracingStoreUrl } = Store.getState().tracing.tracingStore; + const annotationId = Store.getState().tracing.annotationId; - return getUpdateActionLogPage( - props, - tracingStoreUrl, - tracingId, - props.versionedObjectType, - newestVersion, - pageParam, - ); + return getUpdateActionLogPage(props, tracingStoreUrl, annotationId, newestVersion, pageParam); } const queryKey = 
["versions", props.tracing.tracingId]; diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index b19c883f296..7f5ab13a304 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -27,8 +27,8 @@ import { TreeTypeEnum } from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; import type { ServerSkeletonTracing } from "types/api_flow_types"; import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; -import { UpdateAction } from "oxalis/model/sagas/update_actions"; -import { TracingStats } from "oxalis/model/accessors/annotation_accessor"; +import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; const TIMESTAMP = 1494347146379; const DateMock = { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 2245180a963..e2858994667 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -12,6 +12,7 @@ import com.scalableminds.webknossos.tracingstore.annotation.{ UpdateActionGroup } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scala.concurrent.ExecutionContext @@ -68,6 +69,18 @@ class TSAnnotationController @Inject()( } } + def newestVersion(token: Option[String], annotationId: String): Action[AnyContent] = Action.async { + implicit request 
=> + log() { + accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId), + urlOrHeaderToken(token, request)) { + for { + newestVersion <- annotationService.currentMaterializableVersion(annotationId) + } yield JsonOk(Json.obj("version" -> newestVersion)) + } + } + } + def updateActionStatistics(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index a2db158e6a6..d39cf16c671 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -103,15 +103,6 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def newestVersion(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), token) { - Fox.successful(JsonOk(Json.obj("version" -> 0L))) // TODO remove in favor of annotation-wide - } - } - } - def mergedFromIds(token: Option[String], persist: Boolean): Action[List[Option[TracingSelector]]] = Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 9058706cf74..1c7a3c5ab00 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -3,20 +3,20 @@ # ~~~~ # Health endpoint -GET /health 
@com.scalableminds.webknossos.tracingstore.controllers.Application.health +GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health -POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(token: Option[String], annotationId: String) -GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(token: Option[String], annotationId: String, version: Option[Long]) -POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(token: Option[String], annotationId: String) -POST /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(token: Option[String], annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) -GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(token: Option[String], annotationId: String) +POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(token: Option[String], annotationId: String) +GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(token: Option[String], annotationId: String, version: Option[Long]) +POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(token: Option[String], annotationId: String) +GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(token: Option[String], annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) +GET /annotation/:annotationId/updateActionStatistics 
@com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(token: Option[String], annotationId: String) +GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(token: Option[String], annotationId: String) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) POST /volume/:annotationId/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(token: Option[String], annotationId: String, tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) POST /volume/:annotationId/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(token: Option[String], annotationId: String, tracingId: String) GET /volume/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) -GET /volume/:annotationId/:tracingId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.newestVersion(token: Option[String], annotationId: String, tracingId: String) GET /volume/:annotationId/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(token: Option[String], annotationId: String, tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) POST /volume/:annotationId/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(token: Option[String], annotationId: String, tracingId: String) POST /volume/:annotationId/:tracingId/duplicate 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(token: Option[String], annotationId: String, tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) @@ -71,12 +71,8 @@ GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/:coordinate # Skeleton tracings POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save(token: Option[String]) POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple(token: Option[String]) - POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(token: Option[String], persist: Boolean) POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(token: Option[String], persist: Boolean) - -GET /skeleton/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) -GET /skeleton/:annotationId/:tracingId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.newestVersion(token: Option[String], annotationId: String, tracingId: String) -POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String]) - -POST /skeleton/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], annotationId: String, tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) 
+GET /skeleton/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) +POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String]) +POST /skeleton/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], annotationId: String, tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) From a42fe4701bbd221406cc45a2165bf1fd81585612 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 17 Sep 2024 12:08:39 +0200 Subject: [PATCH 060/361] wip editable mappings. tokenContext. --- frontend/javascripts/admin/admin_rest_api.ts | 6 +- .../oxalis/model/sagas/proofread_saga.ts | 3 +- .../oxalis/model_initialization.ts | 4 +- .../util/accesscontext/TokenContext.scala | 4 + .../util/mvc/ExtendedController.scala | 5 +- .../controllers/BinaryDataController.scala | 27 +-- .../controllers/DSMeshController.scala | 21 +- .../controllers/DataSourceController.scala | 146 +++++------- .../controllers/ExportsController.scala | 2 +- .../controllers/ZarrStreamingController.scala | 104 ++++----- .../webknossos/datastore/rpc/RPCRequest.scala | 4 + .../services/AccessTokenService.scala | 26 +-- .../services/DSFullMeshService.scala | 27 ++- .../services/DSRemoteTracingstoreClient.scala | 81 +++---- .../services/DSRemoteWebknossosClient.scala | 32 ++- .../services/MeshMappingHelper.scala | 6 +- .../services/uploading/ComposeService.scala | 5 +- .../TSRemoteDatastoreClient.scala | 89 +++----- .../TSRemoteWebknossosClient.scala | 5 +- .../AnnotationTransactionService.scala | 41 ++-- .../annotation/AnnotationWithTracings.scala | 5 + .../annotation/TSAnnotationService.scala | 45 ++-- 
.../SkeletonTracingController.scala | 10 +- .../controllers/TSAnnotationController.scala | 22 +- .../controllers/TracingController.scala | 33 +-- .../controllers/VolumeTracingController.scala | 215 +++++++----------- ...VolumeTracingZarrStreamingController.scala | 79 +++---- .../tracings/RemoteFallbackLayer.scala | 12 +- .../tracings/TracingService.scala | 27 +-- .../EditableMappingLayer.scala | 25 +- .../EditableMappingService.scala | 204 +++++++---------- .../EditableMappingUpdater.scala | 28 +-- .../skeleton/SkeletonTracingService.scala | 13 +- .../tracings/volume/TSFullMeshService.scala | 71 +++--- .../volume/VolumeSegmentIndexBuffer.scala | 19 +- .../volume/VolumeSegmentIndexService.scala | 68 +++--- .../VolumeSegmentStatisticsService.scala | 57 +++-- .../volume/VolumeTracingBucketHelper.scala | 14 +- .../volume/VolumeTracingDownsampling.scala | 21 +- .../tracings/volume/VolumeTracingLayer.scala | 3 +- .../volume/VolumeTracingService.scala | 182 +++++++-------- ...alableminds.webknossos.tracingstore.routes | 1 + 42 files changed, 746 insertions(+), 1046 deletions(-) create mode 100644 util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index e3d69154408..ee6816d8845 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1614,11 +1614,12 @@ export function fetchMapping( export function makeMappingEditable( tracingStoreUrl: string, + annotationId: string, tracingId: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/makeMappingEditable?token=${token}`, + `${tracingStoreUrl}/tracings/volume/${annotationId}/${tracingId}/makeMappingEditable?token=${token}`, { method: "POST", }, @@ -1628,10 +1629,11 @@ export function makeMappingEditable( export function getEditableMappingInfo( tracingStoreUrl: string, + 
annotationId: string, tracingId: string, ): Promise { return doWithToken((token) => - Request.receiveJSON(`${tracingStoreUrl}/tracings/mapping/${tracingId}/info?token=${token}`), + Request.receiveJSON(`${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/info?token=${token}`), ); } diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index e70b43a7058..0fc1a9e5d78 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -265,6 +265,7 @@ function* createEditableMapping(): Saga { * name of the HDF5 mapping for which the editable mapping is about to be created. */ const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); + const annotationId = yield* select((state) => state.tracing.annotationId); // Save before making the mapping editable to make sure the correct mapping is activated in the backend yield* call([Model, Model.ensureSavedState]); // Get volume tracing again to make sure the version is up to date @@ -275,7 +276,7 @@ function* createEditableMapping(): Saga { const volumeTracingId = upToDateVolumeTracing.tracingId; const layerName = volumeTracingId; - const serverEditableMapping = yield* call(makeMappingEditable, tracingStoreUrl, volumeTracingId); + const serverEditableMapping = yield* call(makeMappingEditable, tracingStoreUrl, annotationId, volumeTracingId); // The server increments the volume tracing's version by 1 when switching the mapping to an editable one yield* put(setVersionNumberAction(upToDateVolumeTracing.version + 1, "volume", volumeTracingId)); yield* put(setMappingNameAction(layerName, serverEditableMapping.mappingName, "HDF5")); diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index c87d88844a0..c9ebfe4a751 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ 
b/frontend/javascripts/oxalis/model_initialization.ts @@ -213,6 +213,7 @@ export async function initialize( if (annotation != null) { const editableMappings = await fetchEditableMappings( annotation.tracingStore.url, + annotation.id, serverVolumeTracings, ); initializeTracing(annotation, serverTracings, editableMappings); @@ -248,11 +249,12 @@ async function fetchParallel( async function fetchEditableMappings( tracingStoreUrl: string, + annotationId: string, serverVolumeTracings: ServerVolumeTracing[], ): Promise { const promises = serverVolumeTracings .filter((tracing) => tracing.hasEditableMapping) - .map((tracing) => getEditableMappingInfo(tracingStoreUrl, tracing.id)); + .map((tracing) => getEditableMappingInfo(tracingStoreUrl, annotationId, tracing.id)); return Promise.all(promises); } diff --git a/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala b/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala new file mode 100644 index 00000000000..2a74b356bf4 --- /dev/null +++ b/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala @@ -0,0 +1,4 @@ +package com.scalableminds.util.accesscontext + +// to be used in datastore and tracingstore to hand around tokens that were supplied with the request +case class TokenContext(userTokenOpt: Option[String]) diff --git a/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala b/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala index af2a52e2db8..b5f05b3810b 100644 --- a/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala +++ b/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala @@ -1,6 +1,7 @@ package com.scalableminds.util.mvc import com.google.protobuf.CodedInputStream +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common._ @@ -235,8 
+236,8 @@ trait ValidationHelpers { } trait RequestTokenHelper { - protected def urlOrHeaderToken(token: Option[String], request: Request[Any]): Option[String] = - token.orElse(request.headers.get("X-Auth-Token")) + implicit def tokenContextForRequest(implicit request: Request[Any]): TokenContext = + TokenContext(request.target.getQueryParameter("token").orElse(request.headers.get("X-Auth-Token"))) } trait ExtendedController diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index bac34609baf..50541f438c9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -53,8 +53,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { logTime(slackNotificationService.noticeSlowRequest) { val t = Instant.now for { @@ -96,8 +95,7 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, 
datasetName, @@ -121,8 +119,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -144,8 +141,7 @@ class BinaryDataController @Inject()( y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -179,8 +175,7 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -227,8 +222,7 @@ class BinaryDataController @Inject()( dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, 
request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -248,8 +242,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -290,8 +283,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -309,8 +301,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index f2f4d5921c0..062adc19c8b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -28,8 +28,7 @@ class DSMeshController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetName, dataLayerName) } yield Ok(Json.toJson(meshFiles)) @@ -49,8 +48,7 @@ class DSMeshController @Inject()( targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationId, @@ -65,8 +63,7 @@ class DSMeshController @Inject()( editableMappingTracingId, request.body.segmentId, mappingNameForMeshFile, - omitMissing = false, - urlOrHeaderToken(token, request) + omitMissing = false ) chunkInfos <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, datasetName, @@ -82,8 +79,7 @@ class DSMeshController @Inject()( datasetName: String, 
dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (data, encoding) <- meshFileService.readMeshChunk(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" } yield { @@ -99,14 +95,9 @@ class DSMeshController @Inject()( datasetName: String, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { - data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], - organizationId, - datasetName, - dataLayerName, - request.body) ?~> "mesh.file.loadChunk.failed" + data: Array[Byte] <- fullMeshService.loadFor(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 2e05d04252d..ff5befccdc3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -69,8 +69,7 @@ class DataSourceController @Inject()( Action.async { implicit request => { accessTokenService.validateAccessForSyncBlock( - 
UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { // Read directly from file, not from repository to ensure recent changes are seen val dataSource: InboxDataSource = dataSourceService.dataSourceFromDir( @@ -82,7 +81,7 @@ class DataSourceController @Inject()( } def triggerInboxCheckBlocking(token: Option[String]): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources) { for { _ <- dataSourceService.checkInbox(verbose = true) } yield Ok @@ -91,12 +90,11 @@ class DataSourceController @Inject()( def reserveUpload(token: Option[String]): Action[ReserveUploadInformation] = Action.async(validateJson[ReserveUploadInformation]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization)) { for { isKnownUpload <- uploadService.isKnownUpload(request.body.uploadId) _ <- if (!isKnownUpload) { - (remoteWebknossosClient.reserveDataSourceUpload(request.body, urlOrHeaderToken(token, request)) ?~> "dataset.upload.validation.failed") + (remoteWebknossosClient.reserveDataSourceUpload(request.body) ?~> "dataset.upload.validation.failed") .flatMap(_ => uploadService.reserveUpload(request.body)) } else Fox.successful(()) } yield Ok @@ -105,11 +103,9 @@ class DataSourceController @Inject()( def getUnfinishedUploads(token: Option[String], organizationName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationName), - urlOrHeaderToken(token, 
request)) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationName)) { for { - unfinishedUploads <- remoteWebknossosClient.getUnfinishedUploadsForUser(urlOrHeaderToken(token, request), - organizationName) + unfinishedUploads <- remoteWebknossosClient.getUnfinishedUploadsForUser(organizationName) unfinishedUploadsWithUploadIds <- uploadService.addUploadIdsToUnfinishedUploads(unfinishedUploads) } yield Ok(Json.toJson(unfinishedUploadsWithUploadIds)) } @@ -119,8 +115,7 @@ class DataSourceController @Inject()( // and it can be put in a webknossos folder where they have access def reserveManualUpload(token: Option[String]): Action[ReserveManualUploadInformation] = Action.async(validateJson[ReserveManualUploadInformation]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization)) { for { _ <- remoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( @@ -132,8 +127,7 @@ class DataSourceController @Inject()( None, request.body.initialTeamIds, request.body.folderId - ), - urlOrHeaderToken(token, request) + ) ) ?~> "dataset.upload.validation.failed" } yield Ok } @@ -172,8 +166,7 @@ class DataSourceController @Inject()( for { dataSourceId <- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(uploadFileId)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(uploadFileId) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -195,8 +188,7 @@ class DataSourceController @Inject()( for { dataSourceId 
<- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(resumableIdentifier)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(resumableIdentifier) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -212,16 +204,13 @@ class DataSourceController @Inject()( for { dataSourceId <- uploadService .getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId)) { for { (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "finishUpload.failed" - _ <- remoteWebknossosClient.reportUpload( - dataSourceId, - datasetSizeBytes, - request.body.needsConversion.getOrElse(false), - viaAddRoute = false, - userToken = urlOrHeaderToken(token, request)) ?~> "reportUpload.failed" + _ <- remoteWebknossosClient.reportUpload(dataSourceId, + datasetSizeBytes, + request.body.needsConversion.getOrElse(false), + viaAddRoute = false) ?~> "reportUpload.failed" } yield Ok } } yield result @@ -235,8 +224,7 @@ class DataSourceController @Inject()( case true => uploadService.getDataSourceIdByUploadId(request.body.uploadId) } dataSourceIdFox.flatMap { dataSourceId => - accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId)) { for { _ <- remoteWebknossosClient.deleteDataSource(dataSourceId) ?~> "dataset.delete.webknossos.failed" _ <- 
uploadService.cancelUpload(request.body) ?~> "Could not cancel the upload." @@ -252,8 +240,7 @@ class DataSourceController @Inject()( dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { addNoCacheHeaderFallback( Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetName, dataLayerName)))) } @@ -265,8 +252,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateList = agglomerateService.exploreAgglomerates(organizationId, datasetName, dataLayerName) @@ -282,8 +268,7 @@ class DataSourceController @Inject()( mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox skeleton <- agglomerateService.generateSkeleton(organizationId, @@ -303,8 +288,7 @@ class DataSourceController @Inject()( mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateGraph <- agglomerateService.generateAgglomerateGraph( @@ -322,8 +306,7 @@ class DataSourceController @Inject()( mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox position <- agglomerateService.positionForSegmentId( @@ -340,8 +323,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox largestAgglomerateId: Long <- agglomerateService @@ -365,8 +347,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Seq[Long] <- agglomerateService @@ -391,8 +372,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Array[Long] <- agglomerateService @@ -411,8 +391,7 @@ class DataSourceController @Inject()( def update(token: Option[String], organizationId: String, datasetName: String): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) dataSource <- dataSourceRepository.find(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( @@ -428,7 +407,7 @@ class DataSourceController @Inject()( datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources) { for { _ <- bool2Fox(dataSourceRepository.find(DataSourceId(datasetName, organizationId)).isEmpty) ?~> Messages( 
"dataSource.alreadyPresent") @@ -442,24 +421,21 @@ class DataSourceController @Inject()( layersToLink = None, initialTeams = List.empty, folderId = folderId, - ), - urlOrHeaderToken(token, request) + ) ) ?~> "dataset.upload.validation.failed" _ <- dataSourceService.updateDataSource(request.body.copy(id = DataSourceId(datasetName, organizationId)), expectExisting = false) - _ <- remoteWebknossosClient.reportUpload( - DataSourceId(datasetName, organizationId), - 0L, - needsConversion = false, - viaAddRoute = true, - userToken = urlOrHeaderToken(token, request)) ?~> "reportUpload.failed" + _ <- remoteWebknossosClient.reportUpload(DataSourceId(datasetName, organizationId), + 0L, + needsConversion = false, + viaAddRoute = true) ?~> "reportUpload.failed" } yield Ok } } def createOrganizationDirectory(token: Option[String], organizationId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock(UserAccessRequest.administrateDataSources(organizationId), token) { + accessTokenService.validateAccessForSyncBlock(UserAccessRequest.administrateDataSources(organizationId)) { val newOrganizationDirectory = new File(f"${dataSourceService.dataBaseDir}/$organizationId") newOrganizationDirectory.mkdirs() if (newOrganizationDirectory.isDirectory) @@ -474,8 +450,7 @@ class DataSourceController @Inject()( datasetName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId)) { for { before <- Fox.successful(System.currentTimeMillis()) usedStorageInBytes: List[DirectoryStorageReport] <- storageUsageService.measureStorage(organizationId, @@ -495,8 +470,7 @@ class DataSourceController @Inject()( datasetName: String, layerName: Option[String] = None): Action[AnyContent] = 
Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId)) { val (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) = binaryDataServiceHolder.binaryDataService.clearCache(organizationId, datasetName, layerName) val reloadedDataSource = dataSourceService.dataSourceFromDir( @@ -519,8 +493,7 @@ class DataSourceController @Inject()( def deleteOnDisk(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => val dataSourceId = DataSourceId(datasetName, organizationId) - accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId)) { for { _ <- binaryDataServiceHolder.binaryDataService.deleteOnDisk( organizationId, @@ -533,15 +506,12 @@ class DataSourceController @Inject()( def compose(token: Option[String]): Action[ComposeRequest] = Action.async(validateJson[ComposeRequest]) { implicit request => - val userToken = urlOrHeaderToken(token, request) - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId), token) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId)) { for { - _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)( - id => - accessTokenService.assertUserAccess( - UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)), - userToken)) - dataSource <- composeService.composeDataset(request.body, userToken) + _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)(id => + accessTokenService.assertUserAccess( + UserAccessRequest.readDataSources(DataSourceId(id.name, 
id.owningOrganization)))) + dataSource <- composeService.composeDataset(request.body) _ <- dataSourceRepository.updateDataSource(dataSource) } yield Ok } @@ -552,8 +522,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { val connectomeFileNames = connectomeFileService.exploreConnectomeFiles(organizationId, datasetName, dataLayerName) for { @@ -574,8 +543,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[ByAgglomerateIdsRequest] = Action.async(validateJson[ByAgglomerateIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -591,8 +559,7 @@ class DataSourceController @Inject()( dataLayerName: String, direction: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -609,8 +576,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -625,8 +591,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -641,8 +606,7 @@ class DataSourceController @Inject()( dataSetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId))) { val segmentIndexFileOpt = segmentIndexFileService.getSegmentIndexFile(organizationId, dataSetName, dataLayerName).toOption Future.successful(Ok(Json.toJson(segmentIndexFileOpt.isDefined))) @@ -659,8 +623,7 @@ class DataSourceController @Inject()( dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { segmentIds <- 
segmentIdsForAgglomerateIdIfNeeded( organizationId, @@ -670,8 +633,7 @@ class DataSourceController @Inject()( request.body.editableMappingTracingId, segmentId.toLong, mappingNameForMeshFile = None, - omitMissing = false, - urlOrHeaderToken(token, request) + omitMissing = false ) fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetName, dataLayerName) topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => @@ -698,8 +660,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => for { @@ -712,7 +673,6 @@ class DataSourceController @Inject()( segmentOrAgglomerateId, mappingNameForMeshFile = None, omitMissing = true, // assume agglomerate ids not present in the mapping belong to user-brushed segments - urlOrHeaderToken(token, request) ) fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetName, dataLayerName) topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => @@ -732,8 +692,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- 
segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) volumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -755,8 +714,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) boxes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -774,7 +732,7 @@ class DataSourceController @Inject()( // Called directly by wk side def exploreRemoteDataset(token: Option[String]): Action[ExploreRemoteDatasetRequest] = Action.async(validateJson[ExploreRemoteDatasetRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId), token) { + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId)) { val reportMutable = ListBuffer[String]() val hasLocalFilesystemRequest = request.body.layerParameters.exists(param => new URI(param.remoteUri).getScheme == DataVaultService.schemeFile) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala index f4777fdb4b9..009ac58d0f5 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala @@ -36,7 +36,7 @@ class 
ExportsController @Inject()(webknossosClient: DSRemoteWebknossosClient, override def allowRemoteOrigin: Boolean = true def download(token: Option[String], jobId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.downloadJobExport(jobId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.downloadJobExport(jobId)) { for { exportProperties <- webknossosClient.getJobExportProperties(jobId) fullPath = exportProperties.fullPathIn(dataBaseDir) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index 2c94c3386e6..1f86aef3d08 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator @@ -55,8 +56,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -73,8 +73,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: 
String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -94,12 +93,11 @@ class ZarrStreamingController @Inject()( dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => { + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => { remoteTracingstoreClient - .getOmeNgffHeader(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getOmeNgffHeader(annotationLayer.tracingId, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(ngffMetadata => Ok(Json.toJson(ngffMetadata))) }, orElse = annotationSource => @@ -120,12 +118,12 @@ class ZarrStreamingController @Inject()( dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => { + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => { remoteTracingstoreClient - .getZarrJsonGroupHeaderWithNgff(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getZarrJsonGroupHeaderWithNgff(annotationLayer.tracingId, annotationSource.tracingStoreUrl)( + relevantTokenContext) .map(header => Ok(Json.toJson(header))) }, orElse = annotationSource => @@ -153,8 +151,7 @@ class ZarrStreamingController @Inject()( datasetName: String, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => 
- accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ~> NOT_FOUND dataLayers = dataSource.dataLayers @@ -206,9 +203,9 @@ class ZarrStreamingController @Inject()( zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND - relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) - else urlOrHeaderToken(token, request) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) ~> NOT_FOUND + relevantTokenContext = if (annotationSource.accessViaPrivateLink) TokenContext(Some(accessToken)) + else tokenContextForRequest volumeAnnotationLayers = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume) dataSource <- dataSourceRepository .findUsable(DataSourceId(annotationSource.datasetName, annotationSource.organizationId)) @@ -221,8 +218,7 @@ class ZarrStreamingController @Inject()( remoteTracingstoreClient.getVolumeLayerAsZarrLayer(l.tracingId, Some(l.name), annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion)) + zarrVersion)(relevantTokenContext)) allLayer = dataSourceLayers ++ annotationLayers zarrSource = GenericDataSource[DataLayer](dataSource.id, allLayer, dataSource.scale) } yield Ok(Json.toJson(zarrSource)) @@ -236,8 +232,7 @@ class ZarrStreamingController @Inject()( mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { rawZarrCube(organizationId, datasetName, dataLayerName, mag, coordinates) } } @@ -249,16 +244,12 @@ class ZarrStreamingController @Inject()( coordinates: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getRawZarrCube(annotationLayer.tracingId, - mag, - coordinates, - annotationSource.tracingStoreUrl, - relevantToken) + .getRawZarrCube(annotationLayer.tracingId, mag, coordinates, annotationSource.tracingStoreUrl)( + relevantTokenContext) .map(Ok(_)), orElse = annotationSource => rawZarrCube(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag, coordinates) @@ -307,8 +298,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { zArray(organizationId, datasetName, dataLayerName, mag) } } @@ -329,8 +319,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { zarrJsonForMag(organizationId, datasetName, dataLayerName, mag) } } @@ -350,12 +339,11 @@ class 
ZarrStreamingController @Inject()( dataLayerName: String, mag: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, relevantToken) + .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(z => Ok(Json.toJson(z))), orElse = annotationSource => zArray(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) @@ -367,12 +355,11 @@ class ZarrStreamingController @Inject()( dataLayerName: String, mag: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, relevantToken) + .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(z => Ok(Json.toJson(z))), orElse = annotationSource => zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) @@ -380,18 +367,17 @@ class ZarrStreamingController @Inject()( } private def ifIsAnnotationLayerOrElse( - token: Option[String], accessToken: String, dataLayerName: String, - ifIsAnnotationLayer: (AnnotationLayer, AnnotationSource, Option[String]) => Fox[Result], + ifIsAnnotationLayer: (AnnotationLayer, AnnotationSource, TokenContext) => Fox[Result], orElse: AnnotationSource => Fox[Result])(implicit request: Request[Any]): Fox[Result] = for { - annotationSource <- 
remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND - relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) - else urlOrHeaderToken(token, request) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) ~> NOT_FOUND + relevantTokenContext = if (annotationSource.accessViaPrivateLink) TokenContext(Some(accessToken)) + else tokenContextForRequest layer = annotationSource.getAnnotationLayer(dataLayerName) result <- layer match { - case Some(annotationLayer) => ifIsAnnotationLayer(annotationLayer, annotationSource, relevantToken) + case Some(annotationLayer) => ifIsAnnotationLayer(annotationLayer, annotationSource, relevantTokenContext) case None => orElse(annotationSource) } } yield result @@ -403,8 +389,7 @@ class ZarrStreamingController @Inject()( mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { dataLayerMagFolderContents(organizationId, datasetName, dataLayerName, mag, zarrVersion) } } @@ -435,16 +420,14 @@ class ZarrStreamingController @Inject()( zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient .getDataLayerMagFolderContents(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion) + zarrVersion)(relevantTokenContext) .map( layers => Ok( @@ -467,8 +450,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String, zarrVersion: 
Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { dataLayerFolderContents(organizationId, datasetName, dataLayerName, zarrVersion) } } @@ -498,15 +480,12 @@ class ZarrStreamingController @Inject()( zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getDataLayerFolderContents(annotationLayer.tracingId, - annotationSource.tracingStoreUrl, - relevantToken, - zarrVersion) + .getDataLayerFolderContents(annotationLayer.tracingId, annotationSource.tracingStoreUrl, zarrVersion)( + relevantTokenContext) .map( layers => Ok( @@ -528,8 +507,7 @@ class ZarrStreamingController @Inject()( datasetName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND @@ -550,7 +528,7 @@ class ZarrStreamingController @Inject()( zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) dataSource 
<- dataSourceRepository .findUsable(DataSourceId(annotationSource.datasetName, annotationSource.organizationId)) .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND @@ -577,8 +555,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { Ok(zGroupJson) } } @@ -588,12 +565,11 @@ class ZarrStreamingController @Inject()( def zGroupPrivateLink(token: Option[String], accessToken: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( - token, accessToken, dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantToken) => + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getZGroup(annotationLayer.tracingId, annotationSource.tracingStoreUrl, relevantToken) + .getZGroup(annotationLayer.tracingId, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(Ok(_)), orElse = _ => Fox.successful(Ok(zGroupJson)) ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index 1d99f8aab03..d26675b7e6d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.rpc +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.mvc.MimeTypes import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -26,6 +27,9 
@@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: this } + def withTokenFromContext(implicit tc: TokenContext): RPCRequest = + addQueryStringOptional("token", tc.userTokenOpt) + def addHttpHeaders(hdrs: (String, String)*): RPCRequest = { request = request.addHttpHeaders(hdrs: _*) this diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 63db94987be..52b56268077 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.enumeration.ExtendedEnumeration import com.scalableminds.util.tools.Fox @@ -66,28 +67,27 @@ trait AccessTokenService { private lazy val accessAnswersCache: AlfuCache[(UserAccessRequest, Option[String]), UserAccessAnswer] = AlfuCache(timeToLive = AccessExpiration, timeToIdle = AccessExpiration) - def validateAccessForSyncBlock(accessRequest: UserAccessRequest, token: Option[String])(block: => Result)( - implicit ec: ExecutionContext): Fox[Result] = - validateAccess(accessRequest, token) { + def validateAccessForSyncBlock(accessRequest: UserAccessRequest)(block: => Result)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Result] = + validateAccess(accessRequest) { Future.successful(block) } - def validateAccess(accessRequest: UserAccessRequest, token: Option[String])(block: => Future[Result])( - implicit ec: ExecutionContext): Fox[Result] = + def validateAccess(accessRequest: UserAccessRequest)(block: => Future[Result])(implicit ec: ExecutionContext, + tc: 
TokenContext): Fox[Result] = for { - userAccessAnswer <- hasUserAccess(accessRequest, token) ?~> "Failed to check data access, token may be expired, consider reloading." + userAccessAnswer <- hasUserAccess(accessRequest) ?~> "Failed to check data access, token may be expired, consider reloading." result <- executeBlockOnPositiveAnswer(userAccessAnswer, block) } yield result - private def hasUserAccess(accessRequest: UserAccessRequest, token: Option[String])( - implicit ec: ExecutionContext): Fox[UserAccessAnswer] = - accessAnswersCache.getOrLoad((accessRequest, token), - _ => remoteWebknossosClient.requestUserAccess(token, accessRequest)) + private def hasUserAccess(accessRequest: UserAccessRequest)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[UserAccessAnswer] = + accessAnswersCache.getOrLoad((accessRequest, tc.userTokenOpt), + _ => remoteWebknossosClient.requestUserAccess(accessRequest)) - def assertUserAccess(accessRequest: UserAccessRequest, token: Option[String])( - implicit ec: ExecutionContext): Fox[Unit] = + def assertUserAccess(accessRequest: UserAccessRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = for { - userAccessAnswer <- hasUserAccess(accessRequest, token) ?~> "Failed to check data access, token may be expired, consider reloading." + userAccessAnswer <- hasUserAccess(accessRequest) ?~> "Failed to check data access, token may be expired, consider reloading." 
_ <- Fox.bool2Fox(userAccessAnswer.granted) ?~> userAccessAnswer.msg.getOrElse("Access forbidden.") } yield () diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala index 24c14630a91..c0af56127ed 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox @@ -49,14 +50,13 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService - def loadFor(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = + def loadFor(organizationId: String, datasetName: String, dataLayerName: String, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = fullMeshRequest.meshFileName match { case Some(_) => - loadFullMeshFromMeshfile(token, organizationId, datasetName, dataLayerName, fullMeshRequest) + loadFullMeshFromMeshfile(organizationId, datasetName, dataLayerName, fullMeshRequest) case None => loadFullMeshFromAdHoc(organizationId, datasetName, dataLayerName, fullMeshRequest) } @@ -113,12 +113,12 @@ class DSFullMeshService 
@Inject()(dataSourceRepository: DataSourceRepository, } yield allVertices } - private def loadFullMeshFromMeshfile( - token: Option[String], - organizationId: String, - datasetName: String, - layerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = + private def loadFullMeshFromMeshfile(organizationId: String, + datasetName: String, + layerName: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = for { meshFileName <- fullMeshRequest.meshFileName.toFox ?~> "meshFileName.needed" before = Instant.now @@ -134,8 +134,7 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, fullMeshRequest.editableMappingTracingId, fullMeshRequest.segmentId, mappingNameForMeshFile, - omitMissing = false, - token + omitMissing = false ) chunkInfos: WebknossosSegmentInfo <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, datasetName, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala index 2924c0687e4..5bd69d4d7c9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala @@ -1,8 +1,8 @@ package com.scalableminds.webknossos.datastore.services import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.dataformats.layers.ZarrSegmentationLayer import com.scalableminds.webknossos.datastore.datareaders.zarr.{NgffMetadata, ZarrHeader} import 
com.scalableminds.webknossos.datastore.datareaders.zarr3.{Zarr3ArrayHeader, Zarr3GroupHeader} @@ -21,88 +21,63 @@ object EditableMappingSegmentListResult { class DSRemoteTracingstoreClient @Inject()( rpc: RPC, - config: DataStoreConfig, val lifecycle: ApplicationLifecycle, ) extends LazyLogging with FoxImplicits { + private def getZarrVersionDependantSubPath = (zarrVersion: Int) => if (zarrVersion == 2) "zarr" else "zarr3_experimental" - def getZArray(tracingId: String, mag: String, tracingStoreUri: String, token: Option[String]): Fox[ZarrHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/.zarray") - .addQueryStringOptional("token", token) + def getZArray(tracingId: String, mag: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[ZarrHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/.zarray").withTokenFromContext .getWithJsonResponse[ZarrHeader] - def getZarrJson(tracingId: String, - mag: String, - tracingStoreUri: String, - token: Option[String]): Fox[Zarr3ArrayHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/$mag/zarr.json") - .addQueryStringOptional("token", token) + def getZarrJson(tracingId: String, mag: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Zarr3ArrayHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/$mag/zarr.json").withTokenFromContext .getWithJsonResponse[Zarr3ArrayHeader] def getVolumeLayerAsZarrLayer(tracingId: String, tracingName: Option[String], tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[ZarrSegmentationLayer] = { + zarrVersion: Int)(implicit tc: TokenContext): Fox[ZarrSegmentationLayer] = { val zarrVersionDependantSubPath = getZarrVersionDependantSubPath(zarrVersion) - rpc(s"$tracingStoreUri/tracings/volume/$zarrVersionDependantSubPath/$tracingId/zarrSource") - .addQueryStringOptional("token", token) + 
rpc(s"$tracingStoreUri/tracings/volume/$zarrVersionDependantSubPath/$tracingId/zarrSource").withTokenFromContext .addQueryStringOptional("tracingName", tracingName) .getWithJsonResponse[ZarrSegmentationLayer] } - def getOmeNgffHeader(tracingId: String, tracingStoreUri: String, token: Option[String]): Fox[NgffMetadata] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zattrs") - .addQueryStringOptional("token", token) + def getOmeNgffHeader(tracingId: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[NgffMetadata] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zattrs").withTokenFromContext .getWithJsonResponse[NgffMetadata] - def getZarrJsonGroupHeaderWithNgff(tracingId: String, - tracingStoreUri: String, - token: Option[String]): Fox[Zarr3GroupHeader] = - rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/zarr.json") - .addQueryStringOptional("token", token) + def getZarrJsonGroupHeaderWithNgff(tracingId: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Zarr3GroupHeader] = + rpc(s"$tracingStoreUri/tracings/volume/zarr3_experimental/$tracingId/zarr.json").withTokenFromContext .getWithJsonResponse[Zarr3GroupHeader] - def getRawZarrCube(tracingId: String, - mag: String, - cxyz: String, - tracingStoreUri: String, - token: Option[String]): Fox[Array[Byte]] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/$cxyz").silent - .addQueryStringOptional("token", token) - .getWithBytesResponse + def getRawZarrCube(tracingId: String, mag: String, cxyz: String, tracingStoreUri: String)( + implicit tc: TokenContext): Fox[Array[Byte]] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/$mag/$cxyz").silent.withTokenFromContext.getWithBytesResponse - def getDataLayerMagFolderContents(tracingId: String, - mag: String, - tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[List[String]] = - 
rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId/$mag") - .addQueryStringOptional("token", token) + def getDataLayerMagFolderContents(tracingId: String, mag: String, tracingStoreUri: String, zarrVersion: Int)( + implicit tc: TokenContext): Fox[List[String]] = + rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId/$mag").withTokenFromContext .getWithJsonResponse[List[String]] - def getDataLayerFolderContents(tracingId: String, - tracingStoreUri: String, - token: Option[String], - zarrVersion: Int): Fox[List[String]] = - rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId") - .addQueryStringOptional("token", token) + def getDataLayerFolderContents(tracingId: String, tracingStoreUri: String, zarrVersion: Int)( + implicit tc: TokenContext): Fox[List[String]] = + rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId").withTokenFromContext .getWithJsonResponse[List[String]] - def getZGroup(tracingId: String, tracingStoreUri: String, token: Option[String]): Fox[JsObject] = - rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zgroup") - .addQueryStringOptional("token", token) - .getWithJsonResponse[JsObject] + def getZGroup(tracingId: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[JsObject] = + rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zgroup").withTokenFromContext.getWithJsonResponse[JsObject] - def getEditableMappingSegmentIdsForAgglomerate(tracingStoreUri: String, - tracingId: String, - agglomerateId: Long, - token: Option[String]): Fox[EditableMappingSegmentListResult] = + def getEditableMappingSegmentIdsForAgglomerate(tracingStoreUri: String, tracingId: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[EditableMappingSegmentListResult] = rpc(s"$tracingStoreUri/tracings/mapping/$tracingId/segmentsForAgglomerate") 
.addQueryString("agglomerateId" -> agglomerateId.toString) - .addQueryStringOptional("token", token) + .withTokenFromContext .silent .getWithJsonResponse[EditableMappingSegmentListResult] } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index 0703f638076..b4bcafed613 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -3,6 +3,7 @@ package com.scalableminds.webknossos.datastore.services import org.apache.pekko.actor.ActorSystem import com.google.inject.Inject import com.google.inject.name.Named +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig @@ -33,7 +34,7 @@ object TracingStoreInfo { } trait RemoteWebknossosClient { - def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] + def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] } class DSRemoteWebknossosClient @Inject()( @@ -68,21 +69,17 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("key" -> dataStoreKey) .put(dataSource) - def getUnfinishedUploadsForUser(userTokenOpt: Option[String], organizationName: String): Fox[List[UnfinishedUpload]] = + def getUnfinishedUploadsForUser(organizationName: String)(implicit tc: TokenContext): Fox[List[UnfinishedUpload]] = for { - userToken <- option2Fox(userTokenOpt) ?~> "reserveUpload.noUserToken" unfinishedUploads <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/getUnfinishedUploadsForUser") .addQueryString("key" -> dataStoreKey) - 
.addQueryString("token" -> userToken) .addQueryString("organizationName" -> organizationName) + .withTokenFromContext .getWithJsonResponse[List[UnfinishedUpload]] } yield unfinishedUploads - def reportUpload(dataSourceId: DataSourceId, - datasetSizeBytes: Long, - needsConversion: Boolean, - viaAddRoute: Boolean, - userToken: Option[String]): Fox[Unit] = + def reportUpload(dataSourceId: DataSourceId, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean)( + implicit tc: TokenContext): Fox[Unit] = for { _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reportDatasetUpload") .addQueryString("key" -> dataStoreKey) @@ -90,7 +87,7 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("needsConversion" -> needsConversion.toString) .addQueryString("viaAddRoute" -> viaAddRoute.toString) .addQueryString("datasetSizeBytes" -> datasetSizeBytes.toString) - .addQueryStringOptional("token", userToken) + .withTokenFromContext .post() } yield () @@ -100,12 +97,11 @@ class DSRemoteWebknossosClient @Inject()( .silent .put(dataSources) - def reserveDataSourceUpload(info: ReserveUploadInformation, userTokenOpt: Option[String]): Fox[Unit] = + def reserveDataSourceUpload(info: ReserveUploadInformation)(implicit tc: TokenContext): Fox[Unit] = for { - userToken <- option2Fox(userTokenOpt) ?~> "reserveUpload.noUserToken" _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reserveUpload") .addQueryString("key" -> dataStoreKey) - .addQueryString("token" -> userToken) + .withTokenFromContext .post(info) } yield () @@ -118,10 +114,10 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("key" -> dataStoreKey) .getWithJsonResponse[JobExportProperties] - override def requestUserAccess(userToken: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/validateUserAccess") 
.addQueryString("key" -> dataStoreKey) - .addQueryStringOptional("token", userToken) + .withTokenFromContext .postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest) private lazy val tracingstoreUriCache: AlfuCache[String, String] = AlfuCache() @@ -141,13 +137,13 @@ class DSRemoteWebknossosClient @Inject()( private lazy val annotationSourceCache: AlfuCache[(String, Option[String]), AnnotationSource] = AlfuCache(timeToLive = 5 seconds, timeToIdle = 5 seconds) - def getAnnotationSource(accessToken: String, userToken: Option[String]): Fox[AnnotationSource] = + def getAnnotationSource(accessToken: String)(implicit tc: TokenContext): Fox[AnnotationSource] = annotationSourceCache.getOrLoad( - (accessToken, userToken), + (accessToken, tc.userTokenOpt), _ => rpc(s"$webknossosUri/api/annotations/source/$accessToken") .addQueryString("key" -> dataStoreKey) - .addQueryStringOptional("userToken", userToken) + .withTokenFromContext .getWithJsonResponse[AnnotationSource] ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala index 391c234fc6b..14279974079 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.services +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.storage.AgglomerateFileKey @@ -22,7 +23,7 @@ trait MeshMappingHelper { agglomerateId: Long, mappingNameForMeshFile: Option[String], omitMissing: Boolean, // If true, failing lookups in the agglomerate file will just return empty list. 
- token: Option[String])(implicit ec: ExecutionContext): Fox[List[Long]] = + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Long]] = (targetMappingName, editableMappingTracingId) match { case (None, None) => // No mapping selected, assume id matches meshfile @@ -58,8 +59,7 @@ trait MeshMappingHelper { tracingstoreUri <- dsRemoteWebknossosClient.getTracingstoreUri segmentIdsResult <- dsRemoteTracingstoreClient.getEditableMappingSegmentIdsForAgglomerate(tracingstoreUri, tracingId, - agglomerateId, - token) + agglomerateId) segmentIds <- if (segmentIdsResult.agglomerateIdIsPresent) Fox.successful(segmentIdsResult.segmentIds) else // the agglomerate id is not present in the editable mapping. Fetch its info from the base mapping. diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index 26d16943db4..efed327ef37 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.services.uploading +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.io.PathUtils import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.layers.{ @@ -67,7 +68,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def uploadDirectory(organizationId: String, name: String): Path = dataBaseDir.resolve(organizationId).resolve(name) - def composeDataset(composeRequest: ComposeRequest, userToken: Option[String]): Fox[DataSource] = + def composeDataset(composeRequest: ComposeRequest)(implicit tc: TokenContext): Fox[DataSource] = for { _ <- 
dataSourceService.assertDataDirWritable(composeRequest.organizationId) reserveUploadInfo = ReserveUploadInformation("", @@ -78,7 +79,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, None, List(), Some(composeRequest.targetFolderId)) - _ <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." + _ <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo) ?~> "Failed to reserve upload." directory = uploadDirectory(composeRequest.organizationId, composeRequest.newDatasetName) _ = PathUtils.ensureDirectory(directory) dataSource <- createDatasource(composeRequest, composeRequest.organizationId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala index 564e51d671a..15f8dd30475 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox @@ -38,36 +39,28 @@ class TSRemoteDatastoreClient @Inject()( private lazy val largestAgglomerateIdCache: AlfuCache[(RemoteFallbackLayer, String, Option[String]), Long] = AlfuCache(timeToLive = 10 minutes) - def getAgglomerateSkeleton(userToken: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - mappingName: String, - agglomerateId: Long): Fox[Array[Byte]] = + def getAgglomerateSkeleton(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { 
remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/skeleton/$agglomerateId") - .addQueryStringOptional("token", userToken) - .getWithBytesResponse + result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/skeleton/$agglomerateId").withTokenFromContext.getWithBytesResponse } yield result - def getData(remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebknossosDataRequest], - userToken: Option[String]): Fox[(Array[Byte], List[Int])] = + def getData(remoteFallbackLayer: RemoteFallbackLayer, dataRequests: List[WebknossosDataRequest])( + implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - response <- rpc(s"$remoteLayerUri/data").addQueryStringOptional("token", userToken).silent.post(dataRequests) + response <- rpc(s"$remoteLayerUri/data").withTokenFromContext.silent.post(dataRequests) _ <- bool2Fox(Status.isSuccessful(response.status)) bytes = response.bodyAsBytes.toArray indices <- parseMissingBucketHeader(response.header(missingBucketsHeader)) ?~> "failed to parse missing bucket header" } yield (bytes, indices) - def getVoxelAtPosition(userToken: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - pos: Vec3Int, - mag: Vec3Int): Fox[Array[Byte]] = + def getVoxelAtPosition(remoteFallbackLayer: RemoteFallbackLayer, pos: Vec3Int, mag: Vec3Int)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/data") - .addQueryStringOptional("token", userToken) + result <- rpc(s"$remoteLayerUri/data").withTokenFromContext .addQueryString("x" -> pos.x.toString) .addQueryString("y" -> pos.y.toString) .addQueryString("z" -> pos.z.toString) @@ -81,33 +74,25 @@ class TSRemoteDatastoreClient @Inject()( def getAgglomerateIdsForSegmentIds(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String, - segmentIdsOrdered: List[Long], - 
userToken: Option[String]): Fox[List[Long]] = + segmentIdsOrdered: List[Long])(implicit tc: TokenContext): Fox[List[Long]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) segmentIdsOrderedProto = ListOfLong(items = segmentIdsOrdered) - result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/agglomeratesForSegments") - .addQueryStringOptional("token", userToken) - .silent + result <- rpc(s"$remoteLayerUri/agglomerates/$mappingName/agglomeratesForSegments").withTokenFromContext.silent .postProtoWithProtoResponse[ListOfLong, ListOfLong](segmentIdsOrderedProto)(ListOfLong) } yield result.items.toList - def getAgglomerateGraph(remoteFallbackLayer: RemoteFallbackLayer, - baseMappingName: String, - agglomerateId: Long, - userToken: Option[String]): Fox[AgglomerateGraph] = + def getAgglomerateGraph(remoteFallbackLayer: RemoteFallbackLayer, baseMappingName: String, agglomerateId: Long)( + implicit tc: TokenContext): Fox[AgglomerateGraph] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/agglomerates/$baseMappingName/agglomerateGraph/$agglomerateId").silent - .addQueryStringOptional("token", userToken) - .silent + result <- rpc(s"$remoteLayerUri/agglomerates/$baseMappingName/agglomerateGraph/$agglomerateId").silent.withTokenFromContext.silent .getWithProtoResponse[AgglomerateGraph](AgglomerateGraph) } yield result - def getLargestAgglomerateId(remoteFallbackLayer: RemoteFallbackLayer, - mappingName: String, - userToken: Option[String]): Fox[Long] = { - val cacheKey = (remoteFallbackLayer, mappingName, userToken) + def getLargestAgglomerateId(remoteFallbackLayer: RemoteFallbackLayer, mappingName: String)( + implicit tc: TokenContext): Fox[Long] = { + val cacheKey = (remoteFallbackLayer, mappingName, tc.userTokenOpt) largestAgglomerateIdCache.getOrLoad( cacheKey, k => @@ -121,26 +106,20 @@ class TSRemoteDatastoreClient @Inject()( ) } - def hasSegmentIndexFile(remoteFallbackLayer: RemoteFallbackLayer, 
userToken: Option[String]): Fox[Boolean] = + def hasSegmentIndexFile(remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Boolean] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - hasIndexFile <- rpc(s"$remoteLayerUri/hasSegmentIndex") - .addQueryStringOptional("token", userToken) - .silent - .getWithJsonResponse[Boolean] + hasIndexFile <- rpc(s"$remoteLayerUri/hasSegmentIndex").withTokenFromContext.silent.getWithJsonResponse[Boolean] } yield hasIndexFile def querySegmentIndex(remoteFallbackLayer: RemoteFallbackLayer, segmentId: Long, mag: Vec3Int, mappingName: Option[String], // should be the baseMappingName in case of editable mappings - editableMappingTracingId: Option[String], - userToken: Option[String]): Fox[Seq[Vec3Int]] = + editableMappingTracingId: Option[String])(implicit tc: TokenContext): Fox[Seq[Vec3Int]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - positions <- rpc(s"$remoteLayerUri/segmentIndex/$segmentId") - .addQueryStringOptional("token", userToken) - .silent + positions <- rpc(s"$remoteLayerUri/segmentIndex/$segmentId").withTokenFromContext.silent .postJsonWithJsonResponse[GetSegmentIndexParameters, Seq[Vec3Int]](GetSegmentIndexParameters( mag, cubeSize = Vec3Int.ones, // Don't use the cubeSize parameter here (since we want to calculate indices later anyway) @@ -157,13 +136,10 @@ class TSRemoteDatastoreClient @Inject()( segmentIds: Seq[Long], mag: Vec3Int, mappingName: Option[String], // should be the baseMappingName in case of editable mappings - editableMappingTracingId: Option[String], - userToken: Option[String]): Fox[Seq[(Long, Seq[Vec3Int])]] = + editableMappingTracingId: Option[String])(implicit tc: TokenContext): Fox[Seq[(Long, Seq[Vec3Int])]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/segmentIndex") - .addQueryStringOptional("token", userToken) - .silent + result <- 
rpc(s"$remoteLayerUri/segmentIndex").withTokenFromContext.silent .postJsonWithJsonResponse[GetMultipleSegmentIndexParameters, Seq[SegmentIndexData]]( GetMultipleSegmentIndexParameters(segmentIds.toList, mag, @@ -173,25 +149,22 @@ class TSRemoteDatastoreClient @Inject()( } yield result.map(data => (data.segmentId, data.positions)) - def loadFullMeshStl(token: Option[String], - remoteFallbackLayer: RemoteFallbackLayer, - fullMeshRequest: FullMeshRequest): Fox[Array[Byte]] = + def loadFullMeshStl(remoteFallbackLayer: RemoteFallbackLayer, fullMeshRequest: FullMeshRequest)( + implicit tc: TokenContext): Fox[Array[Byte]] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - result <- rpc(s"$remoteLayerUri/meshes/fullMesh.stl") - .addQueryStringOptional("token", token) + result <- rpc(s"$remoteLayerUri/meshes/fullMesh.stl").withTokenFromContext .postJsonWithBytesResponse(fullMeshRequest) } yield result - def voxelSizeForTracingWithCache(tracingId: String, token: Option[String]): Fox[VoxelSize] = - voxelSizeCache.getOrLoad(tracingId, tId => voxelSizeForTracing(tId, token)) + def voxelSizeForTracingWithCache(tracingId: String)(implicit tc: TokenContext): Fox[VoxelSize] = + voxelSizeCache.getOrLoad(tracingId, tId => voxelSizeForTracing(tId)) - private def voxelSizeForTracing(tracingId: String, token: Option[String]): Fox[VoxelSize] = + private def voxelSizeForTracing(tracingId: String)(implicit tc: TokenContext): Fox[VoxelSize] = for { dataSourceId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) dataStoreUri <- dataStoreUriWithCache(dataSourceId.team, dataSourceId.name) - result <- rpc(s"$dataStoreUri/data/datasets/${dataSourceId.team}/${dataSourceId.name}/readInboxDataSource") - .addQueryStringOptional("token", token) + result <- rpc(s"$dataStoreUri/data/datasets/${dataSourceId.team}/${dataSourceId.name}/readInboxDataSource").withTokenFromContext .getWithJsonResponse[InboxDataSource] scale <- result.voxelSizeOpt ?~> "could not determine voxel 
size of dataset" } yield scale diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index f1fd9555f4b..09e3e019e8c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox @@ -73,10 +74,10 @@ class TSRemoteWebknossosClient @Inject()( .getWithJsonResponse[DataSourceId] ) - override def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") .addQueryString("key" -> tracingStoreKey) - .addQueryStringOptional("token", token) + .withTokenFromContext .postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 0738dd3c8c3..63e700c8bcf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.annotation +import 
com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore @@ -61,16 +62,16 @@ class AnnotationTransactionService @Inject()( Some(expiry)) } yield () - private def handleUpdateGroupForTransaction(annotationId: String, - previousVersionFox: Fox[Long], - updateGroup: UpdateActionGroup, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = + private def handleUpdateGroupForTransaction( + annotationId: String, + previousVersionFox: Fox[Long], + updateGroup: UpdateActionGroup)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = for { previousCommittedVersion: Long <- previousVersionFox result <- if (previousCommittedVersion + 1 == updateGroup.version) { if (updateGroup.transactionGroupCount == updateGroup.transactionGroupIndex + 1) { // Received the last group of this transaction - commitWithPending(annotationId, updateGroup, userToken) + commitWithPending(annotationId, updateGroup) } else { for { _ <- saveUncommitted(annotationId, @@ -92,15 +93,15 @@ class AnnotationTransactionService @Inject()( // For an update group (that is the last of a transaction), fetch all previous uncommitted for the same transaction // and commit them all. - private def commitWithPending(annotationId: String, updateGroup: UpdateActionGroup, userToken: Option[String])( - implicit ec: ExecutionContext): Fox[Long] = + private def commitWithPending(annotationId: String, updateGroup: UpdateActionGroup)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = for { previousActionGroupsToCommit <- getAllUncommittedFor(annotationId, updateGroup.transactionId) _ <- bool2Fox( previousActionGroupsToCommit .exists(_.transactionGroupIndex == 0) || updateGroup.transactionGroupCount == 1) ?~> s"Trying to commit a transaction without a group that has transactionGroupIndex 0." 
concatenatedGroup = concatenateUpdateGroupsOfTransaction(previousActionGroupsToCommit, updateGroup) - commitResult <- commitUpdates(annotationId, List(concatenatedGroup), userToken) + commitResult <- commitUpdates(annotationId, List(concatenatedGroup)) _ <- removeAllUncommittedFor(annotationId, updateGroup.transactionId) } yield commitResult @@ -146,22 +147,22 @@ class AnnotationTransactionService @Inject()( ) } - def handleUpdateGroups(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String])( - implicit ec: ExecutionContext): Fox[Long] = + def handleUpdateGroups(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = if (updateGroups.forall(_.transactionGroupCount == 1)) { - commitUpdates(annotationId, updateGroups, userToken) + commitUpdates(annotationId, updateGroups) } else { updateGroups.foldLeft(annotationService.currentMaterializableVersion(annotationId)) { (currentCommittedVersionFox, updateGroup) => - handleUpdateGroupForTransaction(annotationId, currentCommittedVersionFox, updateGroup, userToken) + handleUpdateGroupForTransaction(annotationId, currentCommittedVersionFox, updateGroup) } } // Perform version check and commit the passed updates - private def commitUpdates(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String])( - implicit ec: ExecutionContext): Fox[Long] = + private def commitUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = for { - _ <- annotationService.reportUpdates(annotationId, updateGroups, userToken) + _ <- annotationService.reportUpdates(annotationId, updateGroups) currentCommittedVersion: Fox[Long] = annotationService.currentMaterializableVersion(annotationId) _ = logger.info(s"trying to commit ${updateGroups .map(_.actions.length) @@ -170,7 +171,7 @@ class AnnotationTransactionService @Inject()( previousVersion.flatMap { 
prevVersion: Long => if (prevVersion + 1 == updateGroup.version) { for { - _ <- handleUpdateGroup(annotationId, updateGroup, userToken) + _ <- handleUpdateGroup(annotationId, updateGroup) _ <- saveToHandledGroupIdStore(annotationId, updateGroup.transactionId, updateGroup.version, @@ -181,15 +182,15 @@ class AnnotationTransactionService @Inject()( } } yield newVersion - private def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup, userToken: Option[String])( - implicit ec: ExecutionContext): Fox[Unit] = + private def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Unit] = for { updateActionsJson <- Fox.successful(Json.toJson(preprocessActionsForStorage(updateActionGroup))) _ <- tracingDataStore.annotationUpdates.put(annotationId, updateActionGroup.version, updateActionsJson) bucketMutatingActions = findBucketMutatingActions(updateActionGroup) _ <- Fox.runIf(bucketMutatingActions.nonEmpty)( - volumeTracingService - .applyBucketMutatingActions(annotationId, bucketMutatingActions, updateActionGroup.version, userToken)) + volumeTracingService.applyBucketMutatingActions(annotationId, bucketMutatingActions, updateActionGroup.version)) } yield () private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index d1dd18cf044..2a1eda1d2dc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -39,6 +39,11 @@ case class AnnotationWithTracings( } } yield volumeTracing + def 
getEditableMappingInfo(tracingId: String): Box[EditableMappingInfo] = + for { + (info, _) <- editableMappingsByTracingId.get(tracingId) + } yield info + def version: Long = annotation.version def addTracing(a: AddLayerAnnotationUpdateAction): AnnotationWithTracings = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index eb46397ca28..c0b5b191620 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.annotation +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.option2Fox @@ -20,7 +21,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ApplyableVolumeUpdateAction, BucketMutatingVolumeUpdateAction, - VolumeTracingService, VolumeUpdateAction } import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} @@ -37,7 +37,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl extends KeyValueStoreImplicits with LazyLogging { - def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup], userToken: Option[String]): Fox[Unit] = + def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit tc: TokenContext): Fox[Unit] = for { _ <- remoteWebknossosClient.reportTracingUpdates( TracingUpdatesReport( @@ -46,7 +46,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl statistics = 
updateGroups.flatMap(_.stats).lastOption, // TODO statistics per tracing/layer significantChangesCount = updateGroups.map(_.significantChangesCount).sum, viewChangesCount = updateGroups.map(_.viewChangesCount).sum, - userToken + tc.userTokenOpt )) } yield () @@ -115,17 +115,17 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield Json.toJson(updateActionGroupsJs) } - def get(annotationId: String, version: Option[Long], userToken: Option[String])( - implicit ec: ExecutionContext): Fox[AnnotationProto] = + def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationProto] = for { - withTracings <- getWithTracings(annotationId, version, List.empty, List.empty, userToken) + withTracings <- getWithTracings(annotationId, version, List.empty, List.empty) } yield withTracings.annotation def getWithTracings(annotationId: String, version: Option[Long], requestedSkeletonTracingIds: List[String], - requestedVolumeTracingIds: List[String], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + requestedVolumeTracingIds: List[String])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = for { annotationWithVersion <- tracingDataStore.annotations.get(annotationId, version)(fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" annotation = annotationWithVersion.value @@ -133,17 +133,16 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl annotationId, version, requestedSkeletonTracingIds, - requestedVolumeTracingIds, - userToken) ?~> "applyUpdates.failed" + requestedVolumeTracingIds) ?~> "applyUpdates.failed" } yield updated - private def applyPendingUpdates( - annotation: AnnotationProto, - annotationId: String, - targetVersionOpt: Option[Long], - requestedSkeletonTracingIds: List[String], - requestedVolumeTracingIds: List[String], - userToken: Option[String])(implicit ec: 
ExecutionContext): Fox[AnnotationWithTracings] = + private def applyPendingUpdates(annotation: AnnotationProto, + annotationId: String, + targetVersionOpt: Option[Long], + requestedSkeletonTracingIds: List[String], + requestedVolumeTracingIds: List[String])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = for { targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) ?~> "determineTargetVersion.failed" updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" @@ -151,7 +150,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl updates, requestedSkeletonTracingIds, requestedVolumeTracingIds) ?~> "findTracingsForUpdates.failed" - updated <- applyUpdates(annotationWithTracings, annotationId, updates, targetVersion, userToken) ?~> "applyUpdates.inner.failed" + updated <- applyUpdates(annotationWithTracings, annotationId, updates, targetVersion) ?~> "applyUpdates.inner.failed" } yield updated private def findTracingsForUpdates( @@ -189,11 +188,11 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, editableMappingsMap) } - private def applyUpdates(annotation: AnnotationWithTracings, - annotationId: String, - updates: List[UpdateAction], - targetVersion: Long, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { + private def applyUpdates( + annotation: AnnotationWithTracings, + annotationId: String, + updates: List[UpdateAction], + targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 6bb280fe104..9b9f9f191a9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -38,7 +38,7 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer def mergedFromContents(token: Option[String], persist: Boolean): Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { val tracings: List[Option[SkeletonTracing]] = request.body for { mergedTracing <- Fox.box2Fox(tracingService.merge(tracings.flatten, MergedVolumeStats.empty(), Empty)) @@ -59,13 +59,9 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer boundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { - tracing <- tracingService.find(annotationId, - tracingId, - version, - applyUpdates = true, - userToken = urlOrHeaderToken(token, request)) ?~> Messages( + tracing <- tracingService.find(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( "tracing.notFound") editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index e2858994667..d398abbdc4f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -29,7 +29,7 @@ class TSAnnotationController @Inject()( def save(token: Option[String], annotationId: String): Action[AnnotationProto] = Action.async(validateProto[AnnotationProto]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { // TODO assert id does not already exist _ <- tracingDataStore.annotations.put(annotationId, 0L, request.body) @@ -43,12 +43,9 @@ class TSAnnotationController @Inject()( Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.writeAnnotation(annotationId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.writeAnnotation(annotationId)) { for { - _ <- annotationTransactionService.handleUpdateGroups(annotationId, - request.body, - urlOrHeaderToken(token, request)) + _ <- annotationTransactionService.handleUpdateGroups(annotationId, request.body) } yield Ok } } @@ -60,8 +57,7 @@ class TSAnnotationController @Inject()( newestVersion: Option[Long] = None, oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId), - urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId)) { for { updateLog <- annotationService.updateActionLog(annotationId, newestVersion, oldestVersion) } yield Ok(updateLog) @@ -72,8 +68,7 @@ class TSAnnotationController @Inject()( def newestVersion(token: Option[String], annotationId: String): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId)) { for { newestVersion <- annotationService.currentMaterializableVersion(annotationId) } yield JsonOk(Json.obj("version" -> newestVersion)) @@ -84,7 +79,7 @@ class TSAnnotationController @Inject()( def updateActionStatistics(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { statistics <- annotationService.updateActionStatistics(tracingId) } yield Ok(statistics) @@ -96,10 +91,9 @@ class TSAnnotationController @Inject()( Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId), - urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId)) { for { - annotationProto <- annotationService.get(annotationId, version, urlOrHeaderToken(token, request)) + annotationProto <- annotationService.get(annotationId, version) } yield Ok(annotationProto.toByteArray).as(protobufMimeType) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index d39cf16c671..133453f00b8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -4,7 +4,6 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest -import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.{TracingSelector, TracingService} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} @@ -15,7 +14,6 @@ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scalapb.{GeneratedMessage, GeneratedMessageCompanion} import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends Controller { @@ -46,7 +44,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def save(token: Option[String]): Action[T] = Action.async(validateProto[T]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { val tracing = request.body tracingService.save(tracing, None, 0).map { newId => Ok(Json.toJson(newId)) @@ -59,7 +57,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def saveMultiple(token: Option[String]): 
Action[Ts] = Action.async(validateProto[Ts]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[T] => tracingOpt match { case Some(tracing) => tracingService.save(tracing, None, 0).map(Some(_)) @@ -75,13 +73,9 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, - tracingId, - version, - applyUpdates = true, - userToken = urlOrHeaderToken(token, request)) ?~> Messages( + tracing <- tracingService.find(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( "tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } @@ -91,11 +85,9 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def getMultiple(token: Option[String]): Action[List[Option[TracingSelector]]] = Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { - tracings <- tracingService.findMultiple(request.body, - applyUpdates = true, - userToken = urlOrHeaderToken(token, request)) + tracings <- tracingService.findMultiple(request.body, applyUpdates = true) } yield { Ok(tracings.toByteArray).as(protobufMimeType) } @@ -106,11 +98,9 @@ trait 
TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def mergedFromIds(token: Option[String], persist: Boolean): Action[List[Option[TracingSelector]]] = Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { - tracingOpts <- tracingService.findMultiple(request.body, - applyUpdates = true, - userToken = urlOrHeaderToken(token, request)) ?~> Messages( + tracingOpts <- tracingService.findMultiple(request.body, applyUpdates = true) ?~> Messages( "tracing.notFound") tracingsWithIds = tracingOpts.zip(request.body).flatMap { case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) @@ -121,11 +111,8 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C tracingsWithIds.map(_._1), newId, newVersion = 0L, - toCache = !persist, - token) - newEditableMappingIdBox <- tracingService - .mergeEditableMappings(tracingsWithIds, urlOrHeaderToken(token, request)) - .futureBox + toCache = !persist) + newEditableMappingIdBox <- tracingService.mergeEditableMappings(tracingsWithIds).futureBox newEditableMappingIdOpt <- newEditableMappingIdBox match { case Full(newEditableMappingId) => Fox.successful(Some(newEditableMappingId)) case Empty => Fox.successful(None) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 8621dbada90..4eb936fc829 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -27,7 +27,7 @@ import 
com.scalableminds.webknossos.datastore.services.{ FullMeshRequest, UserAccessRequest } -import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingService, @@ -44,7 +44,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeSegmentStatisticsService, VolumeTracingService } -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits} +import com.scalableminds.webknossos.tracingstore.tracings.KeyValueStoreImplicits import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, TSRemoteWebknossosClient, @@ -67,6 +67,7 @@ class VolumeTracingController @Inject()( val config: TracingStoreConfig, val remoteDataStoreClient: TSRemoteDatastoreClient, val accessTokenService: TracingStoreAccessTokenService, + annotationTransactionService: AnnotationTransactionService, editableMappingService: EditableMappingService, val slackNotificationService: TSSlackNotificationService, val remoteWebknossosClient: TSRemoteWebknossosClient, @@ -97,14 +98,13 @@ class VolumeTracingController @Inject()( Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") resolutionRestrictions = 
ResolutionRestrictions(minResolution, maxResolution) resolutions <- tracingService - .initializeWithData(annotationId, tracingId, tracing, initialData, resolutionRestrictions, token) + .initializeWithData(annotationId, tracingId, tracing, initialData, resolutionRestrictions) .toFox _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) } yield Ok(Json.toJson(tracingId)) @@ -116,7 +116,7 @@ class VolumeTracingController @Inject()( def mergedFromContents(token: Option[String], persist: Boolean): Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { _ <- Fox.successful(()) tracings = request.body @@ -136,13 +136,12 @@ class VolumeTracingController @Inject()( Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") resolutions <- tracingService - .initializeWithDataMultiple(annotationId, tracingId, tracing, initialData, token) + .initializeWithDataMultiple(annotationId, tracingId, tracing, initialData) .toFox _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) } yield Ok(Json.toJson(tracingId)) @@ -160,13 +159,9 @@ class VolumeTracingController @Inject()( voxelSizeUnit: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { - 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, - tracingId, - version, - userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId, version) ?~> Messages("tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) @@ -186,12 +181,11 @@ class VolumeTracingController @Inject()( def data(token: Option[String], annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") (data, indices) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) + editableMappingService.volumeData(tracing, tracingId, request.body) else tracingService.data(tracingId, tracing, request.body) } yield Ok(data).withHeaders(getMissingBucketsHeaders(indices): _*) } @@ -216,11 +210,9 @@ class VolumeTracingController @Inject()( boundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - val userToken = 
urlOrHeaderToken(token, request) - accessTokenService.validateAccess(UserAccessRequest.webknossos, userToken) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = userToken) ?~> Messages( - "tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") _ = logger.info(s"Duplicating volume tracing $tracingId...") datasetBoundingBox = request.body.asJson.flatMap(_.validateOpt[BoundingBox].asOpt.flatten) resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) @@ -230,7 +222,7 @@ class VolumeTracingController @Inject()( remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) newEditableMappingId <- Fox.runIf(tracing.getHasEditableMapping)( - editableMappingService.duplicate(tracing.mappingName, version = None, remoteFallbackLayerOpt, userToken)) + editableMappingService.duplicate(tracing.mappingName, version = None, remoteFallbackLayerOpt)) (newId, newTracing) <- tracingService.duplicate( annotationId, tracingId, @@ -241,11 +233,9 @@ class VolumeTracingController @Inject()( editPositionParsed, editRotationParsed, boundingBoxParsed, - newEditableMappingId, - userToken + newEditableMappingId ) - _ <- Fox.runIfOptionTrue(downsample)( - tracingService.downsample(annotationId, newId, tracingId, newTracing, userToken)) + _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(annotationId, newId, tracingId, newTracing)) } yield Ok(Json.toJson(newId)) } } @@ -257,18 +247,16 @@ class VolumeTracingController @Inject()( tracingId: String): Action[MultipartFormData[TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { + 
accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox zipFile <- request.body.files.headOption.map(f => new File(f.ref.path.toString)).toFox largestSegmentId <- tracingService.importVolumeData(annotationId, tracingId, tracing, zipFile, - currentVersion, - urlOrHeaderToken(token, request)) + currentVersion) } yield Ok(Json.toJson(largestSegmentId)) } } @@ -280,20 +268,14 @@ class VolumeTracingController @Inject()( dryRun: Boolean): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos, urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- tracingService.currentVersion(tracingId) before = Instant.now - canAddSegmentIndex <- tracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing, token) - processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)( - tracingService.addSegmentIndex(annotationId, - tracingId, - tracing, - currentVersion, - urlOrHeaderToken(token, request), - dryRun)) ?~> "addSegmentIndex.failed" + canAddSegmentIndex <- tracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing) + processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)(tracingService + .addSegmentIndex(annotationId, tracingId, tracing, currentVersion, dryRun)) ?~> "addSegmentIndex.failed" currentVersionNew <- tracingService.currentVersion(tracingId) _ 
<- Fox.runIf(!dryRun)(bool2Fox( processedBucketCountOpt.isEmpty || currentVersionNew == currentVersion + 1L) ?~> "Version increment failed. Looks like someone edited the annotation layer in the meantime.") @@ -312,7 +294,7 @@ class VolumeTracingController @Inject()( newestVersion: Option[Long] = None, oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { updateLog <- tracingService.updateActionLog(tracingId, newestVersion, oldestVersion) } yield Ok(updateLog) @@ -324,16 +306,15 @@ class VolumeTracingController @Inject()( annotationId: String, tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. 
- tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") (vertices, neighbors) <- if (tracing.getHasEditableMapping) - editableMappingService.createAdHocMesh(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) - else tracingService.createAdHocMesh(annotationId, tracingId, request.body, urlOrHeaderToken(token, request)) + editableMappingService.createAdHocMesh(tracing, tracingId, request.body) + else tracingService.createAdHocMesh(annotationId, tracingId, request.body) } yield { // We need four bytes for each float val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) @@ -345,9 +326,9 @@ class VolumeTracingController @Inject()( def loadFullMeshStl(token: Option[String], annotationId: String, tracingId: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], annotationId, tracingId, request.body) ?~> "mesh.file.loadChunk.failed" + data: Array[Byte] <- fullMeshService.loadFor(annotationId, tracingId, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } } @@ -360,9 +341,9 @@ class VolumeTracingController @Inject()( def findData(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - positionOpt <- tracingService.findData(annotationId, tracingId, userToken = 
urlOrHeaderToken(token, request)) + positionOpt <- tracingService.findData(annotationId, tracingId) } yield { Ok(Json.obj("position" -> positionOpt, "resolution" -> positionOpt.map(_ => Vec3Int.ones))) } @@ -374,17 +355,15 @@ class VolumeTracingController @Inject()( tracingId: String, agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" mappingName <- tracing.mappingName ?~> "annotation.agglomerateSkeleton.noMappingSet" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback( - mappingName, - remoteFallbackLayer, - agglomerateId, - urlOrHeaderToken(token, request)) + agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback(mappingName, + remoteFallbackLayer, + agglomerateId) } yield Ok(agglomerateSkeletonBytes) } } @@ -392,9 +371,9 @@ class VolumeTracingController @Inject()( def makeMappingEditable(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) tracingMappingName <- tracing.mappingName ?~> 
"annotation.noMappingSet" _ <- assertMappingIsNotLocked(tracing) _ <- bool2Fox(tracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" @@ -405,20 +384,20 @@ class VolumeTracingController @Inject()( isLocked = Some(true), actionTracingId = tracingId, actionTimestamp = Some(System.currentTimeMillis())) - /*_ <- tracingService.handleUpdateGroup( // TODO - tracingId, - UpdateActionGroup(tracing.version + 1, - System.currentTimeMillis(), - None, - List(volumeUpdate), - None, - None, - "dummyTransactionId", - 1, - 0), - tracing.version, - urlOrHeaderToken(token, request) - )*/ + _ <- annotationTransactionService + .handleUpdateGroups( // TODO replace this route by the update action only? address editable mappings by volume tracing id? + annotationId, + List( + UpdateActionGroup(tracing.version + 1, + System.currentTimeMillis(), + None, + List(volumeUpdate), + None, + None, + "dummyTransactionId", + 1, + 0)) + ) infoJson <- editableMappingService.infoJson(tracingId = tracingId, editableMappingId = editableMappingId, editableMappingInfo = editableMappingInfo, @@ -434,12 +413,12 @@ class VolumeTracingController @Inject()( def agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String): Action[MinCutParameters] = Action.async(validateJson[MinCutParameters]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - edges <- editableMappingService.agglomerateGraphMinCut(request.body, remoteFallbackLayer, token) + edges <- 
editableMappingService.agglomerateGraphMinCut(request.body, remoteFallbackLayer) } yield Ok(Json.toJson(edges)) } } @@ -450,14 +429,12 @@ class VolumeTracingController @Inject()( tracingId: String): Action[NeighborsParameters] = Action.async(validateJson[NeighborsParameters]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(request.body, - remoteFallbackLayer, - token) + (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(request.body, remoteFallbackLayer) } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) } } @@ -467,9 +444,9 @@ class VolumeTracingController @Inject()( annotationId: String, tracingId: String): Action[List[UpdateActionGroup]] = Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) mappingName <- tracing.mappingName.toFox _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) @@ -482,15 +459,11 @@ class VolumeTracingController 
@Inject()( statistics = None, significantChangesCount = updateGroup.actions.length, viewChangesCount = 0, - urlOrHeaderToken(token, request) + tokenContextForRequest.userTokenOpt ) _ <- remoteWebknossosClient.reportTracingUpdates(report) remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - _ <- editableMappingService.update(mappingName, - updateGroup, - updateGroup.version, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) + _ <- editableMappingService.update(mappingName, updateGroup, updateGroup.version, remoteFallbackLayer) } yield Ok } } @@ -501,15 +474,11 @@ class VolumeTracingController @Inject()( version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) mappingName <- tracing.mappingName.toFox - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - editableMappingInfo <- editableMappingService.getInfo(mappingName, - version, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) + editableMappingInfo <- editableMappingService.getInfoNEW(annotationId, tracingId, version) infoJson <- editableMappingService.infoJson(tracingId = tracingId, editableMappingId = mappingName, editableMappingInfo = editableMappingInfo, @@ -524,23 +493,21 @@ class VolumeTracingController @Inject()( tracingId: String): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { 
- tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) editableMappingId <- tracing.mappingName.toFox remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) (editableMappingInfo, editableMappingVersion) <- editableMappingService.getInfoAndActualVersion( editableMappingId, requestedVersion = None, - remoteFallbackLayer = remoteFallbackLayer, - userToken = urlOrHeaderToken(token, request)) + remoteFallbackLayer = remoteFallbackLayer) relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( request.body.items.toSet, editableMappingInfo, editableMappingVersion, editableMappingId, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) + remoteFallbackLayer) agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) } @@ -553,16 +520,13 @@ class VolumeTracingController @Inject()( agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) mappingName <- tracing.mappingName.toFox remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService - .getAgglomerateGraphForId(mappingName, - agglomerateId, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) + .getAgglomerateGraphForId(mappingName, agglomerateId, remoteFallbackLayer) .futureBox segmentIds <- agglomerateGraphBox match { case Full(agglomerateGraph) => 
Fox.successful(agglomerateGraph.segments) @@ -579,9 +543,9 @@ class VolumeTracingController @Inject()( annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) mappingName <- tracingService.baseMappingName(tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentVolume(annotationId, @@ -589,8 +553,7 @@ class VolumeTracingController @Inject()( segmentId, request.body.mag, mappingName, - request.body.additionalCoordinates, - urlOrHeaderToken(token, request)) + request.body.additionalCoordinates) } } yield Ok(Json.toJson(segmentVolumes)) } @@ -600,9 +563,9 @@ class VolumeTracingController @Inject()( annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + tracing <- tracingService.find(annotationId, tracingId) mappingName <- tracingService.baseMappingName(tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentBoundingBox(annotationId, @@ -610,8 +573,7 @@ class VolumeTracingController @Inject()( segmentId, request.body.mag, mappingName, - 
request.body.additionalCoordinates, - urlOrHeaderToken(token, request)) + request.body.additionalCoordinates) } } yield Ok(Json.toJson(segmentBoundingBoxes)) } @@ -622,10 +584,10 @@ class VolumeTracingController @Inject()( tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - fallbackLayer <- tracingService.getFallbackLayer(annotationId, tracingId, urlOrHeaderToken(token, request)) - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) + fallbackLayer <- tracingService.getFallbackLayer(annotationId, tracingId) + tracing <- tracingService.find(annotationId, tracingId) mappingName <- tracingService.baseMappingName(tracing) _ <- bool2Fox(DataLayer.bucketSize <= request.body.cubeSize) ?~> "cubeSize must be at least one bucket (32³)" bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService @@ -637,8 +599,7 @@ class VolumeTracingController @Inject()( additionalCoordinates = request.body.additionalCoordinates, additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), mappingName = mappingName, - editableMappingTracingId = tracingService.editableMappingTracingId(tracing, tracingId), - userToken = urlOrHeaderToken(token, request) + editableMappingTracingId = tracingService.editableMappingTracingId(tracing, tracingId) ) bucketPositionsForCubeSize = bucketPositionsRaw.values .map(vec3IntFromProto) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index ddc2b73a5f9..856e0545ecb 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.mvc.ExtendedController import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -61,10 +62,9 @@ class VolumeTracingZarrStreamingController @Inject()( tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -84,10 +84,9 @@ class VolumeTracingZarrStreamingController @Inject()( tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> 
NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto(_).toMagLiteral(allowScalar = true)) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -102,10 +101,9 @@ class VolumeTracingZarrStreamingController @Inject()( mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -127,10 +125,9 @@ class VolumeTracingZarrStreamingController @Inject()( mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -141,10 +138,9 @@ class VolumeTracingZarrStreamingController @Inject()( def 
zArray(token: Option[String], annotationId: String, tracingId: String, mag: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -176,10 +172,9 @@ class VolumeTracingZarrStreamingController @Inject()( def zarrJsonForMag(token: Option[String], annotationId: String, tracingId: String, mag: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -223,7 +218,7 @@ class VolumeTracingZarrStreamingController @Inject()( def zGroup(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) } } @@ -238,10 +233,9 @@ class VolumeTracingZarrStreamingController @Inject()( annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND @@ -257,10 +251,9 @@ class VolumeTracingZarrStreamingController @Inject()( annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND @@ -279,10 +272,9 @@ class VolumeTracingZarrStreamingController @Inject()( tracingName: Option[String], zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND zarrLayer = ZarrSegmentationLayer( name = tracingName.getOrElse(tracingId), @@ -305,10 +297,9 @@ class VolumeTracingZarrStreamingController @Inject()( coordinates: String): Action[AnyContent] = Action.async { implicit request => { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId, userToken = urlOrHeaderToken(token, request)) ?~> Messages( - "tracing.notFound") ~> NOT_FOUND + tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -329,7 +320,7 @@ class VolumeTracingZarrStreamingController @Inject()( additionalCoordinates = additionalCoordinates ) (data, missingBucketIndices) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, List(wkRequest), urlOrHeaderToken(token, request)) + editableMappingService.volumeData(tracing, tracingId, List(wkRequest)) else tracingService.data(tracingId, tracing, List(wkRequest)) dataWithFallback <- getFallbackLayerDataIfEmpty(tracing, tracingId, @@ -338,22 +329,21 @@ class VolumeTracingZarrStreamingController @Inject()( magParsed, Vec3Int(x, y, z), cubeSize, - additionalCoordinates, - urlOrHeaderToken(token, request)) ~> 
NOT_FOUND + additionalCoordinates) ~> NOT_FOUND } yield Ok(dataWithFallback) } } } - private def getFallbackLayerDataIfEmpty(tracing: VolumeTracing, - tracingId: String, - data: Array[Byte], - missingBucketIndices: List[Int], - mag: Vec3Int, - position: Vec3Int, - cubeSize: Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - urlToken: Option[String]): Fox[Array[Byte]] = + private def getFallbackLayerDataIfEmpty( + tracing: VolumeTracing, + tracingId: String, + data: Array[Byte], + missingBucketIndices: List[Int], + mag: Vec3Int, + position: Vec3Int, + cubeSize: Int, + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext): Fox[Array[Byte]] = if (missingBucketIndices.nonEmpty) { for { remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) ?~> "No data at coordinates, no fallback layer defined" @@ -367,8 +357,7 @@ class VolumeTracingZarrStreamingController @Inject()( additionalCoordinates = additionalCoordinates ) (fallbackData, fallbackMissingBucketIndices) <- remoteDataStoreClient.getData(remoteFallbackLayer, - List(request), - urlToken) + List(request)) _ <- bool2Fox(fallbackMissingBucketIndices.isEmpty) ?~> "No data at coordinations in fallback layer" } yield fallbackData } else Fox.successful(data) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala index e95880ae974..5d347c7a4b0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import 
com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.option2Fox @@ -36,10 +37,9 @@ trait FallbackDataHelper { datasetId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) } yield RemoteFallbackLayer(datasetId.team, datasetId.name, layerName, tracing.elementClass) - def getFallbackDataFromDatastore( - remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebknossosDataRequest], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[(Array[Byte], List[Int])] = - fallbackDataCache.getOrLoad(FallbackDataKey(remoteFallbackLayer, dataRequests, userToken), - k => remoteDatastoreClient.getData(k.remoteFallbackLayer, k.dataRequests, k.userToken)) + def getFallbackDataFromDatastore(remoteFallbackLayer: RemoteFallbackLayer, dataRequests: List[WebknossosDataRequest])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[(Array[Byte], List[Int])] = + fallbackDataCache.getOrLoad(FallbackDataKey(remoteFallbackLayer, dataRequests, tc.userTokenOpt), + k => remoteDatastoreClient.getData(k.remoteFallbackLayer, k.dataRequests)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 7250b28eb75..9d0291c6c51 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -1,13 +1,9 @@ package com.scalableminds.webknossos.tracingstore.tracings +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} -import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.annotation.{ - AnnotationWithTracings, - 
TSAnnotationService, - UpdateActionGroup -} +import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.typesafe.scalalogging.LazyLogging @@ -106,6 +102,7 @@ trait TracingService[T <: GeneratedMessage] def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = uncommittedUpdatesStore.removeAllConditional(patternFor(tracingId, transactionId)) + /* // TODO ? add this to migration? private def migrateTracing(tracingFox: Fox[T], tracingId: String): Fox[T] = tracingMigrationService.migrateTracing(tracingFox).flatMap { case (tracing, hasChanged) => @@ -114,6 +111,7 @@ trait TracingService[T <: GeneratedMessage] else Fox.successful(tracing) } + */ def applyPendingUpdates(tracing: T, tracingId: String, targetVersion: Option[Long]): Fox[T] = Fox.successful(tracing) @@ -121,18 +119,14 @@ trait TracingService[T <: GeneratedMessage] tracingId: String, version: Option[Long] = None, useCache: Boolean = true, - applyUpdates: Boolean = false, - userToken: Option[String]): Fox[T] + applyUpdates: Boolean = false)(implicit tc: TokenContext): Fox[T] - def findMultiple(selectors: List[Option[TracingSelector]], - useCache: Boolean = true, - applyUpdates: Boolean = false, - userToken: Option[String]): Fox[List[Option[T]]] = + def findMultiple(selectors: List[Option[TracingSelector]], useCache: Boolean = true, applyUpdates: Boolean = false)( + implicit tc: TokenContext): Fox[List[Option[T]]] = Fox.combined { selectors.map { case Some(selector) => - find("dummyAnnotationid", selector.tracingId, selector.version, useCache, applyUpdates, userToken = userToken) - .map(Some(_)) + find("dummyAnnotationid", selector.tracingId, selector.version, useCache, applyUpdates).map(Some(_)) case None => Fox.successful(None) } } @@ -175,8 +169,7 @@ trait 
TracingService[T <: GeneratedMessage] tracings: Seq[T], newId: String, newVersion: Long, - toCache: Boolean, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] + toCache: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] - def mergeEditableMappings(tracingsWithIds: List[(T, String)], userToken: Option[String]): Fox[String] + def mergeEditableMappings(tracingsWithIds: List[(T, String)])(implicit tc: TokenContext): Fox[String] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index 47965cee726..f526a4247f2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.Fox @@ -37,8 +38,7 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP (editableMappingInfo, editableMappingVersion) <- layer.editableMappingService.getInfoAndActualVersion( editableMappingId, requestedVersion = None, - remoteFallbackLayer = remoteFallbackLayer, - userToken = layer.token) + remoteFallbackLayer = remoteFallbackLayer)(layer.tokenContext) dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, @@ -48,18 +48,17 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP 
version = None, additionalCoordinates = readInstruction.bucket.additionalCoordinates ) - (unmappedData, indices) <- layer.editableMappingService.getFallbackDataFromDatastore(remoteFallbackLayer, - List(dataRequest), - layer.token) + (unmappedData, indices) <- layer.editableMappingService + .getFallbackDataFromDatastore(remoteFallbackLayer, List(dataRequest))(ec, layer.tokenContext) _ <- bool2Fox(indices.isEmpty) unmappedDataTyped <- layer.editableMappingService.bytesToUnsignedInt(unmappedData, layer.tracing.elementClass) segmentIds = layer.editableMappingService.collectSegmentIds(unmappedDataTyped) - relevantMapping <- layer.editableMappingService.generateCombinedMappingForSegmentIds(segmentIds, - editableMappingInfo, - editableMappingVersion, - editableMappingId, - remoteFallbackLayer, - layer.token) + relevantMapping <- layer.editableMappingService.generateCombinedMappingForSegmentIds( + segmentIds, + editableMappingInfo, + editableMappingVersion, + editableMappingId, + remoteFallbackLayer)(layer.tokenContext) mappedData: Array[Byte] <- layer.editableMappingService.mapData(unmappedDataTyped, relevantMapping, layer.elementClass) @@ -72,7 +71,7 @@ case class EditableMappingLayer(name: String, resolutions: List[Vec3Int], largestSegmentId: Option[Long], elementClass: ElementClass.Value, - token: Option[String], + tokenContext: TokenContext, tracing: VolumeTracing, tracingId: String, editableMappingService: EditableMappingService) @@ -90,7 +89,7 @@ case class EditableMappingLayer(name: String, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = new EditableMappingBucketProvider(layer = this) - override def bucketProviderCacheKey: String = s"$name-token=$token" + override def bucketProviderCacheKey: String = s"$name-token=${tokenContext.userTokenOpt}" override def mappings: Option[Set[String]] = None diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 56039554e54..e82f75cb1e3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.time.Instant @@ -21,7 +22,7 @@ import com.scalableminds.webknossos.datastore.services.{ AdHocMeshServiceHolder, BinaryDataService } -import com.scalableminds.webknossos.tracingstore.annotation.{UpdateAction, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateAction, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, @@ -92,6 +93,7 @@ object NodeWithPosition { class EditableMappingService @Inject()( val tracingDataStore: TracingDataStore, val adHocMeshServiceHolder: AdHocMeshServiceHolder, + annotationService: TSAnnotationService, val remoteDatastoreClient: TSRemoteDatastoreClient, val remoteWebknossosClient: TSRemoteWebknossosClient )(implicit ec: ExecutionContext) @@ -149,15 +151,11 @@ class EditableMappingService @Inject()( def duplicate(editableMappingIdOpt: Option[String], version: Option[Long], - remoteFallbackLayerBox: Box[RemoteFallbackLayer], - userToken: Option[String]): Fox[String] = + remoteFallbackLayerBox: Box[RemoteFallbackLayer])(implicit tc: TokenContext): Fox[String] = for { editableMappingId <- editableMappingIdOpt ?~> "duplicate on editable mapping 
without id" remoteFallbackLayer <- remoteFallbackLayerBox ?~> "duplicate on editable mapping without remote fallback layer" - editableMappingInfoAndVersion <- getInfoAndActualVersion(editableMappingId, - version, - remoteFallbackLayer, - userToken) + editableMappingInfoAndVersion <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer) newIdAndInfoV0 <- create(editableMappingInfoAndVersion._1.baseMappingName) newId = newIdAndInfoV0._1 newVersion = editableMappingInfoAndVersion._2 @@ -204,12 +202,17 @@ class EditableMappingService @Inject()( } yield () } - def getInfo(editableMappingId: String, - version: Option[Long] = None, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[EditableMappingInfo] = + def getInfoNEW(annotationId: String, tracingId: String, version: Option[Long] = None)( + implicit tc: TokenContext): Fox[EditableMappingInfo] = for { - (info, _) <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer, userToken) + annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), List.empty) + tracing <- annotation.getEditableMappingInfo(tracingId) + } yield tracing + + def getInfo(editableMappingId: String, version: Option[Long] = None, remoteFallbackLayer: RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[EditableMappingInfo] = + for { + (info, _) <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer) } yield info def getBaseMappingName(editableMappingId: String): Fox[Option[String]] = @@ -222,45 +225,40 @@ class EditableMappingService @Inject()( case _ => None } - def getInfoAndActualVersion(editableMappingId: String, - requestedVersion: Option[Long] = None, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[(EditableMappingInfo, Long)] = + def getInfoAndActualVersion( + editableMappingId: String, + requestedVersion: Option[Long] = None, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): 
Fox[(EditableMappingInfo, Long)] = for { desiredVersion <- getClosestMaterializableVersionOrZero(editableMappingId, requestedVersion) materializedInfo <- materializedInfoCache.getOrLoad( (editableMappingId, desiredVersion), - _ => applyPendingUpdates(editableMappingId, desiredVersion, remoteFallbackLayer, userToken)) + _ => applyPendingUpdates(editableMappingId, desiredVersion, remoteFallbackLayer)) } yield (materializedInfo, desiredVersion) def update(editableMappingId: String, updateActionGroup: UpdateActionGroup, newVersion: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Unit] = for { actionsWithTimestamp <- Fox.successful(updateActionGroup.actions.map(_.addTimestamp(updateActionGroup.timestamp))) - _ <- dryApplyUpdates(editableMappingId, newVersion, actionsWithTimestamp, remoteFallbackLayer, userToken) ?~> "editableMapping.dryUpdate.failed" + _ <- dryApplyUpdates(editableMappingId, newVersion, actionsWithTimestamp, remoteFallbackLayer) ?~> "editableMapping.dryUpdate.failed" _ <- tracingDataStore.editableMappingUpdates.put(editableMappingId, newVersion, actionsWithTimestamp) } yield () private def dryApplyUpdates(editableMappingId: String, newVersion: Long, updates: List[UpdateAction], - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Unit] = for { - (previousInfo, previousVersion) <- getInfoAndActualVersion(editableMappingId, - None, - remoteFallbackLayer, - userToken) + (previousInfo, previousVersion) <- getInfoAndActualVersion(editableMappingId, None, remoteFallbackLayer) updater = new EditableMappingUpdater( editableMappingId, previousInfo.baseMappingName, previousVersion, newVersion, remoteFallbackLayer, - userToken, + tc, remoteDatastoreClient, this, tracingDataStore, @@ -269,10 +267,8 @@ class EditableMappingService @Inject()( 
updated <- updater.applyUpdatesAndSave(previousInfo, updates, dry = true) ?~> "editableMapping.update.failed" } yield () - def applyPendingUpdates(editableMappingId: String, - desiredVersion: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[EditableMappingInfo] = + def applyPendingUpdates(editableMappingId: String, desiredVersion: Long, remoteFallbackLayer: RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[EditableMappingInfo] = for { closestMaterializedWithVersion <- getClosestMaterialized(editableMappingId, desiredVersion) updatedEditableMappingInfo: EditableMappingInfo <- if (desiredVersion == closestMaterializedWithVersion.version) @@ -286,7 +282,7 @@ class EditableMappingService @Inject()( closestMaterializedWithVersion.version, desiredVersion, remoteFallbackLayer, - userToken, + tc, remoteDatastoreClient, this, tracingDataStore, @@ -343,32 +339,28 @@ class EditableMappingService @Inject()( def findSegmentIdAtPositionIfNeeded(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], segmentIdOpt: Option[Long], - mag: Vec3Int, - userToken: Option[String]): Fox[Long] = + mag: Vec3Int)(implicit tc: TokenContext): Fox[Long] = segmentIdOpt match { case Some(segmentId) => Fox.successful(segmentId) - case None => findSegmentIdAtPosition(remoteFallbackLayer, positionOpt, mag, userToken) + case None => findSegmentIdAtPosition(remoteFallbackLayer, positionOpt, mag) } private def findSegmentIdAtPosition(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], - mag: Vec3Int, - userToken: Option[String]): Fox[Long] = + mag: Vec3Int)(implicit tc: TokenContext): Fox[Long] = for { pos <- positionOpt.toFox ?~> "segment id or position is required in editable mapping action" - voxelAsBytes: Array[Byte] <- remoteDatastoreClient.getVoxelAtPosition(userToken, remoteFallbackLayer, pos, mag) + voxelAsBytes: Array[Byte] <- remoteDatastoreClient.getVoxelAtPosition(remoteFallbackLayer, pos, mag) voxelAsLongArray: 
Array[Long] <- bytesToLongs(voxelAsBytes, remoteFallbackLayer.elementClass) _ <- Fox.bool2Fox(voxelAsLongArray.length == 1) ?~> s"Expected one, got ${voxelAsLongArray.length} segment id values for voxel." voxelAsLong <- voxelAsLongArray.headOption } yield voxelAsLong - def volumeData(tracing: VolumeTracing, - tracingId: String, - dataRequests: DataRequestCollection, - userToken: Option[String]): Fox[(Array[Byte], List[Int])] = + def volumeData(tracing: VolumeTracing, tracingId: String, dataRequests: DataRequestCollection)( + implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = for { editableMappingId <- tracing.mappingName.toFox - dataLayer = editableMappingLayer(editableMappingId, tracing, tracingId, userToken) + dataLayer = editableMappingLayer(editableMappingId, tracing, tracingId) requests = dataRequests.map(r => DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) data <- binaryDataService.handleDataRequests(requests) @@ -425,12 +417,12 @@ class EditableMappingService @Inject()( asSequence = valueProto.segmentToAgglomerate.map(pair => pair.segmentId -> pair.agglomerateId) } yield asSequence - def generateCombinedMappingForSegmentIds(segmentIds: Set[Long], - editableMapping: EditableMappingInfo, - editableMappingVersion: Long, - editableMappingId: String, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Map[Long, Long]] = + def generateCombinedMappingForSegmentIds( + segmentIds: Set[Long], + editableMapping: EditableMappingInfo, + editableMappingVersion: Long, + editableMappingId: String, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Map[Long, Long]] = for { editableMappingForSegmentIds <- getSegmentToAgglomerateForSegmentIds(segmentIds, editableMappingId, @@ -439,25 +431,22 @@ class EditableMappingService @Inject()( segmentIdsInBaseMapping: Set[Long] = segmentIds.diff(segmentIdsInEditableMapping) baseMappingSubset <- 
getBaseSegmentToAgglomerate(editableMapping.baseMappingName, segmentIdsInBaseMapping, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) } yield editableMappingForSegmentIds ++ baseMappingSubset def getAgglomerateSkeletonWithFallback(editableMappingId: String, remoteFallbackLayer: RemoteFallbackLayer, - agglomerateId: Long, - userToken: Option[String]): Fox[Array[Byte]] = + agglomerateId: Long)(implicit tc: TokenContext): Fox[Array[Byte]] = for { // called here to ensure updates are applied - editableMappingInfo <- getInfo(editableMappingId, version = None, remoteFallbackLayer, userToken) - agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, agglomerateId, remoteFallbackLayer, userToken).futureBox + editableMappingInfo <- getInfo(editableMappingId, version = None, remoteFallbackLayer) + agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, agglomerateId, remoteFallbackLayer).futureBox skeletonBytes <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful( agglomerateGraphToSkeleton(editableMappingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) case Empty => - remoteDatastoreClient.getAgglomerateSkeleton(userToken, - remoteFallbackLayer, + remoteDatastoreClient.getAgglomerateSkeleton(remoteFallbackLayer, editableMappingInfo.baseMappingName, agglomerateId) case f: Failure => f.toFox @@ -499,16 +488,13 @@ class EditableMappingService @Inject()( skeleton.toByteArray } - def getBaseSegmentToAgglomerate(mappingName: String, - segmentIds: Set[Long], - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Map[Long, Long]] = { + def getBaseSegmentToAgglomerate(mappingName: String, segmentIds: Set[Long], remoteFallbackLayer: RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[Map[Long, Long]] = { val segmentIdsOrdered = segmentIds.toList for { agglomerateIdsOrdered <- remoteDatastoreClient.getAgglomerateIdsForSegmentIds(remoteFallbackLayer, mappingName, - segmentIdsOrdered, - 
userToken) + segmentIdsOrdered) } yield segmentIdsOrdered.zip(agglomerateIdsOrdered).toMap } @@ -545,29 +531,25 @@ class EditableMappingService @Inject()( bytes = UnsignedIntegerArray.toByteArray(unsignedIntArray, elementClass) } yield bytes - private def editableMappingLayer(mappingName: String, - tracing: VolumeTracing, - tracingId: String, - userToken: Option[String]): EditableMappingLayer = + private def editableMappingLayer(mappingName: String, tracing: VolumeTracing, tracingId: String)( + implicit tc: TokenContext): EditableMappingLayer = EditableMappingLayer( mappingName, tracing.boundingBox, resolutions = tracing.resolutions.map(vec3IntFromProto).toList, largestSegmentId = Some(0L), elementClass = tracing.elementClass, - userToken, + tc, tracing = tracing, tracingId = tracingId, editableMappingService = this ) - def createAdHocMesh(tracing: VolumeTracing, - tracingId: String, - request: WebknossosAdHocMeshRequest, - userToken: Option[String]): Fox[(Array[Float], List[Int])] = + def createAdHocMesh(tracing: VolumeTracing, tracingId: String, request: WebknossosAdHocMeshRequest)( + implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = for { mappingName <- tracing.mappingName.toFox - segmentationLayer = editableMappingLayer(mappingName, tracing, tracingId, userToken) + segmentationLayer = editableMappingLayer(mappingName, tracing, tracingId) adHocMeshRequest = AdHocMeshRequest( dataSource = None, dataLayer = segmentationLayer, @@ -581,14 +563,14 @@ class EditableMappingService @Inject()( result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield result - def getAgglomerateGraphForId(mappingId: String, - agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String], - requestedVersion: Option[Long] = None): Fox[AgglomerateGraph] = + def getAgglomerateGraphForId( + mappingId: String, + agglomerateId: Long, + remoteFallbackLayer: RemoteFallbackLayer, + requestedVersion: Option[Long] = None)(implicit tc: 
TokenContext): Fox[AgglomerateGraph] = for { // called here to ensure updates are applied - (_, version) <- getInfoAndActualVersion(mappingId, requestedVersion, remoteFallbackLayer, userToken) + (_, version) <- getInfoAndActualVersion(mappingId, requestedVersion, remoteFallbackLayer) agglomerateGraph <- agglomerateToGraphCache.getOrLoad( (mappingId, agglomerateId, version), _ => @@ -601,41 +583,32 @@ class EditableMappingService @Inject()( ) } yield agglomerateGraph - def getAgglomerateGraphForIdWithFallback(mapping: EditableMappingInfo, - editableMappingId: String, - version: Option[Long], - agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[AgglomerateGraph] = - for { - agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, - agglomerateId, - remoteFallbackLayer, - userToken, - version).futureBox + def getAgglomerateGraphForIdWithFallback( + mapping: EditableMappingInfo, + editableMappingId: String, + version: Option[Long], + agglomerateId: Long, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[AgglomerateGraph] = + for { + agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, agglomerateId, remoteFallbackLayer, version).futureBox agglomerateGraph <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph) case Empty => - remoteDatastoreClient.getAgglomerateGraph(remoteFallbackLayer, - mapping.baseMappingName, - agglomerateId, - userToken) + remoteDatastoreClient.getAgglomerateGraph(remoteFallbackLayer, mapping.baseMappingName, agglomerateId) case f: Failure => f.toFox } } yield agglomerateGraph - def agglomerateGraphMinCut(parameters: MinCutParameters, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[List[EdgeWithPositions]] = + def agglomerateGraphMinCut(parameters: MinCutParameters, remoteFallbackLayer: RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[List[EdgeWithPositions]] = for { // 
called here to ensure updates are applied - mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer, userToken) + mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer) agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, parameters.editableMappingId, None, parameters.agglomerateId, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) edgesToCut <- minCut(agglomerateGraph, parameters.segmentId1, parameters.segmentId2) ?~> "Could not calculate min-cut on agglomerate graph." edgesWithPositions = annotateEdgesWithPositions(edgesToCut, agglomerateGraph) } yield edgesWithPositions @@ -692,18 +665,16 @@ class EditableMappingService @Inject()( ) } - def agglomerateGraphNeighbors(parameters: NeighborsParameters, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[(Long, Seq[NodeWithPosition])] = + def agglomerateGraphNeighbors(parameters: NeighborsParameters, remoteFallbackLayer: RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[(Long, Seq[NodeWithPosition])] = for { // called here to ensure updates are applied - mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer, userToken) + mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer) agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, parameters.editableMappingId, None, parameters.agglomerateId, - remoteFallbackLayer, - userToken) + remoteFallbackLayer) neighborNodes = neighbors(agglomerateGraph, parameters.segmentId) nodesWithPositions = annotateNodesWithPositions(neighborNodes, agglomerateGraph) } yield (parameters.segmentId, nodesWithPositions) @@ -718,29 +689,24 @@ class EditableMappingService @Inject()( neighborNodes } - def merge(editableMappingIds: List[String], - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[String] = + def merge(editableMappingIds: List[String], remoteFallbackLayer: 
RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[String] = for { firstMappingId <- editableMappingIds.headOption.toFox before = Instant.now - newMappingId <- duplicate(Some(firstMappingId), version = None, Some(remoteFallbackLayer), userToken) + newMappingId <- duplicate(Some(firstMappingId), version = None, Some(remoteFallbackLayer)) _ <- Fox.serialCombined(editableMappingIds.tail)(editableMappingId => - mergeInto(newMappingId, editableMappingId, remoteFallbackLayer, userToken)) + mergeInto(newMappingId, editableMappingId, remoteFallbackLayer)) _ = logger.info(s"Merging ${editableMappingIds.length} editable mappings took ${Instant.since(before)}") } yield newMappingId // read as: merge source into target (mutate target) private def mergeInto(targetEditableMappingId: String, sourceEditableMappingId: String, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String]): Fox[Unit] = + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Unit] = for { targetNewestVersion <- getClosestMaterializableVersionOrZero(targetEditableMappingId, None) - sourceNewestMaterializedWithVersion <- getInfoAndActualVersion(sourceEditableMappingId, - None, - remoteFallbackLayer, - userToken) + sourceNewestMaterializedWithVersion <- getInfoAndActualVersion(sourceEditableMappingId, None, remoteFallbackLayer) sourceNewestVersion = sourceNewestMaterializedWithVersion._2 updateActionsWithVersions <- getUpdateActionsWithVersions(sourceEditableMappingId, sourceNewestVersion, 0L) updateActionsToApply = updateActionsWithVersions.map(_._2).reverse.flatten @@ -750,7 +716,7 @@ class EditableMappingService @Inject()( targetNewestVersion, targetNewestVersion + sourceNewestVersion, remoteFallbackLayer, - userToken, + tc, remoteDatastoreClient, this, tracingDataStore, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 9f1b7c1a0d8..c2a281f39f9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.AgglomerateGraph.{AgglomerateEdge, AgglomerateGraph} @@ -36,7 +37,7 @@ class EditableMappingUpdater( oldVersion: Long, newVersion: Long, remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String], + tokenContext: TokenContext, remoteDatastoreClient: TSRemoteDatastoreClient, editableMappingService: EditableMappingService, tracingDataStore: TracingDataStore, @@ -127,13 +128,11 @@ class EditableMappingUpdater( segmentId1 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition1, update.segmentId1, - update.mag, - userToken) + update.mag)(tokenContext) segmentId2 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition2, update.segmentId2, - update.mag, - userToken) + update.mag)(tokenContext) agglomerateId <- agglomerateIdForSplitAction(update, segmentId1) agglomerateGraph <- agglomerateGraphForIdWithFallback(editableMappingInfo, agglomerateId) _ = if (segmentId1 == 0) @@ -196,7 +195,7 @@ class EditableMappingUpdater( case Some(agglomerateId) => Fox.successful(agglomerateId) case None => editableMappingService - .getBaseSegmentToAgglomerate(baseMappingName, Set(segmentId), remoteFallbackLayer, userToken) + .getBaseSegmentToAgglomerate(baseMappingName, 
Set(segmentId), remoteFallbackLayer)(tokenContext) .flatMap(baseSegmentToAgglomerate => baseSegmentToAgglomerate.get(segmentId)) } } yield agglomerateId @@ -241,7 +240,7 @@ class EditableMappingUpdater( Some(oldVersion), agglomerateId, remoteFallbackLayer, - userToken) + )(tokenContext) } } @@ -336,8 +335,7 @@ class EditableMappingUpdater( private def largestAgglomerateId(mapping: EditableMappingInfo): Fox[Long] = for { largestBaseAgglomerateId <- remoteDatastoreClient.getLargestAgglomerateId(remoteFallbackLayer, - mapping.baseMappingName, - userToken) + mapping.baseMappingName)(tokenContext) } yield math.max(mapping.largestAgglomerateId, largestBaseAgglomerateId) private def applyMergeAction(mapping: EditableMappingInfo, update: MergeAgglomerateUpdateAction)( @@ -346,13 +344,11 @@ class EditableMappingUpdater( segmentId1 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition1, update.segmentId1, - update.mag, - userToken) + update.mag)(tokenContext) segmentId2 <- editableMappingService.findSegmentIdAtPositionIfNeeded(remoteFallbackLayer, update.segmentPosition2, update.segmentId2, - update.mag, - userToken) + update.mag)(tokenContext) _ = if (segmentId1 == 0) logger.warn( s"Merge action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. 
Merging outside of dataset?") @@ -420,8 +416,7 @@ class EditableMappingUpdater( _ <- bool2Fox(revertAction.sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" oldInfo <- editableMappingService.getInfo(editableMappingId, Some(revertAction.sourceVersion), - remoteFallbackLayer, - userToken) + remoteFallbackLayer)(tokenContext) _ = segmentToAgglomerateBuffer.clear() _ = agglomerateToGraphBuffer.clear() segmentToAgglomerateChunkNewestStream = new VersionedSegmentToAgglomerateChunkIterator( @@ -453,8 +448,7 @@ class EditableMappingUpdater( .getAgglomerateGraphForId(editableMappingId, agglomerateId, remoteFallbackLayer, - userToken, - Some(revertAction.sourceVersion)) + Some(revertAction.sourceVersion))(tokenContext) .futureBox .map { case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index e07a99558da..aaff678f9c3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.skeleton import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing @@ -8,7 +9,7 @@ import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} 
import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationWithTracings, TSAnnotationService} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import net.liftweb.common.{Box, Full} @@ -44,13 +45,12 @@ class SkeletonTracingService @Inject()( tracingId: String, version: Option[Long] = None, useCache: Boolean = true, - applyUpdates: Boolean = false, - userToken: Option[String]): Fox[SkeletonTracing] = + applyUpdates: Boolean = false)(implicit tc: TokenContext): Fox[SkeletonTracing] = if (tracingId == TracingIds.dummyTracingId) Fox.successful(dummyTracing) else { for { - annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), List.empty, userToken) // TODO is applyUpdates still needed? + annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), List.empty) // TODO is applyUpdates still needed? 
tracing <- annotation.getSkeleton(tracingId) } yield tracing } @@ -128,12 +128,11 @@ class SkeletonTracingService @Inject()( tracings: Seq[SkeletonTracing], newId: String, newVersion: Long, - toCache: Boolean, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] = + toCache: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = Fox.successful(MergedVolumeStats.empty()) def dummyTracing: SkeletonTracing = SkeletonTracingDefaults.createInstance - def mergeEditableMappings(tracingsWithIds: List[(SkeletonTracing, String)], userToken: Option[String]): Fox[String] = + def mergeEditableMappings(tracingsWithIds: List[(SkeletonTracing, String)])(implicit tc: TokenContext): Fox[String] = Fox.empty } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index 726de0f8af9..381f3420b5a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox @@ -33,20 +34,19 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, with FullMeshHelper with LazyLogging { - def loadFor(token: Option[String], annotationId: String, tracingId: String, fullMeshRequest: FullMeshRequest)( - implicit ec: ExecutionContext): Fox[Array[Byte]] = + def loadFor(annotationId: String, tracingId: String, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Array[Byte]] 
= for { - tracing <- volumeTracingService.find(annotationId, tracingId, userToken = token) ?~> "tracing.notFound" + tracing <- volumeTracingService.find(annotationId, tracingId) ?~> "tracing.notFound" data <- if (fullMeshRequest.meshFileName.isDefined) - loadFullMeshFromMeshfile(token, tracing, tracingId, fullMeshRequest) - else loadFullMeshFromAdHoc(token, tracing, annotationId, tracingId, fullMeshRequest) + loadFullMeshFromMeshfile(tracing, tracingId, fullMeshRequest) + else loadFullMeshFromAdHoc(tracing, annotationId, tracingId, fullMeshRequest) } yield data - private def loadFullMeshFromMeshfile( - token: Option[String], - tracing: VolumeTracing, - tracingId: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + private def loadFullMeshFromMeshfile(tracing: VolumeTracing, tracingId: String, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Array[Byte]] = for { remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(tracing, tracingId) baseMappingName <- volumeTracingService.baseMappingName(tracing) @@ -55,24 +55,23 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, editableMappingTracingId = Some(tracingId), mappingType = Some("HDF5")) else fullMeshRequest - array <- remoteDatastoreClient.loadFullMeshStl(token, remoteFallbackLayer, fullMeshRequestAdapted) + array <- remoteDatastoreClient.loadFullMeshStl(remoteFallbackLayer, fullMeshRequestAdapted) } yield array - private def loadFullMeshFromAdHoc(token: Option[String], - tracing: VolumeTracing, - annotationId: String, - tracingId: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + private def loadFullMeshFromAdHoc( + tracing: VolumeTracing, + annotationId: String, + tracingId: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] = for { mag <- fullMeshRequest.mag.toFox ?~> 
"mag.neededForAdHoc" _ <- bool2Fox(tracing.resolutions.contains(vec3IntToProto(mag))) ?~> "mag.notPresentInTracing" before = Instant.now - voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId, token) ?~> "voxelSize.failedToFetch" + voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId) ?~> "voxelSize.failedToFetch" verticesForChunks <- if (tracing.hasSegmentIndex.getOrElse(false)) - getAllAdHocChunksWithSegmentIndex(token, annotationId, tracing, tracingId, mag, voxelSize, fullMeshRequest) + getAllAdHocChunksWithSegmentIndex(annotationId, tracing, tracingId, mag, voxelSize, fullMeshRequest) else getAllAdHocChunksWithNeighborLogic( - token, tracing, annotationId, tracingId, @@ -88,15 +87,14 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, } yield array private def getAllAdHocChunksWithSegmentIndex( - token: Option[String], annotationId: String, tracing: VolumeTracing, tracingId: String, mag: Vec3Int, voxelSize: VoxelSize, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[List[Array[Float]]] = + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Array[Float]]] = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId, userToken = token) + fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId) mappingName <- volumeTracingService.baseMappingName(tracing) bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( @@ -108,8 +106,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, mappingName = mappingName, editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId), fullMeshRequest.additionalCoordinates, - AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - token + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) ) bucketPositions = 
bucketPositionsRaw.values .map(vec3IntFromProto) @@ -129,13 +126,12 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, fullMeshRequest.additionalCoordinates, findNeighbors = false ) - loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, annotationId, tracingId) + loadMeshChunkFromAdHoc(tracing, adHocMeshRequest, annotationId, tracingId) } allVertices = vertexChunksWithNeighbors.map(_._1) } yield allVertices - private def getAllAdHocChunksWithNeighborLogic(token: Option[String], - tracing: VolumeTracing, + private def getAllAdHocChunksWithNeighborLogic(tracing: VolumeTracing, annotationId: String, tracingId: String, mag: Vec3Int, @@ -145,7 +141,8 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, chunkSize: Vec3Int, visited: collection.mutable.Set[VoxelPosition] = collection.mutable.Set[VoxelPosition]())( - implicit ec: ExecutionContext): Fox[List[Array[Float]]] = + implicit ec: ExecutionContext, + tc: TokenContext): Fox[List[Array[Float]]] = for { topLeft <- topLeftOpt.toFox ?~> "seedPosition.neededForAdHoc" adHocMeshRequest = WebknossosAdHocMeshRequest( @@ -159,16 +156,11 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, fullMeshRequest.additionalCoordinates ) _ = visited += topLeft - (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(token, - tracing, - adHocMeshRequest, - annotationId, - tracingId) + (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(tracing, adHocMeshRequest, annotationId, tracingId) nextPositions: List[VoxelPosition] = generateNextTopLeftsFromNeighbors(topLeft, neighbors, chunkSize, visited) _ = visited ++= nextPositions neighborVerticesNested <- Fox.serialCombined(nextPositions) { position: VoxelPosition => - getAllAdHocChunksWithNeighborLogic(token, - tracing, + getAllAdHocChunksWithNeighborLogic(tracing, annotationId, tracingId, mag, @@ -181,12 +173,11 @@ class TSFullMeshService @Inject()(volumeTracingService: 
VolumeTracingService, allVertices: List[Array[Float]] = vertices +: neighborVerticesNested.flatten } yield allVertices - private def loadMeshChunkFromAdHoc(token: Option[String], - tracing: VolumeTracing, + private def loadMeshChunkFromAdHoc(tracing: VolumeTracing, adHocMeshRequest: WebknossosAdHocMeshRequest, annotationId: String, - tracingId: String): Fox[(Array[Float], List[Int])] = + tracingId: String)(implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = if (tracing.getHasEditableMapping) - editableMappingService.createAdHocMesh(tracing, tracingId, adHocMeshRequest, token) - else volumeTracingService.createAdHocMesh(annotationId, tracingId, adHocMeshRequest, token) + editableMappingService.createAdHocMesh(tracing, tracingId, adHocMeshRequest) + else volumeTracingService.createAdHocMesh(annotationId, tracingId, adHocMeshRequest) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala index 755cc665464..9d0d35cf2e3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexBuffer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto @@ -33,7 +34,7 @@ class VolumeSegmentIndexBuffer(tracingId: String, remoteDatastoreClient: TSRemoteDatastoreClient, fallbackLayer: Option[RemoteFallbackLayer], additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String]) + tc: TokenContext) extends KeyValueStoreImplicits with SegmentIndexKeyHelper 
with ProtoGeometryImplicits @@ -86,12 +87,7 @@ class VolumeSegmentIndexBuffer(tracingId: String, .fillEmpty(ListOfVec3IntProto.of(Seq())) data <- fallbackLayer match { case Some(layer) if fossilDbData.length == 0 => - remoteDatastoreClient.querySegmentIndex(layer, - segmentId, - mag, - mappingName, - editableMappingTracingId, - userToken) + remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId)(tc) case _ => Fox.successful(fossilDbData.values.map(vec3IntFromProto)) } } yield ListOfVec3IntProto(data.map(vec3IntToProto)) @@ -168,13 +164,8 @@ class VolumeSegmentIndexBuffer(tracingId: String, fileBucketPositions <- fallbackLayer match { case Some(layer) => for { - fileBucketPositionsOpt <- Fox.runIf(missesSoFar.nonEmpty)( - remoteDatastoreClient.querySegmentIndexForMultipleSegments(layer, - missesSoFar, - mag, - mappingName, - editableMappingTracingId, - userToken)) + fileBucketPositionsOpt <- Fox.runIf(missesSoFar.nonEmpty)(remoteDatastoreClient + .querySegmentIndexForMultipleSegments(layer, missesSoFar, mag, mappingName, editableMappingTracingId)(tc)) fileBucketPositions = fileBucketPositionsOpt.getOrElse(Seq()) _ = fileBucketPositions.map { case (segmentId, positions) => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala index 150c4938bb6..a88c863f7ff 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import 
com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.box2Fox @@ -25,11 +26,11 @@ import net.liftweb.common.Box.tryo import scala.concurrent.ExecutionContext object VolumeSegmentIndexService { - def canHaveSegmentIndex(remoteDatastoreClient: TSRemoteDatastoreClient, - fallbackLayer: Option[RemoteFallbackLayer], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Boolean] = + def canHaveSegmentIndex(remoteDatastoreClient: TSRemoteDatastoreClient, fallbackLayer: Option[RemoteFallbackLayer])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Boolean] = fallbackLayer match { - case Some(layer) => remoteDatastoreClient.hasSegmentIndexFile(layer, userToken) + case Some(layer) => remoteDatastoreClient.hasSegmentIndexFile(layer) case None => Fox.successful(true) } } @@ -158,17 +159,17 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore bucketList <- addEmptyFallback(bucketListBox) } yield bucketList - def getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( - fallbackLayer: Option[RemoteFallbackLayer], - tracingId: String, - segmentId: Long, - mag: Vec3Int, - version: Option[Long] = None, - mappingName: Option[String], - editableMappingTracingId: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = + def getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer(fallbackLayer: Option[RemoteFallbackLayer], + tracingId: String, + segmentId: Long, + mag: Vec3Int, + version: Option[Long] = None, + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[ListOfVec3IntProto] = for { bucketListBox <- getSegmentToBucketIndex(fallbackLayer, tracingId, @@ -178,8 +179,7 @@ class 
VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore mappingName, editableMappingTracingId, additionalCoordinates, - additionalAxes, - userToken).futureBox + additionalAxes).futureBox bucketList <- addEmptyFallback(bucketListBox) } yield bucketList @@ -191,17 +191,17 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore case Empty => Fox.successful(ListOfVec3IntProto(Seq.empty)) } - private def getSegmentToBucketIndex( - fallbackLayerOpt: Option[RemoteFallbackLayer], - tracingId: String, - segmentId: Long, - mag: Vec3Int, - version: Option[Long], - mappingName: Option[String], - editableMappingTracingId: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - additionalAxes: Option[Seq[AdditionalAxis]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[ListOfVec3IntProto] = + private def getSegmentToBucketIndex(fallbackLayerOpt: Option[RemoteFallbackLayer], + tracingId: String, + segmentId: Long, + mag: Vec3Int, + version: Option[Long], + mappingName: Option[String], + editableMappingTracingId: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]], + additionalAxes: Option[Seq[AdditionalAxis]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[ListOfVec3IntProto] = for { fromMutableIndex <- getSegmentToBucketIndexFromFossilDB(tracingId, segmentId, @@ -211,12 +211,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore additionalAxes).fillEmpty(ListOfVec3IntProto.of(Seq())) fromFileIndex <- fallbackLayerOpt match { // isEmpty is not the same as length == 0 here :( case Some(fallbackLayer) if fromMutableIndex.length == 0 => - getSegmentToBucketIndexFromFile(fallbackLayer, - segmentId, - mag, - mappingName, - editableMappingTracingId, - userToken) // additional coordinates not supported, see #7556 + getSegmentToBucketIndexFromFile(fallbackLayer, segmentId, mag, mappingName, editableMappingTracingId) // additional 
coordinates not supported, see #7556 case _ => Fox.successful(Seq.empty) } combined = fromMutableIndex.values.map(vec3IntFromProto) ++ fromFileIndex @@ -237,8 +232,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore segmentId: Long, mag: Vec3Int, mappingName: Option[String], - editableMappingTracingId: Option[String], - userToken: Option[String]) = - remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId, userToken) + editableMappingTracingId: Option[String])(implicit tc: TokenContext) = + remoteDatastoreClient.querySegmentIndex(layer, segmentId, mag, mappingName, editableMappingTracingId) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index a185e2b3735..6ad05f26680 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -26,14 +27,14 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci segmentId: Long, mag: Vec3Int, mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = calculateSegmentVolume( 
segmentId, mag, additionalCoordinates, - getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates, userToken), - getTypedDataForBucketPosition(annotationId, tracingId, userToken) + getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates), + getTypedDataForBucketPosition(annotationId, tracingId) ) def getSegmentBoundingBox(annotationId: String, @@ -41,42 +42,38 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci segmentId: Long, mag: Vec3Int, mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(implicit ec: ExecutionContext): Fox[BoundingBox] = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[BoundingBox] = calculateSegmentBoundingBox( segmentId, mag, additionalCoordinates, - getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates, userToken), - getTypedDataForBucketPosition(annotationId, tracingId, userToken) + getBucketPositions(annotationId, tracingId, mappingName, additionalCoordinates), + getTypedDataForBucketPosition(annotationId, tracingId) ) - private def getTypedDataForBucketPosition(annotationId: String, tracingId: String, userToken: Option[String])( + private def getTypedDataForBucketPosition(annotationId: String, tracingId: String)( bucketPosition: Vec3Int, mag: Vec3Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]]) = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext) = for { - tracing <- volumeTracingService.find(annotationId, tracingId, userToken = userToken) ?~> "tracing.notFound" - bucketData <- getVolumeDataForPositions(tracing, - tracingId, - mag, - Seq(bucketPosition), - additionalCoordinates, - userToken) + tracing <- volumeTracingService.find(annotationId, tracingId) ?~> "tracing.notFound" + bucketData <- getVolumeDataForPositions(tracing, tracingId, mag, 
Seq(bucketPosition), additionalCoordinates) dataTyped: Array[UnsignedInteger] = UnsignedIntegerArray.fromByteArray( bucketData, elementClassFromProto(tracing.elementClass)) } yield dataTyped - private def getBucketPositions( - annotationId: String, - tracingId: String, - mappingName: Option[String], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String])(segmentId: Long, mag: Vec3Int)(implicit ec: ExecutionContext) = + private def getBucketPositions(annotationId: String, + tracingId: String, + mappingName: Option[String], + additionalCoordinates: Option[Seq[AdditionalCoordinate]])( + segmentId: Long, + mag: Vec3Int)(implicit ec: ExecutionContext, tc: TokenContext) = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId, userToken) - tracing <- volumeTracingService.find(annotationId, tracingId, userToken = userToken) ?~> "tracing.notFound" + fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId) + tracing <- volumeTracingService.find(annotationId, tracingId) ?~> "tracing.notFound" additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) allBucketPositions: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( @@ -88,8 +85,7 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci mappingName, editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId), additionalCoordinates, - additionalAxes, - userToken + additionalAxes ) } yield allBucketPositions @@ -97,8 +93,7 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci tracingId: String, mag: Vec3Int, bucketPositions: Seq[Vec3Int], - additionalCoordinates: Option[Seq[AdditionalCoordinate]], - userToken: Option[String]): Fox[Array[Byte]] = { + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext): Fox[Array[Byte]] = { val dataRequests = 
bucketPositions.map { position => WebknossosDataRequest( @@ -113,8 +108,8 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci }.toList for { (data, _) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, dataRequests, userToken) - else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true, userToken) + editableMappingService.volumeData(tracing, tracingId, dataRequests) + else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true) } yield data } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 3aa03f8be6d..0f3e8d06d5c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -216,22 +216,20 @@ trait VolumeTracingBucketHelper } } - private def loadFallbackBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition): Fox[Array[Byte]] = { + private def loadFallbackBucket(layer: VolumeTracingLayer, bucket: BucketPosition): Fox[Array[Byte]] = { val dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, - cubeSize = dataLayer.lengthOfUnderlyingCubes(bucket.mag), + cubeSize = layer.lengthOfUnderlyingCubes(bucket.mag), fourBit = None, - applyAgglomerate = dataLayer.tracing.mappingName, + applyAgglomerate = layer.tracing.mappingName, version = None, additionalCoordinates = None ) for { - remoteFallbackLayer <- dataLayer.volumeTracingService - .remoteFallbackLayerFromVolumeTracing(dataLayer.tracing, 
dataLayer.name) - (unmappedData, indices) <- dataLayer.volumeTracingService.getFallbackDataFromDatastore(remoteFallbackLayer, - List(dataRequest), - dataLayer.userToken) + remoteFallbackLayer <- layer.volumeTracingService.remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.name) + (unmappedData, indices) <- layer.volumeTracingService + .getFallbackDataFromDatastore(remoteFallbackLayer, List(dataRequest))(ec, layer.tokenContext) unmappedDataOrEmpty <- if (indices.isEmpty) Fox.successful(unmappedData) else Fox.empty } yield unmappedDataOrEmpty } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 25f59bb4bda..97cf49ed688 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} @@ -77,13 +78,13 @@ trait VolumeTracingDownsampling protected def volumeSegmentIndexClient: FossilDBClient - protected def downsampleWithLayer(annotationId: String, - tracingId: String, - oldTracingId: String, - tracing: VolumeTracing, - dataLayer: VolumeTracingLayer, - tracingService: VolumeTracingService, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[List[Vec3Int]] = { + protected def downsampleWithLayer( + annotationId: String, + tracingId: String, + oldTracingId: String, + tracing: VolumeTracing, + dataLayer: VolumeTracingLayer, + 
tracingService: VolumeTracingService)(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Vec3Int]] = { val bucketVolume = 32 * 32 * 32 for { _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." @@ -106,15 +107,15 @@ trait VolumeTracingDownsampling dataLayer) requiredMag } - fallbackLayer <- tracingService.getFallbackLayer(annotationId, oldTracingId, userToken) // remote wk does not know the new id yet - tracing <- tracingService.find(annotationId, tracingId, userToken = userToken) ?~> "tracing.notFound" + fallbackLayer <- tracingService.getFallbackLayer(annotationId, oldTracingId) // remote wk does not know the new id yet + tracing <- tracingService.find(annotationId, tracingId) ?~> "tracing.notFound" segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, tracing.version, tracingService.remoteDatastoreClient, fallbackLayer, dataLayer.additionalAxes, - userToken) + tc) _ <- Fox.serialCombined(updatedBucketsMutable.toList) { bucketPosition: BucketPosition => for { _ <- saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), tracing.version) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index f8d620e1405..e22563365ee 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -75,7 +76,7 @@ case class VolumeTracingLayer( 
isTemporaryTracing: Boolean = false, includeFallbackDataIfAvailable: Boolean = false, tracing: VolumeTracing, - userToken: Option[String], + tokenContext: TokenContext, additionalAxes: Option[Seq[AdditionalAxis]] )(implicit val volumeDataStore: FossilDBClient, implicit val volumeDataCache: TemporaryVolumeDataStore, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index b20bf1525ff..6f54ca67529 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} @@ -122,14 +123,13 @@ class VolumeTracingService @Inject()( def applyBucketMutatingActions(annotationId: String, updateActions: List[BucketMutatingVolumeUpdateAction], - newVersion: Long, - userToken: Option[String]): Fox[Unit] = + newVersion: Long)(implicit tc: TokenContext): Fox[Unit] = for { // warning, may be called multiple times with the same version number (due to transaction management). 
// frontend ensures that each bucket is only updated once per transaction tracingId <- updateActions.headOption.map(_.actionTracingId).toFox - fallbackLayerOpt <- getFallbackLayer(annotationId, tracingId, userToken) - tracing <- find(annotationId, tracingId, userToken = userToken) ?~> "tracing.notFound" + fallbackLayerOpt <- getFallbackLayer(annotationId, tracingId) + tracing <- find(annotationId, tracingId) ?~> "tracing.notFound" segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -137,7 +137,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayerOpt, AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken + tc ) _ <- Fox.serialCombined(updateActions) { case a: UpdateBucketVolumeAction => @@ -150,13 +150,7 @@ class VolumeTracingService @Inject()( if (!tracing.getHasSegmentIndex) { Fox.failure("Cannot delete segment data for annotations without segment index.") } else - deleteSegmentData(annotationId, - tracingId, - tracing, - a, - segmentIndexBuffer, - newVersion, - userToken = userToken) ?~> "Failed to delete segment data." + deleteSegmentData(annotationId, tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to delete segment data." case _ => Fox.failure("Unknown bucket-mutating action.") } _ <- segmentIndexBuffer.flush() @@ -166,7 +160,7 @@ class VolumeTracingService @Inject()( volumeTracing: VolumeTracing, action: UpdateBucketVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, - updateGroupVersion: Long): Fox[VolumeTracing] = + updateGroupVersion: Long)(implicit tc: TokenContext): Fox[VolumeTracing] = for { _ <- assertMagIsValid(volumeTracing, action.mag) ?~> s"Received a mag-${action.mag.toMagLiteral(allowScalar = true)} bucket, which is invalid for this annotation." 
bucketPosition = BucketPosition(action.position.x, @@ -199,13 +193,12 @@ class VolumeTracingService @Inject()( tracingId: String, version: Option[Long] = None, useCache: Boolean = true, - applyUpdates: Boolean = false, - userToken: Option[String]): Fox[VolumeTracing] = + applyUpdates: Boolean = false)(implicit tc: TokenContext): Fox[VolumeTracing] = if (tracingId == TracingIds.dummyTracingId) Fox.successful(dummyTracing) else { for { - annotation <- annotationService.getWithTracings(annotationId, version, List.empty, List(tracingId), userToken) // TODO is applyUpdates still needed? + annotation <- annotationService.getWithTracings(annotationId, version, List.empty, List(tracingId)) // TODO is applyUpdates still needed? tracing <- annotation.getVolume(tracingId) } yield tracing } @@ -223,8 +216,7 @@ class VolumeTracingService @Inject()( volumeTracing: VolumeTracing, a: DeleteSegmentDataVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, - version: Long, - userToken: Option[String]): Fox[VolumeTracing] = + version: Long)(implicit tc: TokenContext): Fox[VolumeTracing] = for { _ <- Fox.successful(()) dataLayer = volumeTracingLayer(tracingId, volumeTracing) @@ -239,7 +231,7 @@ class VolumeTracingService @Inject()( Fox.serialCombined(additionalCoordinateList)(additionalCoordinates => { val mag = vec3IntFromProto(resolution) for { - fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) + fallbackLayer <- getFallbackLayer(annotationId, tracingId) bucketPositionsRaw <- volumeSegmentIndexService.getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( fallbackLayer, tracingId, @@ -249,8 +241,7 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId(volumeTracing, tracingId), additionalCoordinates, - dataLayer.additionalAxes, - userToken + dataLayer.additionalAxes ) bucketPositions = bucketPositionsRaw.values .map(vec3IntFromProto) @@ -294,22 +285,21 @@ class VolumeTracingService @Inject()( tracingId: String, sourceVersion: Long, 
newVersion: Long, - tracing: VolumeTracing, - userToken: Option[String]): Fox[VolumeTracing] = { + tracing: VolumeTracing)(implicit tc: TokenContext): Fox[VolumeTracing] = { val dataLayer = volumeTracingLayer(tracingId, tracing) val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion() for { - fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) + fallbackLayer <- getFallbackLayer(annotationId, tracingId) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, newVersion, remoteDatastoreClient, fallbackLayer, dataLayer.additionalAxes, - userToken) - sourceTracing <- find(annotationId, tracingId, Some(sourceVersion), userToken = userToken) + tc) + sourceTracing <- find(annotationId, tracingId, Some(sourceVersion)) mappingName <- baseMappingName(sourceTracing) _ <- Fox.serialCombined(bucketStream) { case (bucketPosition, dataBeforeRevert, version) => @@ -352,11 +342,9 @@ class VolumeTracingService @Inject()( } yield sourceTracing } - def initializeWithDataMultiple(annotationId: String, - tracingId: String, - tracing: VolumeTracing, - initialData: File, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[Set[Vec3Int]] = + def initializeWithDataMultiple(annotationId: String, tracingId: String, tracing: VolumeTracing, initialData: File)( + implicit mp: MessagesProvider, + tc: TokenContext): Fox[Set[Vec3Int]] = if (tracing.version != 0L) Failure("Tracing has already been edited.") else { @@ -393,7 +381,7 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) destinationDataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) + fallbackLayer <- getFallbackLayer(annotationId, tracingId) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -401,7 +389,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, 
AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken + tc ) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bytes) => for { @@ -427,15 +415,14 @@ class VolumeTracingService @Inject()( tracingId: String, tracing: VolumeTracing, initialData: File, - resolutionRestrictions: ResolutionRestrictions, - userToken: Option[String]): Fox[Set[Vec3Int]] = + resolutionRestrictions: ResolutionRestrictions)(implicit tc: TokenContext): Fox[Set[Vec3Int]] = if (tracing.version != 0L) { Failure("Tracing has already been edited.") } else { val dataLayer = volumeTracingLayer(tracingId, tracing) val savedResolutions = new mutable.HashSet[Vec3Int]() for { - fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) + fallbackLayer <- getFallbackLayer(annotationId, tracingId) mappingName <- baseMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, @@ -444,7 +431,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), - userToken + tc ) _ <- withBucketsFromZip(initialData) { (bucketPosition, bytes) => if (resolutionRestrictions.isForbidden(bucketPosition.mag)) { @@ -474,10 +461,11 @@ class VolumeTracingService @Inject()( } } - def allDataZip(tracingId: String, - tracing: VolumeTracing, - volumeDataZipFormat: VolumeDataZipFormat, - voxelSize: Option[VoxelSize])(implicit ec: ExecutionContext): Fox[Files.TemporaryFile] = { + def allDataZip( + tracingId: String, + tracing: VolumeTracing, + volumeDataZipFormat: VolumeDataZipFormat, + voxelSize: Option[VoxelSize])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Files.TemporaryFile] = { val zipped = temporaryFileCreator.create(tracingId, ".zip") val os = new BufferedOutputStream(new FileOutputStream(new File(zipped.path.toString))) allDataToOutputStream(tracingId, tracing, volumeDataZipFormat, voxelSize, os).map(_ => zipped) @@ -487,7 +475,7 @@ class VolumeTracingService @Inject()( tracing: 
VolumeTracing, volumeDataZipFormmat: VolumeDataZipFormat, voxelSize: Option[VoxelSize], - os: OutputStream)(implicit ec: ExecutionContext): Fox[Unit] = { + os: OutputStream)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = { val dataLayer = volumeTracingLayer(tracingId, tracing) val buckets: Iterator[NamedStream] = volumeDataZipFormmat match { case VolumeDataZipFormat.wkw => @@ -518,11 +506,10 @@ class VolumeTracingService @Inject()( def data(tracingId: String, tracing: VolumeTracing, dataRequests: DataRequestCollection, - includeFallbackDataIfAvailable: Boolean = false, - userToken: Option[String] = None): Fox[(Array[Byte], List[Int])] = + includeFallbackDataIfAvailable: Boolean = false)(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = for { isTemporaryTracing <- isTemporaryTracing(tracingId) - dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing, includeFallbackDataIfAvailable, userToken) + dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing, includeFallbackDataIfAvailable) requests = dataRequests.map(r => DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) data <- binaryDataService.handleDataRequests(requests) @@ -537,13 +524,12 @@ class VolumeTracingService @Inject()( editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], boundingBox: Option[BoundingBox], - mappingName: Option[String], - userToken: Option[String]): Fox[(String, VolumeTracing)] = { + mappingName: Option[String])(implicit tc: TokenContext): Fox[(String, VolumeTracing)] = { val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, fromTask, datasetBoundingBox) val tracingWithResolutionRestrictions = restrictMagList(tracingWithBB, resolutionRestrictions) for { - fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) - hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer, userToken) + fallbackLayer <- 
getFallbackLayer(annotationId, tracingId) + hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer) newTracing = tracingWithResolutionRestrictions.copy( createdTimestamp = System.currentTimeMillis(), editPosition = editPosition.map(vec3IntToProto).getOrElse(tracingWithResolutionRestrictions.editPosition), @@ -556,7 +542,7 @@ class VolumeTracingService @Inject()( ) _ <- bool2Fox(newTracing.resolutions.nonEmpty) ?~> "resolutionRestrictions.tooTight" newId <- save(newTracing, None, newTracing.version) - _ <- duplicateData(annotationId, tracingId, sourceTracing, newId, newTracing, userToken) + _ <- duplicateData(annotationId, tracingId, sourceTracing, newId, newTracing) } yield (newId, newTracing) } @@ -580,14 +566,13 @@ class VolumeTracingService @Inject()( sourceId: String, sourceTracing: VolumeTracing, destinationId: String, - destinationTracing: VolumeTracing, - userToken: Option[String]): Fox[Unit] = + destinationTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = for { isTemporaryTracing <- isTemporaryTracing(sourceId) sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) - fallbackLayer <- getFallbackLayer(annotationId, sourceId, userToken) + fallbackLayer <- getFallbackLayer(annotationId, sourceId) segmentIndexBuffer = new VolumeSegmentIndexBuffer( destinationId, volumeSegmentIndexClient, @@ -595,7 +580,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, AdditionalAxis.fromProtosAsOpt(sourceTracing.additionalAxes), - userToken + tc ) mappingName <- baseMappingName(sourceTracing) _ <- Fox.serialCombined(buckets) { @@ -619,18 +604,18 @@ class VolumeTracingService @Inject()( _ <- segmentIndexBuffer.flush() } yield () - private def volumeTracingLayer(tracingId: String, - tracing: 
VolumeTracing, - isTemporaryTracing: Boolean = false, - includeFallbackDataIfAvailable: Boolean = false, - userToken: Option[String] = None): VolumeTracingLayer = + private def volumeTracingLayer( + tracingId: String, + tracing: VolumeTracing, + isTemporaryTracing: Boolean = false, + includeFallbackDataIfAvailable: Boolean = false)(implicit tc: TokenContext): VolumeTracingLayer = VolumeTracingLayer( name = tracingId, isTemporaryTracing = isTemporaryTracing, volumeTracingService = this, includeFallbackDataIfAvailable = includeFallbackDataIfAvailable, tracing = tracing, - userToken = userToken, + tokenContext = tc, additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) ) @@ -665,35 +650,26 @@ class VolumeTracingService @Inject()( toCache) } yield id - def downsample(annotationId: String, - tracingId: String, - oldTracingId: String, - tracing: VolumeTracing, - userToken: Option[String]): Fox[Unit] = + def downsample(annotationId: String, tracingId: String, oldTracingId: String, tracing: VolumeTracing)( + implicit tc: TokenContext): Fox[Unit] = for { resultingResolutions <- downsampleWithLayer(annotationId, tracingId, oldTracingId, tracing, volumeTracingLayer(tracingId, tracing), - this, - userToken) + this) _ <- updateResolutionList(tracingId, tracing, resultingResolutions.toSet) } yield () def volumeBucketsAreEmpty(tracingId: String): Boolean = volumeDataStore.getMultipleKeys(None, Some(tracingId), limit = Some(1))(toBox).isEmpty - def createAdHocMesh(annotationId: String, - tracingId: String, - request: WebknossosAdHocMeshRequest, - userToken: Option[String]): Fox[(Array[Float], List[Int])] = + def createAdHocMesh(annotationId: String, tracingId: String, request: WebknossosAdHocMeshRequest)( + implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = for { - tracing <- find(annotationId: String, tracingId, userToken = userToken) ?~> "tracing.notFound" - segmentationLayer = volumeTracingLayer(tracingId, - tracing, - includeFallbackDataIfAvailable 
= true, - userToken = userToken) + tracing <- find(annotationId: String, tracingId) ?~> "tracing.notFound" + segmentationLayer = volumeTracingLayer(tracingId, tracing, includeFallbackDataIfAvailable = true) adHocMeshRequest = AdHocMeshRequest( None, segmentationLayer, @@ -708,9 +684,9 @@ class VolumeTracingService @Inject()( result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield result - def findData(annotationId: String, tracingId: String, userToken: Option[String]): Fox[Option[Vec3Int]] = + def findData(annotationId: String, tracingId: String)(implicit tc: TokenContext): Fox[Option[Vec3Int]] = for { - tracing <- find(annotationId: String, tracingId, userToken = userToken) ?~> "tracing.notFound" + tracing <- find(annotationId: String, tracingId) ?~> "tracing.notFound" volumeLayer = volumeTracingLayer(tracingId, tracing) bucketStream = volumeLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketPosOpt = if (bucketStream.hasNext) { @@ -794,8 +770,8 @@ class VolumeTracingService @Inject()( case (None, None) => None } - private def bucketStreamFromSelector(selector: TracingSelector, - tracing: VolumeTracing): Iterator[(BucketPosition, Array[Byte])] = { + private def bucketStreamFromSelector(selector: TracingSelector, tracing: VolumeTracing)( + implicit tc: TokenContext): Iterator[(BucketPosition, Array[Byte])] = { val dataLayer = volumeTracingLayer(selector.tracingId, tracing) dataLayer.bucketProvider.bucketStream(Some(tracing.version)) } @@ -804,8 +780,7 @@ class VolumeTracingService @Inject()( tracings: Seq[VolumeTracing], newId: String, newVersion: Long, - toCache: Boolean, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[MergedVolumeStats] = { + toCache: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = { val elementClass = tracings.headOption.map(_.elementClass).getOrElse(elementClassToProto(ElementClass.uint8)) val resolutionSets = new mutable.HashSet[Set[Vec3Int]]() @@ -856,14 
+831,14 @@ class VolumeTracingService @Inject()( elementClass) mergedAdditionalAxes <- Fox.box2Fox(AdditionalAxis.mergeAndAssertSameAdditionalAxes(tracings.map(t => AdditionalAxis.fromProtosAsOpt(t.additionalAxes)))) - fallbackLayer <- getFallbackLayer("dummyAnnotationId", tracingSelectors.head.tracingId, userToken) // TODO annotation id from selectors + fallbackLayer <- getFallbackLayer("dummyAnnotationId", tracingSelectors.head.tracingId) // TODO annotation id from selectors segmentIndexBuffer = new VolumeSegmentIndexBuffer(newId, volumeSegmentIndexClient, newVersion, remoteDatastoreClient, fallbackLayer, mergedAdditionalAxes, - userToken) + tc) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) => for { _ <- saveBucket(newId, elementClass, bucketPosition, bucketBytes, newVersion, toCache, mergedAdditionalAxes) @@ -886,14 +861,13 @@ class VolumeTracingService @Inject()( tracingId: String, tracing: VolumeTracing, currentVersion: Long, - userToken: Option[String], - dryRun: Boolean): Fox[Option[Int]] = { + dryRun: Boolean)(implicit tc: TokenContext): Fox[Option[Int]] = { var processedBucketCount = 0 for { isTemporaryTracing <- isTemporaryTracing(tracingId) sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) + fallbackLayer <- getFallbackLayer(annotationId, tracingId) mappingName <- baseMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, @@ -901,7 +875,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, sourceDataLayer.additionalAxes, - userToken) + tc) _ <- Fox.serialCombined(buckets) { case (bucketPosition, bucketData) => processedBucketCount += 1 @@ -929,14 +903,12 @@ class VolumeTracingService @Inject()( } yield Some(processedBucketCount) } - def 
checkIfSegmentIndexMayBeAdded(tracingId: String, tracing: VolumeTracing, userToken: Option[String])( - implicit ec: ExecutionContext): Fox[Boolean] = + def checkIfSegmentIndexMayBeAdded(tracingId: String, tracing: VolumeTracing)(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Boolean] = for { fallbackLayerOpt <- Fox.runIf(tracing.fallbackLayer.isDefined)( remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - canHaveSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, - fallbackLayerOpt, - userToken) + canHaveSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayerOpt) alreadyHasSegmentIndex = tracing.hasSegmentIndex.getOrElse(false) } yield canHaveSegmentIndex && !alreadyHasSegmentIndex @@ -944,8 +916,7 @@ class VolumeTracingService @Inject()( tracingId: String, tracing: VolumeTracing, zipFile: File, - currentVersion: Int, - userToken: Option[String])(implicit mp: MessagesProvider): Fox[Long] = + currentVersion: Int)(implicit mp: MessagesProvider, tc: TokenContext): Fox[Long] = if (currentVersion != tracing.version) Fox.failure("version.mismatch") else { @@ -972,7 +943,7 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) dataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(annotationId, tracingId, userToken) + fallbackLayer <- getFallbackLayer(annotationId, tracingId) mappingName <- baseMappingName(tracing) segmentIndexBuffer <- Fox.successful( new VolumeSegmentIndexBuffer(tracingId, @@ -981,7 +952,7 @@ class VolumeTracingService @Inject()( remoteDatastoreClient, fallbackLayer, dataLayer.additionalAxes, - userToken)) + tc)) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) => for { _ <- saveBucket(volumeLayer, bucketPosition, bucketBytes, tracing.version + 1) @@ -1020,7 +991,7 @@ class VolumeTracingService @Inject()( def dummyTracing: VolumeTracing = ??? 
- def mergeEditableMappings(tracingsWithIds: List[(VolumeTracing, String)], userToken: Option[String]): Fox[String] = + def mergeEditableMappings(tracingsWithIds: List[(VolumeTracing, String)])(implicit tc: TokenContext): Fox[String] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => @@ -1029,7 +1000,7 @@ class VolumeTracingService @Inject()( _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" editableMappingIds <- Fox.serialCombined(tracingsWithIds)(tracingWithId => tracingWithId._1.mappingName) _ <- bool2Fox(editableMappingIds.length == tracingsWithIds.length) ?~> "Not all volume tracings have editable mappings" - newEditableMappingId <- editableMappingService.merge(editableMappingIds, remoteFallbackLayer, userToken) + newEditableMappingId <- editableMappingService.merge(editableMappingIds, remoteFallbackLayer) } yield newEditableMappingId } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty @@ -1037,16 +1008,15 @@ class VolumeTracingService @Inject()( Fox.failure("Cannot merge tracings with and without editable mappings") } - def getFallbackLayer(annotationId: String, - tracingId: String, - userToken: Option[String]): Fox[Option[RemoteFallbackLayer]] = - fallbackLayerCache.getOrLoad((annotationId, tracingId, userToken), - t => getFallbackLayerFromWebknossos(t._1, t._2, t._3)) + def getFallbackLayer(annotationId: String, tracingId: String)( + implicit tc: TokenContext): Fox[Option[RemoteFallbackLayer]] = + fallbackLayerCache.getOrLoad((annotationId, tracingId, tc.userTokenOpt), + t => getFallbackLayerFromWebknossos(t._1, t._2)) - private def getFallbackLayerFromWebknossos(annotationId: String, tracingId: String, userToken: Option[String]) = + private def getFallbackLayerFromWebknossos(annotationId: String, 
tracingId: String)(implicit tc: TokenContext) = Fox[Option[RemoteFallbackLayer]] { for { - tracing <- find(annotationId, tracingId, userToken = userToken) + tracing <- find(annotationId, tracingId) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) dataSourceId = dataSource.id fallbackLayerName = tracing.fallbackLayer diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 1c7a3c5ab00..9e9a44a8253 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -5,6 +5,7 @@ # Health endpoint GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health +# Annotations (concerns AnnotationProto, not annotation info as stored in postgres) POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(token: Option[String], annotationId: String) GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(token: Option[String], annotationId: String, version: Option[Long]) POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(token: Option[String], annotationId: String) From de717c70d2d1902f457fa9aed0eee58d97da7bea Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 17 Sep 2024 14:30:43 +0200 Subject: [PATCH 061/361] use tracingId directly for editableMappings --- .../annotation/AnnotationWithTracings.scala | 6 ++ .../annotation/TSAnnotationService.scala | 23 ++++++- .../controllers/TracingController.scala | 10 +-- .../controllers/VolumeTracingController.scala | 15 ++--- .../tracings/TracingService.scala | 3 +- .../EditableMappingService.scala | 65 +++++++++---------- .../skeleton/SkeletonTracingService.scala | 3 +- 
.../volume/VolumeTracingService.scala | 12 ++-- 8 files changed, 84 insertions(+), 53 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 2a1eda1d2dc..aad111d9e9b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -39,6 +39,12 @@ case class AnnotationWithTracings( } } yield volumeTracing + def volumesIdsThatHaveEditableMapping: List[String] = + tracingsById.view.flatMap { + case (id, Right(vt: VolumeTracing)) if vt.getHasEditableMapping => Some(id) + case _ => None + }.toList + def getEditableMappingInfo(tracingId: String): Box[EditableMappingInfo] = for { (info, _) <- editableMappingsByTracingId.get(tracingId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c0b5b191620..1d472a89cf5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -9,6 +9,7 @@ import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappin import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingService, EditableMappingUpdateAction, EditableMappingUpdater } @@ -150,9 +151,29 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: 
TSRemoteWebknossosCl updates, requestedSkeletonTracingIds, requestedVolumeTracingIds) ?~> "findTracingsForUpdates.failed" - updated <- applyUpdates(annotationWithTracings, annotationId, updates, targetVersion) ?~> "applyUpdates.inner.failed" + annotationWithTracingsAndMappings <- findEditableMappingsForUpdates(annotationWithTracings, updates) + updated <- applyUpdates(annotationWithTracingsAndMappings, annotationId, updates, targetVersion) ?~> "applyUpdates.inner.failed" } yield updated + private def findEditableMappingsForUpdates( // TODO integrate with findTracings? + annotationWithTracings: AnnotationWithTracings, + updates: List[UpdateAction])(implicit ec: ExecutionContext) = { + val editableMappingIds = annotationWithTracings.volumesIdsThatHaveEditableMapping + // TODO intersect with editable mapping updates? + for { + editableMappingInfos <- Fox.serialCombined(editableMappingIds) { editableMappingId => + tracingDataStore.editableMappingsInfo.get(editableMappingId, version = Some(annotationWithTracings.version))( + fromProtoBytes[EditableMappingInfo]) + } + } yield + annotationWithTracings.copy( + editableMappingsByTracingId = editableMappingInfos + .map(keyValuePair => (keyValuePair.key, (keyValuePair.value, editableMappingUpdaterFor(keyValuePair.value)))) + .toMap) + } + + def editableMappingUpdaterFor(editableMappingInfo: EditableMappingInfo): EditableMappingUpdater = ??? 
// TODO + private def findTracingsForUpdates( annotation: AnnotationProto, updates: List[UpdateAction], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 133453f00b8..f157621f7ad 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -106,13 +106,13 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) case _ => None } - newId = tracingService.generateTracingId + newTracingId = tracingService.generateTracingId mergedVolumeStats <- tracingService.mergeVolumeData(request.body.flatten, tracingsWithIds.map(_._1), - newId, + newTracingId, newVersion = 0L, toCache = !persist) - newEditableMappingIdBox <- tracingService.mergeEditableMappings(tracingsWithIds).futureBox + newEditableMappingIdBox <- tracingService.mergeEditableMappings(newTracingId, tracingsWithIds).futureBox newEditableMappingIdOpt <- newEditableMappingIdBox match { case Full(newEditableMappingId) => Fox.successful(Some(newEditableMappingId)) case Empty => Fox.successful(None) @@ -120,8 +120,8 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } mergedTracing <- Fox.box2Fox( tracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) - _ <- tracingService.save(mergedTracing, Some(newId), version = 0, toCache = !persist) - } yield Ok(Json.toJson(newId)) + _ <- tracingService.save(mergedTracing, Some(newTracingId), version = 0, toCache = !persist) + } yield Ok(Json.toJson(newTracingId)) } } } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 4eb936fc829..ca53733ba43 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -221,11 +221,13 @@ class VolumeTracingController @Inject()( boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - newEditableMappingId <- Fox.runIf(tracing.getHasEditableMapping)( - editableMappingService.duplicate(tracing.mappingName, version = None, remoteFallbackLayerOpt)) + newTracingId = tracingService.generateTracingId + _ <- Fox.runIf(tracing.getHasEditableMapping)(editableMappingService + .duplicate(tracing.mappingName, newTracingId, version = None, remoteFallbackLayerOpt)) (newId, newTracing) <- tracingService.duplicate( annotationId, tracingId, + newTracingId, tracing, fromTask.getOrElse(false), datasetBoundingBox, @@ -233,7 +235,7 @@ class VolumeTracingController @Inject()( editPositionParsed, editRotationParsed, boundingBoxParsed, - newEditableMappingId + mappingName = None ) _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(annotationId, newId, tracingId, newTracing)) } yield Ok(Json.toJson(newId)) @@ -377,9 +379,8 @@ class VolumeTracingController @Inject()( tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" _ <- assertMappingIsNotLocked(tracing) _ <- bool2Fox(tracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" - (editableMappingId, editableMappingInfo) <- editableMappingService.create( - baseMappingName = tracingMappingName) - 
volumeUpdate = UpdateMappingNameVolumeAction(Some(editableMappingId), + editableMappingInfo <- editableMappingService.create(tracingId, baseMappingName = tracingMappingName) + volumeUpdate = UpdateMappingNameVolumeAction(Some(tracingId), isEditable = Some(true), isLocked = Some(true), actionTracingId = tracingId, @@ -399,7 +400,6 @@ class VolumeTracingController @Inject()( 0)) ) infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingId = editableMappingId, editableMappingInfo = editableMappingInfo, version = Some(0L)) } yield Ok(infoJson) @@ -480,7 +480,6 @@ class VolumeTracingController @Inject()( mappingName <- tracing.mappingName.toFox editableMappingInfo <- editableMappingService.getInfoNEW(annotationId, tracingId, version) infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingId = mappingName, editableMappingInfo = editableMappingInfo, version = version) } yield Ok(infoJson) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 9d0291c6c51..ed8428eb7aa 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -171,5 +171,6 @@ trait TracingService[T <: GeneratedMessage] newVersion: Long, toCache: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] - def mergeEditableMappings(tracingsWithIds: List[(T, String)])(implicit tc: TokenContext): Fox[String] + def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(T, String)])( + implicit tc: TokenContext): Fox[String] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index e82f75cb1e3..ef8671a611f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -121,15 +121,12 @@ class EditableMappingService @Inject()( private lazy val agglomerateToGraphCache: AlfuCache[(String, Long, Long), AgglomerateGraph] = AlfuCache(maxCapacity = 50) - def infoJson(tracingId: String, - editableMappingInfo: EditableMappingInfo, - editableMappingId: String, - version: Option[Long]): Fox[JsObject] = + def infoJson(tracingId: String, editableMappingInfo: EditableMappingInfo, version: Option[Long]): Fox[JsObject] = for { - version <- getClosestMaterializableVersionOrZero(editableMappingId, version) + version <- getClosestMaterializableVersionOrZero(tracingId, version) } yield Json.obj( - "mappingName" -> editableMappingId, + "mappingName" -> tracingId, // TODO remove? 
"version" -> version, "tracingId" -> tracingId, "baseMappingName" -> editableMappingInfo.baseMappingName, @@ -137,36 +134,39 @@ class EditableMappingService @Inject()( "createdTimestamp" -> editableMappingInfo.createdTimestamp ) - def create(baseMappingName: String): Fox[(String, EditableMappingInfo)] = { - val newId = generateId + def create(tracingId: String, baseMappingName: String): Fox[EditableMappingInfo] = { val newEditableMappingInfo = EditableMappingInfo( baseMappingName = baseMappingName, createdTimestamp = Instant.now.epochMillis, largestAgglomerateId = 0L ) for { - _ <- tracingDataStore.editableMappingsInfo.put(newId, 0L, toProtoBytes(newEditableMappingInfo)) - } yield (newId, newEditableMappingInfo) + _ <- tracingDataStore.editableMappingsInfo.put(tracingId, 0L, toProtoBytes(newEditableMappingInfo)) + } yield newEditableMappingInfo } def duplicate(editableMappingIdOpt: Option[String], + newTracingId: String, version: Option[Long], - remoteFallbackLayerBox: Box[RemoteFallbackLayer])(implicit tc: TokenContext): Fox[String] = + remoteFallbackLayerBox: Box[RemoteFallbackLayer])(implicit tc: TokenContext): Fox[Unit] = for { editableMappingId <- editableMappingIdOpt ?~> "duplicate on editable mapping without id" remoteFallbackLayer <- remoteFallbackLayerBox ?~> "duplicate on editable mapping without remote fallback layer" editableMappingInfoAndVersion <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer) - newIdAndInfoV0 <- create(editableMappingInfoAndVersion._1.baseMappingName) - newId = newIdAndInfoV0._1 + newIdAndInfoV0 <- create(newTracingId, editableMappingInfoAndVersion._1.baseMappingName) newVersion = editableMappingInfoAndVersion._2 - _ <- tracingDataStore.editableMappingsInfo.put(newId, newVersion, toProtoBytes(editableMappingInfoAndVersion._1)) - _ <- duplicateSegmentToAgglomerate(editableMappingId, newId, newVersion) - _ <- duplicateAgglomerateToGraph(editableMappingId, newId, newVersion) + _ <- 
tracingDataStore.editableMappingsInfo.put(newTracingId, + newVersion, + toProtoBytes(editableMappingInfoAndVersion._1)) + _ <- duplicateSegmentToAgglomerate(editableMappingId, newTracingId, newVersion) + _ <- duplicateAgglomerateToGraph(editableMappingId, newTracingId, newVersion) updateActionsWithVersions <- getUpdateActionsWithVersions(editableMappingId, editableMappingInfoAndVersion._2, 0L) _ <- Fox.serialCombined(updateActionsWithVersions) { updateActionsWithVersion: (Long, List[UpdateAction]) => - tracingDataStore.editableMappingUpdates.put(newId, updateActionsWithVersion._1, updateActionsWithVersion._2) + tracingDataStore.editableMappingUpdates.put(newTracingId, + updateActionsWithVersion._1, + updateActionsWithVersion._2) } - } yield newId + } yield () private def duplicateSegmentToAgglomerate(editableMappingId: String, newId: String, newVersion: Long): Fox[Unit] = { val iterator = @@ -689,29 +689,28 @@ class EditableMappingService @Inject()( neighborNodes } - def merge(editableMappingIds: List[String], remoteFallbackLayer: RemoteFallbackLayer)( + def merge(newTracingId: String, tracingIds: List[String], remoteFallbackLayer: RemoteFallbackLayer)( implicit tc: TokenContext): Fox[String] = for { - firstMappingId <- editableMappingIds.headOption.toFox + firstMappingId <- tracingIds.headOption.toFox before = Instant.now - newMappingId <- duplicate(Some(firstMappingId), version = None, Some(remoteFallbackLayer)) - _ <- Fox.serialCombined(editableMappingIds.tail)(editableMappingId => - mergeInto(newMappingId, editableMappingId, remoteFallbackLayer)) - _ = logger.info(s"Merging ${editableMappingIds.length} editable mappings took ${Instant.since(before)}") - } yield newMappingId + _ <- duplicate(Some(firstMappingId), newTracingId, version = None, Some(remoteFallbackLayer)) + _ <- Fox.serialCombined(tracingIds.tail)(editableMappingId => + mergeInto(newTracingId, editableMappingId, remoteFallbackLayer)) + _ = logger.info(s"Merging ${tracingIds.length} editable 
mappings took ${Instant.since(before)}") + } yield newTracingId // read as: merge source into target (mutate target) - private def mergeInto(targetEditableMappingId: String, - sourceEditableMappingId: String, - remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Unit] = + private def mergeInto(targetTracingId: String, sourceTracingId: String, remoteFallbackLayer: RemoteFallbackLayer)( + implicit tc: TokenContext): Fox[Unit] = for { - targetNewestVersion <- getClosestMaterializableVersionOrZero(targetEditableMappingId, None) - sourceNewestMaterializedWithVersion <- getInfoAndActualVersion(sourceEditableMappingId, None, remoteFallbackLayer) + targetNewestVersion <- getClosestMaterializableVersionOrZero(targetTracingId, None) + sourceNewestMaterializedWithVersion <- getInfoAndActualVersion(sourceTracingId, None, remoteFallbackLayer) sourceNewestVersion = sourceNewestMaterializedWithVersion._2 - updateActionsWithVersions <- getUpdateActionsWithVersions(sourceEditableMappingId, sourceNewestVersion, 0L) + updateActionsWithVersions <- getUpdateActionsWithVersions(sourceTracingId, sourceNewestVersion, 0L) updateActionsToApply = updateActionsWithVersions.map(_._2).reverse.flatten updater = new EditableMappingUpdater( - targetEditableMappingId, + targetTracingId, sourceNewestMaterializedWithVersion._1.baseMappingName, targetNewestVersion, targetNewestVersion + sourceNewestVersion, @@ -724,7 +723,7 @@ class EditableMappingService @Inject()( ) _ <- updater.applyUpdatesAndSave(sourceNewestMaterializedWithVersion._1, updateActionsToApply) _ <- Fox.serialCombined(updateActionsWithVersions) { updateActionsWithVersion => - tracingDataStore.editableMappingUpdates.put(targetEditableMappingId, + tracingDataStore.editableMappingUpdates.put(targetTracingId, updateActionsWithVersion._1 + targetNewestVersion, updateActionsWithVersion._2) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index aaff678f9c3..ad512b77dd7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -133,6 +133,7 @@ class SkeletonTracingService @Inject()( def dummyTracing: SkeletonTracing = SkeletonTracingDefaults.createInstance - def mergeEditableMappings(tracingsWithIds: List[(SkeletonTracing, String)])(implicit tc: TokenContext): Fox[String] = + def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(SkeletonTracing, String)])( + implicit tc: TokenContext): Fox[String] = Fox.empty } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 6f54ca67529..4882e4fc21e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -517,6 +517,7 @@ class VolumeTracingService @Inject()( def duplicate(annotationId: String, tracingId: String, + newTracingId: String, sourceTracing: VolumeTracing, fromTask: Boolean, datasetBoundingBox: Option[BoundingBox], @@ -535,13 +536,15 @@ class VolumeTracingService @Inject()( editPosition = editPosition.map(vec3IntToProto).getOrElse(tracingWithResolutionRestrictions.editPosition), editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracingWithResolutionRestrictions.editRotation), boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(tracingWithResolutionRestrictions.boundingBox), - mappingName = 
mappingName.orElse(tracingWithResolutionRestrictions.mappingName), + mappingName = mappingName.orElse( + if (sourceTracing.getHasEditableMapping) Some(newTracingId) + else tracingWithResolutionRestrictions.mappingName), version = 0, // Adding segment index on duplication if the volume tracing allows it. This will be used in duplicateData hasSegmentIndex = Some(hasSegmentIndex) ) _ <- bool2Fox(newTracing.resolutions.nonEmpty) ?~> "resolutionRestrictions.tooTight" - newId <- save(newTracing, None, newTracing.version) + newId <- save(newTracing, Some(newTracingId), newTracing.version) _ <- duplicateData(annotationId, tracingId, sourceTracing, newId, newTracing) } yield (newId, newTracing) } @@ -991,7 +994,8 @@ class VolumeTracingService @Inject()( def dummyTracing: VolumeTracing = ??? - def mergeEditableMappings(tracingsWithIds: List[(VolumeTracing, String)])(implicit tc: TokenContext): Fox[String] = + def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(VolumeTracing, String)])( + implicit tc: TokenContext): Fox[String] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => @@ -1000,7 +1004,7 @@ class VolumeTracingService @Inject()( _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" editableMappingIds <- Fox.serialCombined(tracingsWithIds)(tracingWithId => tracingWithId._1.mappingName) _ <- bool2Fox(editableMappingIds.length == tracingsWithIds.length) ?~> "Not all volume tracings have editable mappings" - newEditableMappingId <- editableMappingService.merge(editableMappingIds, remoteFallbackLayer) + newEditableMappingId <- editableMappingService.merge(newTracingId, editableMappingIds, remoteFallbackLayer) } yield newEditableMappingId } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty From 
ba68b72a6eeaef5cef973cb2477e91b9af75f2cc Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 18 Sep 2024 10:13:32 +0200 Subject: [PATCH 062/361] address editable mappings by tracing id --- conf/messages | 1 + .../annotation/TSAnnotationService.scala | 6 +- .../controllers/TracingController.scala | 12 +- .../controllers/VolumeTracingController.scala | 22 ++- .../tracings/TracingService.scala | 2 +- .../EditableMappingLayer.scala | 6 +- .../EditableMappingService.scala | 182 +++++++++--------- .../EditableMappingUpdater.scala | 54 +++--- .../skeleton/SkeletonTracingService.scala | 2 +- .../volume/VolumeTracingService.scala | 14 +- 10 files changed, 146 insertions(+), 155 deletions(-) diff --git a/conf/messages b/conf/messages index ae9f4836e2e..761a81b3f9f 100644 --- a/conf/messages +++ b/conf/messages @@ -187,6 +187,7 @@ annotation.volume.invalidLargestSegmentId=Cannot create tasks with fallback segm annotation.volume.resolutionRestrictionsTooTight=Task type resolution restrictions are too tight, resulting annotation has no resolutions. annotation.volume.resolutionsDoNotMatch=Could not merge volume annotations, as their resolutions differ. Please ensure each annotation has the same set of resolutions. annotation.volume.largestSegmentIdExceedsRange=The largest segment id {0} specified for the annotation layer exceeds the range of its data type {1} +annotation.volume.noEditableMapping=This volume tracing does not have an editable mapping (not a “proofreading” annotation layer) annotation.notFound=Annotation could not be found annotation.notFound.considerLoggingIn=Annotation could not be found. If the annotation is not public, you need to log in to see it. 
annotation.invalid=Invalid annotation diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 1d472a89cf5..eeb5b8ccc04 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -158,11 +158,11 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl private def findEditableMappingsForUpdates( // TODO integrate with findTracings? annotationWithTracings: AnnotationWithTracings, updates: List[UpdateAction])(implicit ec: ExecutionContext) = { - val editableMappingIds = annotationWithTracings.volumesIdsThatHaveEditableMapping + val volumeIdsWithEditableMapping = annotationWithTracings.volumesIdsThatHaveEditableMapping // TODO intersect with editable mapping updates? 
for { - editableMappingInfos <- Fox.serialCombined(editableMappingIds) { editableMappingId => - tracingDataStore.editableMappingsInfo.get(editableMappingId, version = Some(annotationWithTracings.version))( + editableMappingInfos <- Fox.serialCombined(volumeIdsWithEditableMapping) { volumeTracingId => + tracingDataStore.editableMappingsInfo.get(volumeTracingId, version = Some(annotationWithTracings.version))( fromProtoBytes[EditableMappingInfo]) } } yield diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index f157621f7ad..4125161aaf1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -112,11 +112,13 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C newTracingId, newVersion = 0L, toCache = !persist) - newEditableMappingIdBox <- tracingService.mergeEditableMappings(newTracingId, tracingsWithIds).futureBox - newEditableMappingIdOpt <- newEditableMappingIdBox match { - case Full(newEditableMappingId) => Fox.successful(Some(newEditableMappingId)) - case Empty => Fox.successful(None) - case f: Failure => f.toFox + mergeEditableMappingsResultBox <- tracingService + .mergeEditableMappings(newTracingId, tracingsWithIds) + .futureBox + newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { + case Full(()) => Fox.successful(Some(newTracingId)) + case Empty => Fox.successful(None) + case f: Failure => f.toFox } mergedTracing <- Fox.box2Fox( tracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index ca53733ba43..3270a11a992 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -222,8 +222,8 @@ class VolumeTracingController @Inject()( remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) newTracingId = tracingService.generateTracingId - _ <- Fox.runIf(tracing.getHasEditableMapping)(editableMappingService - .duplicate(tracing.mappingName, newTracingId, version = None, remoteFallbackLayerOpt)) + _ <- Fox.runIf(tracing.getHasEditableMapping)( + editableMappingService.duplicate(tracingId, newTracingId, version = None, remoteFallbackLayerOpt)) (newId, newTracing) <- tracingService.duplicate( annotationId, tracingId, @@ -418,7 +418,7 @@ class VolumeTracingController @Inject()( tracing <- tracingService.find(annotationId, tracingId) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - edges <- editableMappingService.agglomerateGraphMinCut(request.body, remoteFallbackLayer) + edges <- editableMappingService.agglomerateGraphMinCut(tracingId, request.body, remoteFallbackLayer) } yield Ok(Json.toJson(edges)) } } @@ -432,9 +432,11 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - (segmentId, 
edges) <- editableMappingService.agglomerateGraphNeighbors(request.body, remoteFallbackLayer) + (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(tracingId, + request.body, + remoteFallbackLayer) } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) } } @@ -448,7 +450,7 @@ class VolumeTracingController @Inject()( for { tracing <- tracingService.find(annotationId, tracingId) mappingName <- tracing.mappingName.toFox - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) _ <- bool2Fox(request.body.length == 1) ?~> "Editable mapping update request must contain exactly one update group" updateGroup <- request.body.headOption.toFox @@ -477,7 +479,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) - mappingName <- tracing.mappingName.toFox + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) editableMappingInfo <- editableMappingService.getInfoNEW(annotationId, tracingId, version) infoJson <- editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo, @@ -495,17 +497,17 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) - editableMappingId <- tracing.mappingName.toFox + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) (editableMappingInfo, editableMappingVersion) <- editableMappingService.getInfoAndActualVersion( - editableMappingId, + tracingId, requestedVersion = None, remoteFallbackLayer = 
remoteFallbackLayer) relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( request.body.items.toSet, editableMappingInfo, editableMappingVersion, - editableMappingId, + tracingId, remoteFallbackLayer) agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index ed8428eb7aa..f6a1d146e0c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -172,5 +172,5 @@ trait TracingService[T <: GeneratedMessage] toCache: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(T, String)])( - implicit tc: TokenContext): Fox[String] + implicit tc: TokenContext): Fox[Unit] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index f526a4247f2..c46b0ffc44b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -30,13 +30,13 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = { val bucket: BucketPosition = readInstruction.bucket for { - editableMappingId 
<- Fox.successful(layer.name) + tracingId <- Fox.successful(layer.name) _ <- bool2Fox(layer.doesContainBucket(bucket)) remoteFallbackLayer <- layer.editableMappingService .remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.tracingId) // called here to ensure updates are applied (editableMappingInfo, editableMappingVersion) <- layer.editableMappingService.getInfoAndActualVersion( - editableMappingId, + tracingId, requestedVersion = None, remoteFallbackLayer = remoteFallbackLayer)(layer.tokenContext) dataRequest: WebknossosDataRequest = WebknossosDataRequest( @@ -57,7 +57,7 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP segmentIds, editableMappingInfo, editableMappingVersion, - editableMappingId, + tracingId, remoteFallbackLayer)(layer.tokenContext) mappedData: Array[Byte] <- layer.editableMappingService.mapData(unmappedDataTyped, relevantMapping, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index ef8671a611f..dcf2a0bbfef 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -41,7 +41,6 @@ import play.api.libs.json.{JsObject, Json, OFormat} import java.nio.file.Paths import java.util -import java.util.UUID import scala.concurrent.ExecutionContext import scala.concurrent.duration._ import scala.jdk.CollectionConverters.CollectionHasAsScala @@ -56,15 +55,14 @@ case class MinCutParameters( segmentId1: Long, segmentId2: Long, mag: Vec3Int, - agglomerateId: Long, - editableMappingId: String + agglomerateId: Long ) object MinCutParameters { implicit val jsonFormat: OFormat[MinCutParameters] = 
Json.format[MinCutParameters] } -case class NeighborsParameters(segmentId: Long, mag: Vec3Int, agglomerateId: Long, editableMappingId: String) +case class NeighborsParameters(segmentId: Long, mag: Vec3Int, agglomerateId: Long) object NeighborsParameters { implicit val jsonFormat: OFormat[NeighborsParameters] = Json.format[NeighborsParameters] @@ -107,8 +105,6 @@ class EditableMappingService @Inject()( val defaultSegmentToAgglomerateChunkSize: Int = 64 * 1024 // max. 1 MiB chunks (two 8-byte numbers per element) - private def generateId: String = UUID.randomUUID.toString - val binaryDataService = new BinaryDataService(Paths.get(""), None, None, None, None, None) adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) private val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService @@ -145,22 +141,17 @@ class EditableMappingService @Inject()( } yield newEditableMappingInfo } - def duplicate(editableMappingIdOpt: Option[String], + def duplicate(sourceTracingId: String, newTracingId: String, version: Option[Long], remoteFallbackLayerBox: Box[RemoteFallbackLayer])(implicit tc: TokenContext): Fox[Unit] = for { - editableMappingId <- editableMappingIdOpt ?~> "duplicate on editable mapping without id" remoteFallbackLayer <- remoteFallbackLayerBox ?~> "duplicate on editable mapping without remote fallback layer" - editableMappingInfoAndVersion <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer) - newIdAndInfoV0 <- create(newTracingId, editableMappingInfoAndVersion._1.baseMappingName) - newVersion = editableMappingInfoAndVersion._2 - _ <- tracingDataStore.editableMappingsInfo.put(newTracingId, - newVersion, - toProtoBytes(editableMappingInfoAndVersion._1)) - _ <- duplicateSegmentToAgglomerate(editableMappingId, newTracingId, newVersion) - _ <- duplicateAgglomerateToGraph(editableMappingId, newTracingId, newVersion) - updateActionsWithVersions <- 
getUpdateActionsWithVersions(editableMappingId, editableMappingInfoAndVersion._2, 0L) + (duplicatedInfo, newVersion) <- getInfoAndActualVersion(sourceTracingId, version, remoteFallbackLayer) + _ <- tracingDataStore.editableMappingsInfo.put(newTracingId, newVersion, toProtoBytes(duplicatedInfo)) + _ <- duplicateSegmentToAgglomerate(sourceTracingId, newTracingId, newVersion) + _ <- duplicateAgglomerateToGraph(sourceTracingId, newTracingId, newVersion) + updateActionsWithVersions <- getUpdateActionsWithVersions(sourceTracingId, newVersion, 0L) _ <- Fox.serialCombined(updateActionsWithVersions) { updateActionsWithVersion: (Long, List[UpdateAction]) => tracingDataStore.editableMappingUpdates.put(newTracingId, updateActionsWithVersion._1, @@ -168,9 +159,9 @@ class EditableMappingService @Inject()( } } yield () - private def duplicateSegmentToAgglomerate(editableMappingId: String, newId: String, newVersion: Long): Fox[Unit] = { + private def duplicateSegmentToAgglomerate(sourceTracingId: String, newId: String, newVersion: Long): Fox[Unit] = { val iterator = - new VersionedFossilDbIterator(editableMappingId, + new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsSegmentToAgglomerate, Some(newVersion)) for { @@ -186,9 +177,9 @@ class EditableMappingService @Inject()( } yield () } - private def duplicateAgglomerateToGraph(editableMappingId: String, newId: String, newVersion: Long): Fox[Unit] = { + private def duplicateAgglomerateToGraph(sourceTracingId: String, newId: String, newVersion: Long): Fox[Unit] = { val iterator = - new VersionedFossilDbIterator(editableMappingId, + new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsAgglomerateToGraph, Some(newVersion)) for { @@ -209,16 +200,19 @@ class EditableMappingService @Inject()( tracing <- annotation.getEditableMappingInfo(tracingId) } yield tracing - def getInfo(editableMappingId: String, version: Option[Long] = None, remoteFallbackLayer: RemoteFallbackLayer)( + def 
getInfo(tracingId: String, version: Option[Long] = None, remoteFallbackLayer: RemoteFallbackLayer)( implicit tc: TokenContext): Fox[EditableMappingInfo] = for { - (info, _) <- getInfoAndActualVersion(editableMappingId, version, remoteFallbackLayer) + (info, _) <- getInfoAndActualVersion(tracingId, version, remoteFallbackLayer) } yield info - def getBaseMappingName(editableMappingId: String): Fox[Option[String]] = + def assertTracingHasEditableMapping(tracing: VolumeTracing)(implicit ec: ExecutionContext): Fox[Unit] = + bool2Fox(tracing.getHasEditableMapping) ?~> "annotation.volume.noEditableMapping" + + def getBaseMappingName(tracingId: String): Fox[Option[String]] = for { - desiredVersion <- getClosestMaterializableVersionOrZero(editableMappingId, None) - infoBox <- getClosestMaterialized(editableMappingId, desiredVersion).futureBox + desiredVersion <- getClosestMaterializableVersionOrZero(tracingId, None) + infoBox <- getClosestMaterialized(tracingId, desiredVersion).futureBox } yield infoBox match { case Full(info) => Some(info.value.baseMappingName) @@ -226,34 +220,34 @@ class EditableMappingService @Inject()( } def getInfoAndActualVersion( - editableMappingId: String, + tracingId: String, requestedVersion: Option[Long] = None, remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[(EditableMappingInfo, Long)] = for { - desiredVersion <- getClosestMaterializableVersionOrZero(editableMappingId, requestedVersion) + desiredVersion <- getClosestMaterializableVersionOrZero(tracingId, requestedVersion) materializedInfo <- materializedInfoCache.getOrLoad( - (editableMappingId, desiredVersion), - _ => applyPendingUpdates(editableMappingId, desiredVersion, remoteFallbackLayer)) + (tracingId, desiredVersion), + _ => applyPendingUpdates(tracingId, desiredVersion, remoteFallbackLayer)) } yield (materializedInfo, desiredVersion) - def update(editableMappingId: String, + def update(tracingId: String, updateActionGroup: UpdateActionGroup, newVersion: Long, 
remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Unit] = for { actionsWithTimestamp <- Fox.successful(updateActionGroup.actions.map(_.addTimestamp(updateActionGroup.timestamp))) - _ <- dryApplyUpdates(editableMappingId, newVersion, actionsWithTimestamp, remoteFallbackLayer) ?~> "editableMapping.dryUpdate.failed" - _ <- tracingDataStore.editableMappingUpdates.put(editableMappingId, newVersion, actionsWithTimestamp) + _ <- dryApplyUpdates(tracingId, newVersion, actionsWithTimestamp, remoteFallbackLayer) ?~> "editableMapping.dryUpdate.failed" + _ <- tracingDataStore.editableMappingUpdates.put(tracingId, newVersion, actionsWithTimestamp) } yield () - private def dryApplyUpdates(editableMappingId: String, + private def dryApplyUpdates(tracingId: String, newVersion: Long, updates: List[UpdateAction], remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Unit] = for { - (previousInfo, previousVersion) <- getInfoAndActualVersion(editableMappingId, None, remoteFallbackLayer) + (previousInfo, previousVersion) <- getInfoAndActualVersion(tracingId, None, remoteFallbackLayer) updater = new EditableMappingUpdater( - editableMappingId, + tracingId, previousInfo.baseMappingName, previousVersion, newVersion, @@ -264,20 +258,20 @@ class EditableMappingService @Inject()( tracingDataStore, relyOnAgglomerateIds = updates.length <= 1 ) - updated <- updater.applyUpdatesAndSave(previousInfo, updates, dry = true) ?~> "editableMapping.update.failed" + _ <- updater.applyUpdatesAndSave(previousInfo, updates, dry = true) ?~> "editableMapping.update.failed" } yield () - def applyPendingUpdates(editableMappingId: String, desiredVersion: Long, remoteFallbackLayer: RemoteFallbackLayer)( + def applyPendingUpdates(tracingId: String, desiredVersion: Long, remoteFallbackLayer: RemoteFallbackLayer)( implicit tc: TokenContext): Fox[EditableMappingInfo] = for { - closestMaterializedWithVersion <- getClosestMaterialized(editableMappingId, desiredVersion) + 
closestMaterializedWithVersion <- getClosestMaterialized(tracingId, desiredVersion) updatedEditableMappingInfo: EditableMappingInfo <- if (desiredVersion == closestMaterializedWithVersion.version) Fox.successful(closestMaterializedWithVersion.value) else for { - pendingUpdates <- getPendingUpdates(editableMappingId, closestMaterializedWithVersion.version, desiredVersion) + pendingUpdates <- getPendingUpdates(tracingId, closestMaterializedWithVersion.version, desiredVersion) updater = new EditableMappingUpdater( - editableMappingId, + tracingId, closestMaterializedWithVersion.value.baseMappingName, closestMaterializedWithVersion.version, desiredVersion, @@ -292,31 +286,31 @@ class EditableMappingService @Inject()( } yield updated } yield updatedEditableMappingInfo - private def getClosestMaterialized(editableMappingId: String, + private def getClosestMaterialized(tracingId: String, desiredVersion: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] = - tracingDataStore.editableMappingsInfo.get(editableMappingId, version = Some(desiredVersion))( + tracingDataStore.editableMappingsInfo.get(tracingId, version = Some(desiredVersion))( fromProtoBytes[EditableMappingInfo]) - def getClosestMaterializableVersionOrZero(editableMappingId: String, desiredVersion: Option[Long]): Fox[Long] = - tracingDataStore.editableMappingUpdates.getVersion(editableMappingId, + def getClosestMaterializableVersionOrZero(tracingId: String, desiredVersion: Option[Long]): Fox[Long] = + tracingDataStore.editableMappingUpdates.getVersion(tracingId, version = desiredVersion, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - private def getPendingUpdates(editableMappingId: String, + private def getPendingUpdates(tracingId: String, closestMaterializedVersion: Long, closestMaterializableVersion: Long): Fox[List[UpdateAction]] = if (closestMaterializableVersion == closestMaterializedVersion) { Fox.successful(List.empty) } else { for { - updates <- getUpdateActionsWithVersions(editableMappingId, + 
updates <- getUpdateActionsWithVersions(tracingId, newestVersion = closestMaterializableVersion, oldestVersion = closestMaterializedVersion + 1L) } yield updates.map(_._2).reverse.flatten } - private def getUpdateActionsWithVersions(editableMappingId: String, + private def getUpdateActionsWithVersions(tracingId: String, newestVersion: Long, oldestVersion: Long): Fox[List[(Long, List[UpdateAction])]] = { val batchRanges = batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) @@ -326,7 +320,7 @@ class EditableMappingService @Inject()( val batchTo = batchRange._2 for { res <- tracingDataStore.editableMappingUpdates.getMultipleVersionsAsVersionValueTuple[List[UpdateAction]]( - editableMappingId, + tracingId, Some(batchTo), Some(batchFrom) )(fromJsonBytes[List[UpdateAction]]) @@ -357,42 +351,42 @@ class EditableMappingService @Inject()( } yield voxelAsLong def volumeData(tracing: VolumeTracing, tracingId: String, dataRequests: DataRequestCollection)( - implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = - for { - editableMappingId <- tracing.mappingName.toFox - dataLayer = editableMappingLayer(editableMappingId, tracing, tracingId) - requests = dataRequests.map(r => - DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) - data <- binaryDataService.handleDataRequests(requests) - } yield data + implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { + + val dataLayer = editableMappingLayer(tracingId, tracing, tracingId) + val requests = dataRequests.map(r => + DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) + + binaryDataService.handleDataRequests(requests) + } private def getSegmentToAgglomerateForSegmentIds(segmentIds: Set[Long], - editableMappingId: String, + tracingId: String, version: Long): Fox[Map[Long, Long]] = { val chunkIds = segmentIds.map(_ / defaultSegmentToAgglomerateChunkSize) for { maps: List[Seq[(Long, Long)]] <- 
Fox.serialCombined(chunkIds.toList)(chunkId => - getSegmentToAgglomerateChunkFiltered(editableMappingId, chunkId, version, segmentIds)) + getSegmentToAgglomerateChunkFiltered(tracingId, chunkId, version, segmentIds)) } yield maps.flatten.toMap } - private def getSegmentToAgglomerateChunkFiltered(editableMappingId: String, + private def getSegmentToAgglomerateChunkFiltered(tracingId: String, chunkId: Long, version: Long, segmentIds: Set[Long]): Fox[Seq[(Long, Long)]] = for { - segmentToAgglomerateChunk <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version) + segmentToAgglomerateChunk <- getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version) filtered = segmentToAgglomerateChunk.filter(pair => segmentIds.contains(pair._1)) } yield filtered - def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, + def getSegmentToAgglomerateChunkWithEmptyFallback(tracingId: String, chunkId: Long, version: Long): Fox[Seq[(Long, Long)]] = segmentToAgglomerateChunkCache.getOrLoad( - (editableMappingId, chunkId, version), + (tracingId, chunkId, version), _ => for { - chunkBox: Box[Seq[(Long, Long)]] <- getSegmentToAgglomerateChunk(editableMappingId, chunkId, Some(version)).futureBox + chunkBox: Box[Seq[(Long, Long)]] <- getSegmentToAgglomerateChunk(tracingId, chunkId, Some(version)).futureBox segmentToAgglomerate <- chunkBox match { case Full(chunk) => Fox.successful(chunk) case Empty => Fox.successful(Seq.empty[(Long, Long)]) @@ -401,10 +395,10 @@ class EditableMappingService @Inject()( } yield segmentToAgglomerate ) - private def getSegmentToAgglomerateChunk(editableMappingId: String, + private def getSegmentToAgglomerateChunk(tracingId: String, chunkId: Long, version: Option[Long]): Fox[Seq[(Long, Long)]] = { - val chunkKey = segmentToAgglomerateKey(editableMappingId, chunkId) + val chunkKey = segmentToAgglomerateKey(tracingId, chunkId) getSegmentToAgglomerateChunk(chunkKey, version) } @@ -421,11 +415,11 @@ class 
EditableMappingService @Inject()( segmentIds: Set[Long], editableMapping: EditableMappingInfo, editableMappingVersion: Long, - editableMappingId: String, + tracingId: String, remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Map[Long, Long]] = for { editableMappingForSegmentIds <- getSegmentToAgglomerateForSegmentIds(segmentIds, - editableMappingId, + tracingId, editableMappingVersion) segmentIdsInEditableMapping: Set[Long] = editableMappingForSegmentIds.keySet segmentIdsInBaseMapping: Set[Long] = segmentIds.diff(segmentIdsInEditableMapping) @@ -434,17 +428,16 @@ class EditableMappingService @Inject()( remoteFallbackLayer) } yield editableMappingForSegmentIds ++ baseMappingSubset - def getAgglomerateSkeletonWithFallback(editableMappingId: String, + def getAgglomerateSkeletonWithFallback(tracingId: String, remoteFallbackLayer: RemoteFallbackLayer, agglomerateId: Long)(implicit tc: TokenContext): Fox[Array[Byte]] = for { // called here to ensure updates are applied - editableMappingInfo <- getInfo(editableMappingId, version = None, remoteFallbackLayer) - agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, agglomerateId, remoteFallbackLayer).futureBox + editableMappingInfo <- getInfo(tracingId, version = None, remoteFallbackLayer) + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer).futureBox skeletonBytes <- agglomerateGraphBox match { case Full(agglomerateGraph) => - Fox.successful( - agglomerateGraphToSkeleton(editableMappingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) + Fox.successful(agglomerateGraphToSkeleton(tracingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) case Empty => remoteDatastoreClient.getAgglomerateSkeleton(remoteFallbackLayer, editableMappingInfo.baseMappingName, @@ -453,7 +446,7 @@ class EditableMappingService @Inject()( } } yield skeletonBytes - private def agglomerateGraphToSkeleton(editableMappingId: String, + private def 
agglomerateGraphToSkeleton(tracingId: String, graph: AgglomerateGraph, remoteFallbackLayer: RemoteFallbackLayer, agglomerateId: Long): Array[Byte] = { @@ -477,7 +470,7 @@ class EditableMappingService @Inject()( createdTimestamp = System.currentTimeMillis(), nodes = nodes, edges = skeletonEdges, - name = s"agglomerate $agglomerateId ($editableMappingId)", + name = s"agglomerate $agglomerateId ($tracingId)", `type` = Some(TreeTypeProto.AGGLOMERATE) )) @@ -488,12 +481,14 @@ class EditableMappingService @Inject()( skeleton.toByteArray } - def getBaseSegmentToAgglomerate(mappingName: String, segmentIds: Set[Long], remoteFallbackLayer: RemoteFallbackLayer)( - implicit tc: TokenContext): Fox[Map[Long, Long]] = { + def getBaseSegmentToAgglomerate( + baseMappingName: String, + segmentIds: Set[Long], + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Map[Long, Long]] = { val segmentIdsOrdered = segmentIds.toList for { agglomerateIdsOrdered <- remoteDatastoreClient.getAgglomerateIdsForSegmentIds(remoteFallbackLayer, - mappingName, + baseMappingName, segmentIdsOrdered) } yield segmentIdsOrdered.zip(agglomerateIdsOrdered).toMap } @@ -585,12 +580,12 @@ class EditableMappingService @Inject()( def getAgglomerateGraphForIdWithFallback( mapping: EditableMappingInfo, - editableMappingId: String, + tracingId: String, version: Option[Long], agglomerateId: Long, remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[AgglomerateGraph] = for { - agglomerateGraphBox <- getAgglomerateGraphForId(editableMappingId, agglomerateId, remoteFallbackLayer, version).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer, version).futureBox agglomerateGraph <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph) case Empty => @@ -599,13 +594,13 @@ class EditableMappingService @Inject()( } } yield agglomerateGraph - def agglomerateGraphMinCut(parameters: 
MinCutParameters, remoteFallbackLayer: RemoteFallbackLayer)( + def agglomerateGraphMinCut(tracingId: String, parameters: MinCutParameters, remoteFallbackLayer: RemoteFallbackLayer)( implicit tc: TokenContext): Fox[List[EdgeWithPositions]] = for { // called here to ensure updates are applied - mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer) + mapping <- getInfo(tracingId, version = None, remoteFallbackLayer) agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, - parameters.editableMappingId, + tracingId, None, parameters.agglomerateId, remoteFallbackLayer) @@ -665,13 +660,15 @@ class EditableMappingService @Inject()( ) } - def agglomerateGraphNeighbors(parameters: NeighborsParameters, remoteFallbackLayer: RemoteFallbackLayer)( - implicit tc: TokenContext): Fox[(Long, Seq[NodeWithPosition])] = + def agglomerateGraphNeighbors( + tracingId: String, + parameters: NeighborsParameters, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[(Long, Seq[NodeWithPosition])] = for { // called here to ensure updates are applied - mapping <- getInfo(parameters.editableMappingId, version = None, remoteFallbackLayer) + mapping <- getInfo(tracingId, version = None, remoteFallbackLayer) agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, - parameters.editableMappingId, + tracingId, None, parameters.agglomerateId, remoteFallbackLayer) @@ -690,15 +687,14 @@ class EditableMappingService @Inject()( } def merge(newTracingId: String, tracingIds: List[String], remoteFallbackLayer: RemoteFallbackLayer)( - implicit tc: TokenContext): Fox[String] = + implicit tc: TokenContext): Fox[Unit] = for { - firstMappingId <- tracingIds.headOption.toFox + firstTracingId <- tracingIds.headOption.toFox before = Instant.now - _ <- duplicate(Some(firstMappingId), newTracingId, version = None, Some(remoteFallbackLayer)) - _ <- Fox.serialCombined(tracingIds.tail)(editableMappingId => - mergeInto(newTracingId, 
editableMappingId, remoteFallbackLayer)) + _ <- duplicate(firstTracingId, newTracingId, version = None, Some(remoteFallbackLayer)) + _ <- Fox.serialCombined(tracingIds.tail)(tracingId => mergeInto(newTracingId, tracingId, remoteFallbackLayer)) _ = logger.info(s"Merging ${tracingIds.length} editable mappings took ${Instant.since(before)}") - } yield newTracingId + } yield () // read as: merge source into target (mutate target) private def mergeInto(targetTracingId: String, sourceTracingId: String, remoteFallbackLayer: RemoteFallbackLayer)( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index c2a281f39f9..d0baa685453 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -32,7 +32,7 @@ import scala.jdk.CollectionConverters.CollectionHasAsScala // this results in only one version increment in the db per update group class EditableMappingUpdater( - editableMappingId: String, + tracingId: String, baseMappingName: String, oldVersion: Long, newVersion: Long, @@ -67,7 +67,7 @@ class EditableMappingUpdater( for { _ <- Fox.serialCombined(segmentToAgglomerateBuffer.keys.toList)(flushSegmentToAgglomerateChunk) _ <- Fox.serialCombined(agglomerateToGraphBuffer.keys.toList)(flushAgglomerateGraph) - _ <- tracingDataStore.editableMappingsInfo.put(editableMappingId, newVersion, updatedEditableMappingInfo) + _ <- tracingDataStore.editableMappingsInfo.put(tracingId, newVersion, updatedEditableMappingInfo) } yield () private def flushSegmentToAgglomerateChunk(key: String): Fox[Unit] = { @@ -137,10 +137,10 @@ class EditableMappingUpdater( agglomerateGraph <- 
agglomerateGraphForIdWithFallback(editableMappingInfo, agglomerateId) _ = if (segmentId1 == 0) logger.warn( - s"Split action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") + s"Split action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") _ = if (segmentId2 == 0) logger.warn( - s"Split action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") + s"Split action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") (graph1, graph2) <- tryo(splitGraph(agglomerateId, agglomerateGraph, update, segmentId1, segmentId2)) ?~> s"splitGraph failed while removing edge between segments $segmentId1 and $segmentId2" largestExistingAgglomerateId <- largestAgglomerateId(editableMappingInfo) agglomerateId2 = largestExistingAgglomerateId + 1L @@ -183,12 +183,12 @@ class EditableMappingUpdater( private def agglomerateIdForSegmentId(segmentId: Long)(implicit ec: ExecutionContext): Fox[Long] = { val chunkId = segmentId / editableMappingService.defaultSegmentToAgglomerateChunkSize - val chunkKey = segmentToAgglomerateKey(editableMappingId, chunkId) + val chunkKey = segmentToAgglomerateKey(tracingId, chunkId) val chunkFromBufferOpt = getFromSegmentToAgglomerateBuffer(chunkKey) for { chunk <- Fox.fillOption(chunkFromBufferOpt) { editableMappingService - .getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version = oldVersion) + .getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version = oldVersion) .map(_.toMap) } agglomerateId <- 
chunk.get(segmentId) match { @@ -214,38 +214,35 @@ class EditableMappingUpdater( private def updateSegmentToAgglomerateChunk(agglomerateId: Long, chunkId: Long, segmentIdsToUpdate: Seq[Long])( implicit ec: ExecutionContext): Fox[Unit] = for { - existingChunk: Map[Long, Long] <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId) ?~> "failed to get old segment to agglomerate chunk for updating it" + existingChunk: Map[Long, Long] <- getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId) ?~> "failed to get old segment to agglomerate chunk for updating it" mergedMap = existingChunk ++ segmentIdsToUpdate.map(_ -> agglomerateId).toMap - _ = segmentToAgglomerateBuffer.put(segmentToAgglomerateKey(editableMappingId, chunkId), (mergedMap, false)) + _ = segmentToAgglomerateBuffer.put(segmentToAgglomerateKey(tracingId, chunkId), (mergedMap, false)) } yield () - private def getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId: String, chunkId: Long)( + private def getSegmentToAgglomerateChunkWithEmptyFallback(tracingId: String, chunkId: Long)( implicit ec: ExecutionContext): Fox[Map[Long, Long]] = { - val key = segmentToAgglomerateKey(editableMappingId, chunkId) + val key = segmentToAgglomerateKey(tracingId, chunkId) val fromBufferOpt = getFromSegmentToAgglomerateBuffer(key) Fox.fillOption(fromBufferOpt) { editableMappingService - .getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version = oldVersion) + .getSegmentToAgglomerateChunkWithEmptyFallback(tracingId, chunkId, version = oldVersion) .map(_.toMap) } } private def agglomerateGraphForIdWithFallback(mapping: EditableMappingInfo, agglomerateId: Long)( implicit ec: ExecutionContext): Fox[AgglomerateGraph] = { - val key = agglomerateGraphKey(editableMappingId, agglomerateId) + val key = agglomerateGraphKey(tracingId, agglomerateId) val fromBufferOpt = getFromAgglomerateToGraphBuffer(key) fromBufferOpt.map(Fox.successful(_)).getOrElse { - 
editableMappingService.getAgglomerateGraphForIdWithFallback(mapping, - editableMappingId, - Some(oldVersion), - agglomerateId, - remoteFallbackLayer, - )(tokenContext) + editableMappingService + .getAgglomerateGraphForIdWithFallback(mapping, tracingId, Some(oldVersion), agglomerateId, remoteFallbackLayer, + )(tokenContext) } } private def updateAgglomerateGraph(agglomerateId: Long, graph: AgglomerateGraph): Unit = { - val key = agglomerateGraphKey(editableMappingId, agglomerateId) + val key = agglomerateGraphKey(tracingId, agglomerateId) agglomerateToGraphBuffer.put(key, (graph, false)) } @@ -264,7 +261,7 @@ class EditableMappingUpdater( if (edgesAndAffinitiesMinusOne.length == agglomerateGraph.edges.length) { if (relyOnAgglomerateIds) { logger.warn( - s"Split action for editable mapping $editableMappingId: Edge to remove ($segmentId1 at ${update.segmentPosition1} in mag ${update.mag} to $segmentId2 at ${update.segmentPosition2} in mag ${update.mag} in agglomerate $agglomerateId) already absent. This split becomes a no-op.") + s"Split action for editable mapping $tracingId: Edge to remove ($segmentId1 at ${update.segmentPosition1} in mag ${update.mag} to $segmentId2 at ${update.segmentPosition2} in mag ${update.mag} in agglomerate $agglomerateId) already absent. This split becomes a no-op.") } (agglomerateGraph, emptyAgglomerateGraph) } else { @@ -351,10 +348,10 @@ class EditableMappingUpdater( update.mag)(tokenContext) _ = if (segmentId1 == 0) logger.warn( - s"Merge action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") + s"Merge action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition1} in mag ${update.mag} returned invalid value zero. 
Merging outside of dataset?") _ = if (segmentId2 == 0) logger.warn( - s"Merge action for editable mapping $editableMappingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") + s"Merge action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Merging outside of dataset?") (agglomerateId1, agglomerateId2) <- agglomerateIdsForMergeAction(update, segmentId1, segmentId2) ?~> "Failed to look up agglomerate ids for merge action segments" agglomerateGraph1 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId1) ?~> s"Failed to get agglomerate graph for id $agglomerateId1" agglomerateGraph2 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId2) ?~> s"Failed to get agglomerate graph for id $agglomerateId2" @@ -406,7 +403,7 @@ class EditableMappingUpdater( agglomerateId: Long): Unit = if (!isValid && relyOnAgglomerateIds) { logger.warn( - s"Merge action for editable mapping $editableMappingId: segment $segmentId as looked up at $position in mag $mag is not present in agglomerate $agglomerateId. This merge becomes a no-op" + s"Merge action for editable mapping $tracingId: segment $segmentId as looked up at $position in mag $mag is not present in agglomerate $agglomerateId. 
This merge becomes a no-op" ) } @@ -414,13 +411,12 @@ class EditableMappingUpdater( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = for { _ <- bool2Fox(revertAction.sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" - oldInfo <- editableMappingService.getInfo(editableMappingId, - Some(revertAction.sourceVersion), - remoteFallbackLayer)(tokenContext) + oldInfo <- editableMappingService.getInfo(tracingId, Some(revertAction.sourceVersion), remoteFallbackLayer)( + tokenContext) _ = segmentToAgglomerateBuffer.clear() _ = agglomerateToGraphBuffer.clear() segmentToAgglomerateChunkNewestStream = new VersionedSegmentToAgglomerateChunkIterator( - editableMappingId, + tracingId, tracingDataStore.editableMappingsSegmentToAgglomerate) _ <- Fox.serialCombined(segmentToAgglomerateChunkNewestStream) { case (chunkKey, _, version) => @@ -437,7 +433,7 @@ class EditableMappingUpdater( } else Fox.successful(()) } agglomerateToGraphNewestStream = new VersionedAgglomerateToGraphIterator( - editableMappingId, + tracingId, tracingDataStore.editableMappingsAgglomerateToGraph) _ <- Fox.serialCombined(agglomerateToGraphNewestStream) { case (graphKey, _, version) => @@ -445,7 +441,7 @@ class EditableMappingUpdater( for { agglomerateId <- agglomerateIdFromAgglomerateGraphKey(graphKey) _ <- editableMappingService - .getAgglomerateGraphForId(editableMappingId, + .getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer, Some(revertAction.sourceVersion))(tokenContext) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index ad512b77dd7..d11963ca32a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -134,6 +134,6 @@ class SkeletonTracingService @Inject()( def dummyTracing: SkeletonTracing = SkeletonTracingDefaults.createInstance def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(SkeletonTracing, String)])( - implicit tc: TokenContext): Fox[String] = + implicit tc: TokenContext): Fox[Unit] = Fox.empty } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 4882e4fc21e..69ff4ec495d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -23,11 +23,7 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.services._ -import com.scalableminds.webknossos.tracingstore.annotation.{ - AnnotationWithTracings, - TSAnnotationService, - UpdateActionGroup -} +import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService @@ -995,17 +991,15 @@ class VolumeTracingService @Inject()( def dummyTracing: VolumeTracing = ??? 
def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(VolumeTracing, String)])( - implicit tc: TokenContext): Fox[String] = + implicit tc: TokenContext): Fox[Unit] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" - editableMappingIds <- Fox.serialCombined(tracingsWithIds)(tracingWithId => tracingWithId._1.mappingName) - _ <- bool2Fox(editableMappingIds.length == tracingsWithIds.length) ?~> "Not all volume tracings have editable mappings" - newEditableMappingId <- editableMappingService.merge(newTracingId, editableMappingIds, remoteFallbackLayer) - } yield newEditableMappingId + _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) + } yield () } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty } else { From 0044b29bb84f33811ff571656b40969ca5f6502e Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 18 Sep 2024 10:47:00 +0200 Subject: [PATCH 063/361] editableMappingController --- .../EditableMappingController.scala | 204 ++++++++++++++++++ .../controllers/VolumeTracingController.scala | 190 +--------------- ...alableminds.webknossos.tracingstore.routes | 16 +- 3 files changed, 217 insertions(+), 193 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala new file mode 100644 index 00000000000..f514fc43e19 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -0,0 +1,204 @@ +package com.scalableminds.webknossos.tracingstore.controllers + +import com.google.inject.Inject +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph +import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.controllers.Controller +import com.scalableminds.webknossos.datastore.services.{ + AccessTokenService, + EditableMappingSegmentListResult, + UserAccessRequest +} +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingService, + MinCutParameters, + NeighborsParameters +} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{UpdateMappingNameVolumeAction, VolumeTracingService} +import net.liftweb.common.{Box, Empty, Failure, Full} +import play.api.libs.json.Json +import play.api.mvc.{Action, AnyContent, PlayBodyParsers} + +import scala.concurrent.ExecutionContext + +class EditableMappingController @Inject()(volumeTracingService: VolumeTracingService, + accessTokenService: AccessTokenService, + editableMappingService: EditableMappingService, + annotationTransactionService: AnnotationTransactionService)( + implicit ec: ExecutionContext, + bodyParsers: PlayBodyParsers) + extends Controller { + + def makeMappingEditable(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = + Action.async { implicit request => + log() { + 
accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + for { + tracing <- volumeTracingService.find(annotationId, tracingId) + tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" + _ <- assertMappingIsNotLocked(tracing) + _ <- bool2Fox(volumeTracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" + editableMappingInfo <- editableMappingService.create(tracingId, baseMappingName = tracingMappingName) + volumeUpdate = UpdateMappingNameVolumeAction(Some(tracingId), + isEditable = Some(true), + isLocked = Some(true), + actionTracingId = tracingId, + actionTimestamp = Some(System.currentTimeMillis())) + _ <- annotationTransactionService + .handleUpdateGroups( // TODO replace this route by the update action only? address editable mappings by volume tracing id? + annotationId, + List( + UpdateActionGroup(tracing.version + 1, + System.currentTimeMillis(), + None, + List(volumeUpdate), + None, + None, + "dummyTransactionId", + 1, + 0)) + ) + infoJson <- editableMappingService.infoJson(tracingId = tracingId, + editableMappingInfo = editableMappingInfo, + version = Some(0L)) + } yield Ok(infoJson) + } + } + } + + private def assertMappingIsNotLocked(volumeTracing: VolumeTracing): Fox[Unit] = + bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" + + /*// TODO integrate all of this into annotation update + + def updateEditableMapping(token: Option[String], + annotationId: String, + tracingId: String): Action[List[UpdateActionGroup]] = + Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId)) { + for { + tracing <- tracingService.find(annotationId, tracingId) + mappingName <- tracing.mappingName.toFox + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) + 
_ <- bool2Fox(request.body.length == 1) ?~> "Editable mapping update request must contain exactly one update group" + updateGroup <- request.body.headOption.toFox + _ <- bool2Fox(updateGroup.version == currentVersion + 1) ?~> "version mismatch" + report = TracingUpdatesReport( + annotationId, // TODO integrate all of this into annotation update + timestamps = List(Instant(updateGroup.timestamp)), + statistics = None, + significantChangesCount = updateGroup.actions.length, + viewChangesCount = 0, + tokenContextForRequest.userTokenOpt + ) + _ <- remoteWebknossosClient.reportTracingUpdates(report) + remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + _ <- editableMappingService.update(mappingName, updateGroup, updateGroup.version, remoteFallbackLayer) + } yield Ok + } + } + */ + + def editableMappingInfo(token: Option[String], + annotationId: String, + tracingId: String, + version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + for { + tracing <- volumeTracingService.find(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + editableMappingInfo <- editableMappingService.getInfoNEW(annotationId, tracingId, version) + infoJson <- editableMappingService.infoJson(tracingId = tracingId, + editableMappingInfo = editableMappingInfo, + version = version) + } yield Ok(infoJson) + } + } + } + + def segmentIdsForAgglomerate(token: Option[String], + annotationId: String, + tracingId: String, + agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + for { + tracing <- volumeTracingService.find(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- 
volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService + .getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer) + .futureBox + segmentIds <- agglomerateGraphBox match { + case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) + case Empty => Fox.successful(List.empty) + case f: Failure => f.toFox + } + agglomerateIdIsPresent = agglomerateGraphBox.isDefined + } yield Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) + } + } + } + + def agglomerateIdsForSegments(token: Option[String], annotationId: String, tracingId: String): Action[ListOfLong] = + Action.async(validateProto[ListOfLong]) { implicit request => + log() { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + for { + tracing <- volumeTracingService.find(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + (editableMappingInfo, editableMappingVersion) <- editableMappingService.getInfoAndActualVersion( + tracingId, + requestedVersion = None, + remoteFallbackLayer = remoteFallbackLayer) + relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( + request.body.items.toSet, + editableMappingInfo, + editableMappingVersion, + tracingId, + remoteFallbackLayer) + agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) + } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) + } + } + } + + def agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String): Action[MinCutParameters] = + Action.async(validateJson[MinCutParameters]) { implicit request => + log() { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + for { + tracing <- 
volumeTracingService.find(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + edges <- editableMappingService.agglomerateGraphMinCut(tracingId, request.body, remoteFallbackLayer) + } yield Ok(Json.toJson(edges)) + } + } + } + + def agglomerateGraphNeighbors(token: Option[String], + annotationId: String, + tracingId: String): Action[NeighborsParameters] = + Action.async(validateJson[NeighborsParameters]) { implicit request => + log() { + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + for { + tracing <- volumeTracingService.find(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(tracingId, + request.body, + remoteFallbackLayer) + } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) + } + } + } +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 3270a11a992..48e73e75094 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -5,8 +5,6 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.ExtendedTypes.ExtendedString import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph -import 
com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto import com.scalableminds.webknossos.datastore.helpers.{ @@ -22,23 +20,14 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosDataRequest } import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.datastore.services.{ - EditableMappingSegmentListResult, - FullMeshRequest, - UserAccessRequest -} -import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, UpdateActionGroup} +import com.scalableminds.webknossos.datastore.services.{FullMeshRequest, UserAccessRequest} +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationTransactionService import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ - EditableMappingService, - MinCutParameters, - NeighborsParameters -} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.{ MergedVolumeStats, ResolutionRestrictions, TSFullMeshService, - UpdateMappingNameVolumeAction, VolumeDataZipFormat, VolumeSegmentIndexService, VolumeSegmentStatisticsService, @@ -50,9 +39,8 @@ import com.scalableminds.webknossos.tracingstore.{ TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig, - TracingUpdatesReport } -import net.liftweb.common.{Box, Empty, Failure, Full} +import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile import play.api.libs.json.Json @@ -370,176 +358,6 @@ class VolumeTracingController @Inject()( } } - def makeMappingEditable(token: Option[String], annotationId: String, tracingId: String): 
Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { - for { - tracing <- tracingService.find(annotationId, tracingId) - tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" - _ <- assertMappingIsNotLocked(tracing) - _ <- bool2Fox(tracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" - editableMappingInfo <- editableMappingService.create(tracingId, baseMappingName = tracingMappingName) - volumeUpdate = UpdateMappingNameVolumeAction(Some(tracingId), - isEditable = Some(true), - isLocked = Some(true), - actionTracingId = tracingId, - actionTimestamp = Some(System.currentTimeMillis())) - _ <- annotationTransactionService - .handleUpdateGroups( // TODO replace this route by the update action only? address editable mappings by volume tracing id? - annotationId, - List( - UpdateActionGroup(tracing.version + 1, - System.currentTimeMillis(), - None, - List(volumeUpdate), - None, - None, - "dummyTransactionId", - 1, - 0)) - ) - infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingInfo = editableMappingInfo, - version = Some(0L)) - } yield Ok(infoJson) - } - } - } - - private def assertMappingIsNotLocked(volumeTracing: VolumeTracing): Fox[Unit] = - bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" - - def agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String): Action[MinCutParameters] = - Action.async(validateJson[MinCutParameters]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { - for { - tracing <- tracingService.find(annotationId, tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - edges <- 
editableMappingService.agglomerateGraphMinCut(tracingId, request.body, remoteFallbackLayer) - } yield Ok(Json.toJson(edges)) - } - } - } - - def agglomerateGraphNeighbors(token: Option[String], - annotationId: String, - tracingId: String): Action[NeighborsParameters] = - Action.async(validateJson[NeighborsParameters]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { - for { - tracing <- tracingService.find(annotationId, tracingId) - _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(tracingId, - request.body, - remoteFallbackLayer) - } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) - } - } - } - - def updateEditableMapping(token: Option[String], - annotationId: String, - tracingId: String): Action[List[UpdateActionGroup]] = - Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId)) { - for { - tracing <- tracingService.find(annotationId, tracingId) - mappingName <- tracing.mappingName.toFox - _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) - _ <- bool2Fox(request.body.length == 1) ?~> "Editable mapping update request must contain exactly one update group" - updateGroup <- request.body.headOption.toFox - _ <- bool2Fox(updateGroup.version == currentVersion + 1) ?~> "version mismatch" - report = TracingUpdatesReport( - annotationId, // TODO integrate all of this into annotation update - timestamps = List(Instant(updateGroup.timestamp)), - statistics = None, - significantChangesCount = updateGroup.actions.length, - viewChangesCount = 0, - tokenContextForRequest.userTokenOpt - ) 
- _ <- remoteWebknossosClient.reportTracingUpdates(report) - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - _ <- editableMappingService.update(mappingName, updateGroup, updateGroup.version, remoteFallbackLayer) - } yield Ok - } - } - - def editableMappingInfo(token: Option[String], - annotationId: String, - tracingId: String, - version: Option[Long]): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { - for { - tracing <- tracingService.find(annotationId, tracingId) - _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - editableMappingInfo <- editableMappingService.getInfoNEW(annotationId, tracingId, version) - infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingInfo = editableMappingInfo, - version = version) - } yield Ok(infoJson) - } - } - } - - def editableMappingAgglomerateIdsForSegments(token: Option[String], - annotationId: String, - tracingId: String): Action[ListOfLong] = - Action.async(validateProto[ListOfLong]) { implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { - for { - tracing <- tracingService.find(annotationId, tracingId) - _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - (editableMappingInfo, editableMappingVersion) <- editableMappingService.getInfoAndActualVersion( - tracingId, - requestedVersion = None, - remoteFallbackLayer = remoteFallbackLayer) - relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( - request.body.items.toSet, - editableMappingInfo, - editableMappingVersion, - tracingId, - remoteFallbackLayer) - agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) - } yield 
Ok(ListOfLong(agglomerateIdsSorted).toByteArray) - } - } - } - - def editableMappingSegmentIdsForAgglomerate(token: Option[String], - annotationId: String, - tracingId: String, - agglomerateId: Long): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { - for { - tracing <- tracingService.find(annotationId, tracingId) - mappingName <- tracing.mappingName.toFox - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService - .getAgglomerateGraphForId(mappingName, agglomerateId, remoteFallbackLayer) - .futureBox - segmentIds <- agglomerateGraphBox match { - case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) - case Empty => Fox.successful(List.empty) - case f: Failure => f.toFox - } - agglomerateIdIsPresent = agglomerateGraphBox.isDefined - } yield Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) - } - } - } - def getSegmentVolume(token: Option[String], annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 9e9a44a8253..f9a1e371dcd 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -28,9 +28,6 @@ POST /volume/:annotationId/:tracingId/importVolumeData @c POST /volume/:annotationId/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(token: Option[String], annotationId: String, tracingId: String, dryRun: Boolean) GET /volume/:annotationId/:tracingId/findData 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(token: Option[String], annotationId: String, tracingId: String) GET /volume/:annotationId/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateSkeleton(token: Option[String], annotationId: String, tracingId: String, agglomerateId: Long) -POST /volume/:annotationId/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.makeMappingEditable(token: Option[String], annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateGraphNeighbors(token: Option[String], annotationId: String, tracingId: String) POST /volume/:annotationId/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(token: Option[String], annotationId: String, tracingId: String) POST /volume/:annotationId/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(token: Option[String], annotationId: String, tracingId: String) POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple(token: Option[String]) @@ -38,10 +35,15 @@ POST /volume/mergedFromIds @c POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(token: Option[String], persist: Boolean) # Editable Mappings -POST /mapping/:annotationId/:tracingId/update 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateEditableMapping(token: Option[String], annotationId: String, tracingId: String) -GET /mapping/:annotationId/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingInfo(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) -GET /mapping/:annotationId/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingSegmentIdsForAgglomerate(token: Option[String], annotationId: String, tracingId: String, agglomerateId: Long) -POST /mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingAgglomerateIdsForSegments(token: Option[String], annotationId: String, tracingId: String) +# todo adapt frontend to mapping route prefix +POST /mapping/:annotationId/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(token: Option[String], annotationId: String, tracingId: String) +GET /mapping/:annotationId/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) +GET /mapping/:annotationId/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(token: Option[String], annotationId: String, tracingId: String, agglomerateId: Long) +POST /mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(token: Option[String], annotationId: String, tracingId: String) +# todo adapt frontend to mapping route prefix +POST 
/mapping/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String) +# todo adapt frontend to mapping route prefix +POST /mapping/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(token: Option[String], annotationId: String, tracingId: String) # Zarr endpoints for volume annotations # Zarr version 2 From d705aeb46773dd692a5f9828f032a0de6250aff7 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 18 Sep 2024 11:08:56 +0200 Subject: [PATCH 064/361] editable mapping info does not need version, mappingName anymore. remove dep from EditableMappingService to TSAnnotationService --- frontend/javascripts/admin/admin_rest_api.ts | 4 ++- .../oxalis/model/reducers/save_reducer.ts | 4 +-- .../oxalis/model/sagas/proofread_saga.ts | 11 +++++-- .../oxalis/model/sagas/save_saga.ts | 2 +- .../model/sagas/skeletontracing_saga.ts | 4 +-- .../javascripts/oxalis/view/version_view.tsx | 2 +- frontend/javascripts/types/api_flow_types.ts | 2 -- .../annotation/TSAnnotationService.scala | 8 +++++ .../EditableMappingController.scala | 17 ++++++----- .../controllers/VolumeTracingController.scala | 10 +++---- .../tracings/TracingService.scala | 6 +--- .../EditableMappingService.scala | 29 +++++-------------- .../skeleton/SkeletonTracingService.scala | 5 ---- .../volume/VolumeTracingService.scala | 5 ---- 14 files changed, 47 insertions(+), 62 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index ee6816d8845..e341bf573a1 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1633,7 +1633,9 @@ export function getEditableMappingInfo( tracingId: string, ): Promise { return doWithToken((token) => - 
Request.receiveJSON(`${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/info?token=${token}`), + Request.receiveJSON( + `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/info?token=${token}`, + ), ); } diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 26a9f0e19ce..1fab6d3f403 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -40,9 +40,9 @@ function updateVersion(state: OxalisState, action: SetVersionNumberAction) { version: action.version, }); } else if (action.saveQueueType === "mapping") { - return updateEditableMapping(state, action.tracingId, { + /*return updateEditableMapping(state, action.tracingId, { version: action.version, - }); + });*/ } return state; diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 0fc1a9e5d78..e4e911bf1c6 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -276,13 +276,18 @@ function* createEditableMapping(): Saga { const volumeTracingId = upToDateVolumeTracing.tracingId; const layerName = volumeTracingId; - const serverEditableMapping = yield* call(makeMappingEditable, tracingStoreUrl, annotationId, volumeTracingId); + const serverEditableMapping = yield* call( + makeMappingEditable, + tracingStoreUrl, + annotationId, + volumeTracingId, + ); // The server increments the volume tracing's version by 1 when switching the mapping to an editable one yield* put(setVersionNumberAction(upToDateVolumeTracing.version + 1, "volume", volumeTracingId)); - yield* put(setMappingNameAction(layerName, serverEditableMapping.mappingName, "HDF5")); + yield* put(setMappingNameAction(layerName, serverEditableMapping.tracingId, "HDF5")); yield* put(setHasEditableMappingAction()); 
yield* put(initializeEditableMappingAction(serverEditableMapping)); - return serverEditableMapping.mappingName; + return serverEditableMapping.tracingId; } function* ensureHdf5MappingIsEnabled(layerName: string): Saga { diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 508a62a8e99..d94f8851f83 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -166,7 +166,7 @@ export function* sendSaveRequestToServer(): Saga { const saveQueue = sliceAppropriateBatchCount(fullSaveQueue); let compactedSaveQueue = compactSaveQueue(saveQueue); const tracings = yield* select((state) => - _.compact([state.tracing.skeleton, ...state.tracing.volumes, ...state.tracing.mappings]), + _.compact([state.tracing.skeleton, ...state.tracing.volumes]), ); const version = _.max(tracings.map((t) => t.version)) || 0; const annotationId = yield* select((state) => state.tracing.annotationId); diff --git a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index b83a9531439..91e2d317cdf 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -250,9 +250,7 @@ function* getAgglomerateSkeletonTracing( const annotation = yield* select((state) => state.tracing); const layerInfo = getLayerByName(dataset, layerName); - const editableMapping = annotation.mappings.find( - (mapping) => mapping.mappingName === mappingName, - ); + const editableMapping = annotation.mappings.find((mapping) => mapping.tracingId === mappingName); try { let nmlProtoBuffer; diff --git a/frontend/javascripts/oxalis/view/version_view.tsx b/frontend/javascripts/oxalis/view/version_view.tsx index 8068ad59be6..4a8862b7719 100644 --- a/frontend/javascripts/oxalis/view/version_view.tsx +++ 
b/frontend/javascripts/oxalis/view/version_view.tsx @@ -87,7 +87,7 @@ class VersionView extends React.Component { this.props.tracing, mapping.tracingId, )} (Editable Mapping)`, - key: `${mapping.tracingId}-${mapping.mappingName}`, + key: mapping.tracingId, children: ( "applyUpdates.failed" } yield updated + def getEditableMappingInfo(annotationId: String, tracingId: String, version: Option[Long] = None)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[EditableMappingInfo] = + for { + annotation <- getWithTracings(annotationId, version, List(tracingId), List.empty) + tracing <- annotation.getEditableMappingInfo(tracingId) + } yield tracing + private def applyPendingUpdates(annotation: AnnotationProto, annotationId: String, targetVersionOpt: Option[Long], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index f514fc43e19..a8a9484ba11 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -11,7 +11,11 @@ import com.scalableminds.webknossos.datastore.services.{ EditableMappingSegmentListResult, UserAccessRequest } -import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.annotation.{ + AnnotationTransactionService, + TSAnnotationService, + UpdateActionGroup +} import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingService, MinCutParameters, @@ -25,6 +29,7 @@ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scala.concurrent.ExecutionContext class EditableMappingController @Inject()(volumeTracingService: VolumeTracingService, + 
annotationService: TSAnnotationService, accessTokenService: AccessTokenService, editableMappingService: EditableMappingService, annotationTransactionService: AnnotationTransactionService)( @@ -61,9 +66,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer 1, 0)) ) - infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingInfo = editableMappingInfo, - version = Some(0L)) + infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) } yield Ok(infoJson) } } @@ -113,10 +116,8 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer for { tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - editableMappingInfo <- editableMappingService.getInfoNEW(annotationId, tracingId, version) - infoJson <- editableMappingService.infoJson(tracingId = tracingId, - editableMappingInfo = editableMappingInfo, - version = version) + editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId, version) + infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) } yield Ok(infoJson) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 48e73e75094..7d3d5871167 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -21,7 +21,7 @@ import com.scalableminds.webknossos.datastore.models.{ } import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{FullMeshRequest, 
UserAccessRequest} -import com.scalableminds.webknossos.tracingstore.annotation.AnnotationTransactionService +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.{ @@ -38,7 +38,7 @@ import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, - TracingStoreConfig, + TracingStoreConfig } import net.liftweb.common.Empty import play.api.i18n.Messages @@ -55,7 +55,7 @@ class VolumeTracingController @Inject()( val config: TracingStoreConfig, val remoteDataStoreClient: TSRemoteDatastoreClient, val accessTokenService: TracingStoreAccessTokenService, - annotationTransactionService: AnnotationTransactionService, + annotationService: TSAnnotationService, editableMappingService: EditableMappingService, val slackNotificationService: TSSlackNotificationService, val remoteWebknossosClient: TSRemoteWebknossosClient, @@ -261,12 +261,12 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.webknossos) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") - currentVersion <- tracingService.currentVersion(tracingId) + currentVersion <- annotationService.currentMaterializableVersion(tracingId) before = Instant.now canAddSegmentIndex <- tracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing) processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)(tracingService .addSegmentIndex(annotationId, tracingId, tracing, currentVersion, dryRun)) ?~> "addSegmentIndex.failed" - currentVersionNew <- tracingService.currentVersion(tracingId) + currentVersionNew <- annotationService.currentMaterializableVersion(tracingId) _ <- Fox.runIf(!dryRun)(bool2Fox( 
processedBucketCountOpt.isEmpty || currentVersionNew == currentVersion + 1L) ?~> "Version increment failed. Looks like someone edited the annotation layer in the meantime.") duration = Instant.since(before) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index f6a1d146e0c..ae23b35d8d4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -60,10 +60,6 @@ trait TracingService[T <: GeneratedMessage] private val handledGroupCacheExpiry: FiniteDuration = 24 hours - def currentVersion(tracingId: String): Fox[Long] - - def currentVersion(tracing: T): Long - private def transactionGroupKey(tracingId: String, transactionId: String, transactionGroupIndex: Int, version: Long) = s"transactionGroup___${tracingId}___${transactionId}___${transactionGroupIndex}___$version" @@ -125,7 +121,7 @@ trait TracingService[T <: GeneratedMessage] implicit tc: TokenContext): Fox[List[Option[T]]] = Fox.combined { selectors.map { - case Some(selector) => + case Some(selector) => // TODO TracingSelector needs annotationIds too find("dummyAnnotationid", selector.tracingId, selector.version, useCache, applyUpdates).map(Some(_)) case None => Fox.successful(None) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index dcf2a0bbfef..3345d9d94be 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -22,7 +22,7 @@ import com.scalableminds.webknossos.datastore.services.{ AdHocMeshServiceHolder, BinaryDataService } -import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateAction, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.annotation.{UpdateAction, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, @@ -91,7 +91,6 @@ object NodeWithPosition { class EditableMappingService @Inject()( val tracingDataStore: TracingDataStore, val adHocMeshServiceHolder: AdHocMeshServiceHolder, - annotationService: TSAnnotationService, val remoteDatastoreClient: TSRemoteDatastoreClient, val remoteWebknossosClient: TSRemoteWebknossosClient )(implicit ec: ExecutionContext) @@ -117,18 +116,13 @@ class EditableMappingService @Inject()( private lazy val agglomerateToGraphCache: AlfuCache[(String, Long, Long), AgglomerateGraph] = AlfuCache(maxCapacity = 50) - def infoJson(tracingId: String, editableMappingInfo: EditableMappingInfo, version: Option[Long]): Fox[JsObject] = - for { - version <- getClosestMaterializableVersionOrZero(tracingId, version) - } yield - Json.obj( - "mappingName" -> tracingId, // TODO remove? 
- "version" -> version, - "tracingId" -> tracingId, - "baseMappingName" -> editableMappingInfo.baseMappingName, - "largestAgglomerateId" -> editableMappingInfo.largestAgglomerateId, - "createdTimestamp" -> editableMappingInfo.createdTimestamp - ) + def infoJson(tracingId: String, editableMappingInfo: EditableMappingInfo): JsObject = + Json.obj( + "tracingId" -> tracingId, + "baseMappingName" -> editableMappingInfo.baseMappingName, + "largestAgglomerateId" -> editableMappingInfo.largestAgglomerateId, + "createdTimestamp" -> editableMappingInfo.createdTimestamp + ) def create(tracingId: String, baseMappingName: String): Fox[EditableMappingInfo] = { val newEditableMappingInfo = EditableMappingInfo( @@ -193,13 +187,6 @@ class EditableMappingService @Inject()( } yield () } - def getInfoNEW(annotationId: String, tracingId: String, version: Option[Long] = None)( - implicit tc: TokenContext): Fox[EditableMappingInfo] = - for { - annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), List.empty) - tracing <- annotation.getEditableMappingInfo(tracingId) - } yield tracing - def getInfo(tracingId: String, version: Option[Long] = None, remoteFallbackLayer: RemoteFallbackLayer)( implicit tc: TokenContext): Fox[EditableMappingInfo] = for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index d11963ca32a..e952eb33f6f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -36,11 +36,6 @@ class SkeletonTracingService @Inject()( implicit val tracingCompanion: SkeletonTracing.type = SkeletonTracing - def currentVersion(tracingId: String): Fox[Long] = - 
tracingDataStore.skeletonUpdates.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - - def currentVersion(tracing: SkeletonTracing): Long = tracing.version - def find(annotationId: String, tracingId: String, version: Option[Long] = None, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 69ff4ec495d..7d2f76dc0ed 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -96,11 +96,6 @@ class VolumeTracingService @Inject()( private val fallbackLayerCache: AlfuCache[(String, String, Option[String]), Option[RemoteFallbackLayer]] = AlfuCache( maxCapacity = 100) - override def currentVersion(tracingId: String): Fox[Long] = - tracingDataStore.volumes.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - - override def currentVersion(tracing: VolumeTracing): Long = tracing.version - override protected def updateSegmentIndex( segmentIndexBuffer: VolumeSegmentIndexBuffer, bucketPosition: BucketPosition, From ef0952863e13c2032b3af7ff356bf1d49b5e8d52 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 18 Sep 2024 11:36:20 +0200 Subject: [PATCH 065/361] renamings, construct EditableMappingUpdater in TSAnnotationService --- .../controllers/BinaryDataController.scala | 18 ++--- .../controllers/DSMeshController.scala | 8 +-- .../controllers/DataSourceController.scala | 70 +++++++++---------- .../controllers/ExportsController.scala | 2 +- .../controllers/ZarrStreamingController.scala | 20 +++--- .../services/AccessTokenService.scala | 10 +-- .../annotation/AnnotationWithTracings.scala | 9 ++- .../annotation/TSAnnotationService.scala | 62 +++++++++++++--- 
.../EditableMappingController.scala | 12 ++-- .../SkeletonTracingController.scala | 4 +- .../controllers/TSAnnotationController.scala | 12 ++-- .../controllers/TracingController.scala | 10 +-- .../controllers/VolumeTracingController.scala | 32 ++++----- ...VolumeTracingZarrStreamingController.scala | 22 +++--- .../volume/VolumeTracingService.scala | 2 + 15 files changed, 171 insertions(+), 122 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index 50541f438c9..514ea193faa 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -53,7 +53,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { logTime(slackNotificationService.noticeSlowRequest) { val t = Instant.now for { @@ -95,7 +95,7 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -119,7 +119,7 @@ class BinaryDataController 
@Inject()( datasetName: String, dataLayerName: String ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -141,7 +141,7 @@ class BinaryDataController @Inject()( y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -175,7 +175,7 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -222,7 +222,7 @@ class BinaryDataController @Inject()( dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { 
(dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -242,7 +242,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -283,7 +283,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -301,7 +301,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index 062adc19c8b..0ee4cbfcd67 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -28,7 +28,7 @@ class DSMeshController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetName, dataLayerName) } yield Ok(Json.toJson(meshFiles)) @@ -48,7 +48,7 @@ class DSMeshController @Inject()( targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationId, @@ -79,7 +79,7 @@ class DSMeshController @Inject()( datasetName: String, dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (data, encoding) <- meshFileService.readMeshChunk(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" } yield { @@ -95,7 +95,7 @@ class DSMeshController @Inject()( 
datasetName: String, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { data: Array[Byte] <- fullMeshService.loadFor(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index ff5befccdc3..24b2c4f511f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -68,7 +68,7 @@ class DataSourceController @Inject()( def readInboxDataSource(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => { - accessTokenService.validateAccessForSyncBlock( + accessTokenService.validateAccessFromTokenContextForSyncBlock( UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { // Read directly from file, not from repository to ensure recent changes are seen val dataSource: InboxDataSource = @@ -81,7 +81,7 @@ class DataSourceController @Inject()( } def triggerInboxCheckBlocking(token: Option[String]): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { _ <- dataSourceService.checkInbox(verbose = true) } yield Ok @@ -90,7 +90,7 @@ class DataSourceController @Inject()( def 
reserveUpload(token: Option[String]): Action[ReserveUploadInformation] = Action.async(validateJson[ReserveUploadInformation]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(request.body.organization)) { for { isKnownUpload <- uploadService.isKnownUpload(request.body.uploadId) _ <- if (!isKnownUpload) { @@ -103,7 +103,7 @@ class DataSourceController @Inject()( def getUnfinishedUploads(token: Option[String], organizationName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationName)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationName)) { for { unfinishedUploads <- remoteWebknossosClient.getUnfinishedUploadsForUser(organizationName) unfinishedUploadsWithUploadIds <- uploadService.addUploadIdsToUnfinishedUploads(unfinishedUploads) @@ -115,7 +115,7 @@ class DataSourceController @Inject()( // and it can be put in a webknossos folder where they have access def reserveManualUpload(token: Option[String]): Action[ReserveManualUploadInformation] = Action.async(validateJson[ReserveManualUploadInformation]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organization)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(request.body.organization)) { for { _ <- remoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( @@ -166,7 +166,7 @@ class DataSourceController @Inject()( for { dataSourceId <- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(uploadFileId)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId)) { 
+ result <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(uploadFileId) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -188,7 +188,7 @@ class DataSourceController @Inject()( for { dataSourceId <- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(resumableIdentifier)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId)) { + result <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(resumableIdentifier) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -204,7 +204,7 @@ class DataSourceController @Inject()( for { dataSourceId <- uploadService .getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId)) { + result <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { for { (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "finishUpload.failed" _ <- remoteWebknossosClient.reportUpload(dataSourceId, @@ -224,7 +224,7 @@ class DataSourceController @Inject()( case true => uploadService.getDataSourceIdByUploadId(request.body.uploadId) } dataSourceIdFox.flatMap { dataSourceId => - accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { for { _ <- remoteWebknossosClient.deleteDataSource(dataSourceId) ?~> "dataset.delete.webknossos.failed" _ <- uploadService.cancelUpload(request.body) ?~> "Could not cancel the upload." 
@@ -239,7 +239,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock( + accessTokenService.validateAccessFromTokenContextForSyncBlock( UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { addNoCacheHeaderFallback( Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetName, dataLayerName)))) @@ -252,7 +252,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateList = agglomerateService.exploreAgglomerates(organizationId, datasetName, dataLayerName) @@ -268,7 +268,7 @@ class DataSourceController @Inject()( mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox skeleton <- agglomerateService.generateSkeleton(organizationId, @@ -288,7 +288,7 @@ class DataSourceController @Inject()( mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateGraph <- agglomerateService.generateAgglomerateGraph( @@ -306,7 +306,7 @@ class DataSourceController @Inject()( mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox position <- agglomerateService.positionForSegmentId( @@ -323,7 +323,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox largestAgglomerateId: Long <- agglomerateService @@ -347,7 +347,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Seq[Long] <- agglomerateService @@ -372,7 +372,7 
@@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Array[Long] <- agglomerateService @@ -391,7 +391,7 @@ class DataSourceController @Inject()( def update(token: Option[String], organizationId: String, datasetName: String): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) dataSource <- dataSourceRepository.find(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( @@ -407,7 +407,7 @@ class DataSourceController @Inject()( datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { _ <- bool2Fox(dataSourceRepository.find(DataSourceId(datasetName, organizationId)).isEmpty) ?~> Messages( "dataSource.alreadyPresent") @@ -435,7 +435,7 @@ class DataSourceController @Inject()( def createOrganizationDirectory(token: Option[String], organizationId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock(UserAccessRequest.administrateDataSources(organizationId)) { + 
accessTokenService.validateAccessFromTokenContextForSyncBlock(UserAccessRequest.administrateDataSources(organizationId)) { val newOrganizationDirectory = new File(f"${dataSourceService.dataBaseDir}/$organizationId") newOrganizationDirectory.mkdirs() if (newOrganizationDirectory.isDirectory) @@ -450,7 +450,7 @@ class DataSourceController @Inject()( datasetName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { for { before <- Fox.successful(System.currentTimeMillis()) usedStorageInBytes: List[DirectoryStorageReport] <- storageUsageService.measureStorage(organizationId, @@ -470,7 +470,7 @@ class DataSourceController @Inject()( datasetName: String, layerName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { val (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) = binaryDataServiceHolder.binaryDataService.clearCache(organizationId, datasetName, layerName) val reloadedDataSource = dataSourceService.dataSourceFromDir( @@ -493,7 +493,7 @@ class DataSourceController @Inject()( def deleteOnDisk(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => val dataSourceId = DataSourceId(datasetName, organizationId) - accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { for { _ <- binaryDataServiceHolder.binaryDataService.deleteOnDisk( organizationId, @@ -506,7 +506,7 
@@ class DataSourceController @Inject()( def compose(token: Option[String]): Action[ComposeRequest] = Action.async(validateJson[ComposeRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(request.body.organizationId)) { for { _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)(id => accessTokenService.assertUserAccess( @@ -522,7 +522,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { val connectomeFileNames = connectomeFileService.exploreConnectomeFiles(organizationId, datasetName, dataLayerName) for { @@ -543,7 +543,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[ByAgglomerateIdsRequest] = Action.async(validateJson[ByAgglomerateIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -559,7 +559,7 @@ class DataSourceController @Inject()( dataLayerName: String, direction: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, 
organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -576,7 +576,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -591,7 +591,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -606,7 +606,7 @@ class DataSourceController @Inject()( dataSetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId))) { val segmentIndexFileOpt = segmentIndexFileService.getSegmentIndexFile(organizationId, dataSetName, dataLayerName).toOption Future.successful(Ok(Json.toJson(segmentIndexFileOpt.isDefined))) @@ -623,7 +623,7 @@ class DataSourceController @Inject()( dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { segmentIds <- segmentIdsForAgglomerateIdIfNeeded( organizationId, @@ -660,7 +660,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => for { @@ -692,7 +692,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) volumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -714,7 +714,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) boxes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -732,7 +732,7 @@ class DataSourceController @Inject()( // Called directly by wk side def exploreRemoteDataset(token: Option[String]): Action[ExploreRemoteDatasetRequest] = Action.async(validateJson[ExploreRemoteDatasetRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(request.body.organizationId)) { val reportMutable = ListBuffer[String]() val hasLocalFilesystemRequest = request.body.layerParameters.exists(param => new URI(param.remoteUri).getScheme == DataVaultService.schemeFile) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala index 009ac58d0f5..efdac2ed607 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala @@ -36,7 +36,7 @@ class ExportsController @Inject()(webknossosClient: DSRemoteWebknossosClient, override def allowRemoteOrigin: Boolean = true def download(token: Option[String], jobId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.downloadJobExport(jobId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.downloadJobExport(jobId)) { for { exportProperties <- webknossosClient.getJobExportProperties(jobId) fullPath = 
exportProperties.fullPathIn(dataBaseDir) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index 1f86aef3d08..a04a7d81c40 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -56,7 +56,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -73,7 +73,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -151,7 +151,7 @@ class ZarrStreamingController @Inject()( datasetName: String, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { 
dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ~> NOT_FOUND dataLayers = dataSource.dataLayers @@ -232,7 +232,7 @@ class ZarrStreamingController @Inject()( mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { rawZarrCube(organizationId, datasetName, dataLayerName, mag, coordinates) } } @@ -298,7 +298,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { zArray(organizationId, datasetName, dataLayerName, mag) } } @@ -319,7 +319,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { zarrJsonForMag(organizationId, datasetName, dataLayerName, mag) } } @@ -389,7 +389,7 @@ class ZarrStreamingController @Inject()( mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { dataLayerMagFolderContents(organizationId, 
datasetName, dataLayerName, mag, zarrVersion) } } @@ -450,7 +450,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { dataLayerFolderContents(organizationId, datasetName, dataLayerName, zarrVersion) } } @@ -507,7 +507,7 @@ class ZarrStreamingController @Inject()( datasetName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND @@ -554,7 +554,7 @@ class ZarrStreamingController @Inject()( organizationId: String, datasetName: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessForSyncBlock( + accessTokenService.validateAccessFromTokenContextForSyncBlock( UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { Ok(zGroupJson) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 52b56268077..b44d93c0468 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -67,14 +67,14 @@ 
trait AccessTokenService { private lazy val accessAnswersCache: AlfuCache[(UserAccessRequest, Option[String]), UserAccessAnswer] = AlfuCache(timeToLive = AccessExpiration, timeToIdle = AccessExpiration) - def validateAccessForSyncBlock(accessRequest: UserAccessRequest)(block: => Result)(implicit ec: ExecutionContext, - tc: TokenContext): Fox[Result] = - validateAccess(accessRequest) { + def validateAccessFromTokenContextForSyncBlock(accessRequest: UserAccessRequest)( + block: => Result)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Result] = + validateAccessFromTokenContext(accessRequest) { Future.successful(block) } - def validateAccess(accessRequest: UserAccessRequest)(block: => Future[Result])(implicit ec: ExecutionContext, - tc: TokenContext): Fox[Result] = + def validateAccessFromTokenContext(accessRequest: UserAccessRequest)( + block: => Future[Result])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Result] = for { userAccessAnswer <- hasUserAccess(accessRequest) ?~> "Failed to check data access, token may be expired, consider reloading." 
result <- executeBlockOnPositiveAnswer(userAccessAnswer, block) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index aad111d9e9b..ced731c2e80 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -50,6 +50,11 @@ case class AnnotationWithTracings( (info, _) <- editableMappingsByTracingId.get(tracingId) } yield info + def getEditableMappingUpdater(tracingId: String): Option[EditableMappingUpdater] = + for { + (_, updater) <- editableMappingsByTracingId.get(tracingId) + } yield updater + def version: Long = annotation.version def addTracing(a: AddLayerAnnotationUpdateAction): AnnotationWithTracings = @@ -113,5 +118,7 @@ case class AnnotationWithTracings( def applyEditableMappingAction(a: EditableMappingUpdateAction)( implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = - Fox.failure("not implemented yet") // TODO + for { + updater <- getEditableMappingUpdater("tracingId") // TODO editable mapping update actions need tracing id + } yield this // TODO } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 73decf3036d..eb422d9fff1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -8,6 +8,7 @@ import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import 
com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingService, EditableMappingUpdateAction, @@ -24,8 +25,16 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ BucketMutatingVolumeUpdateAction, VolumeUpdateAction } -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} -import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingUpdatesReport} +import com.scalableminds.webknossos.tracingstore.tracings.{ + KeyValueStoreImplicits, + RemoteFallbackLayer, + TracingDataStore +} +import com.scalableminds.webknossos.tracingstore.{ + TSRemoteDatastoreClient, + TSRemoteWebknossosClient, + TracingUpdatesReport +} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Full} import play.api.libs.json.{JsObject, JsValue, Json} @@ -34,6 +43,8 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, + editableMappingService: EditableMappingService, + remoteDatastoreClient: TSRemoteDatastoreClient, tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits with LazyLogging { @@ -159,13 +170,18 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl updates, requestedSkeletonTracingIds, requestedVolumeTracingIds) ?~> "findTracingsForUpdates.failed" - annotationWithTracingsAndMappings <- findEditableMappingsForUpdates(annotationWithTracings, updates) + annotationWithTracingsAndMappings <- findEditableMappingsForUpdates(annotationWithTracings, + updates, + annotation.version, + 
targetVersion) updated <- applyUpdates(annotationWithTracingsAndMappings, annotationId, updates, targetVersion) ?~> "applyUpdates.inner.failed" } yield updated private def findEditableMappingsForUpdates( // TODO integrate with findTracings? annotationWithTracings: AnnotationWithTracings, - updates: List[UpdateAction])(implicit ec: ExecutionContext) = { + updates: List[UpdateAction], + currentMaterializedVersion: Long, + targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { val volumeIdsWithEditableMapping = annotationWithTracings.volumesIdsThatHaveEditableMapping // TODO intersect with editable mapping updates? for { @@ -176,11 +192,36 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield annotationWithTracings.copy( editableMappingsByTracingId = editableMappingInfos - .map(keyValuePair => (keyValuePair.key, (keyValuePair.value, editableMappingUpdaterFor(keyValuePair.value)))) + .map( + keyValuePair => + (keyValuePair.key, + (keyValuePair.value, + editableMappingUpdaterFor(keyValuePair.key, + keyValuePair.value, + currentMaterializedVersion, + targetVersion)))) .toMap) } - def editableMappingUpdaterFor(editableMappingInfo: EditableMappingInfo): EditableMappingUpdater = ??? 
// TODO + private def editableMappingUpdaterFor(tracingId: String, + editableMappingInfo: EditableMappingInfo, + currentMaterializedVersion: Long, + targetVersion: Long)(implicit tc: TokenContext): EditableMappingUpdater = { + val remoteFallbackLayer + : RemoteFallbackLayer = RemoteFallbackLayer("todo", "todo", "todo", ElementClassProto.uint8) // TODO + new EditableMappingUpdater( + tracingId, + editableMappingInfo.baseMappingName, + currentMaterializedVersion, + targetVersion, + remoteFallbackLayer, + tc, + remoteDatastoreClient, + editableMappingService, + tracingDataStore, + relyOnAgglomerateIds = false // TODO + ) + } private def findTracingsForUpdates( annotation: AnnotationProto, @@ -217,11 +258,10 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, editableMappingsMap) } - private def applyUpdates( - annotation: AnnotationWithTracings, - annotationId: String, - updates: List[UpdateAction], - targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { + private def applyUpdates(annotation: AnnotationWithTracings, + annotationId: String, + updates: List[UpdateAction], + targetVersion: Long)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index a8a9484ba11..711748b2dda 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala 
@@ -40,7 +40,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer def makeMappingEditable(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- volumeTracingService.find(annotationId, tracingId) tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" @@ -112,7 +112,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) @@ -128,7 +128,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer tracingId: String, agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) @@ -150,7 +150,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer def agglomerateIdsForSegments(token: Option[String], annotationId: String, tracingId: String): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) @@ -174,7 +174,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer def agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String): Action[MinCutParameters] = Action.async(validateJson[MinCutParameters]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) @@ -190,7 +190,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer tracingId: String): Action[NeighborsParameters] = Action.async(validateJson[NeighborsParameters]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 9b9f9f191a9..86feebacce9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -38,7 +38,7 @@ class SkeletonTracingController @Inject()(val tracingService: 
SkeletonTracingSer def mergedFromContents(token: Option[String], persist: Boolean): Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val tracings: List[Option[SkeletonTracing]] = request.body for { mergedTracing <- Fox.box2Fox(tracingService.merge(tracings.flatten, MergedVolumeStats.empty(), Empty)) @@ -59,7 +59,7 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer boundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { tracing <- tracingService.find(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( "tracing.notFound") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index d398abbdc4f..e1ab8ec7070 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -29,7 +29,7 @@ class TSAnnotationController @Inject()( def save(token: Option[String], annotationId: String): Action[AnnotationProto] = Action.async(validateProto[AnnotationProto]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { // TODO assert id does not already exist _ <- tracingDataStore.annotations.put(annotationId, 0L, request.body) @@ -43,7 +43,7 @@ class 
TSAnnotationController @Inject()( Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.writeAnnotation(annotationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { for { _ <- annotationTransactionService.handleUpdateGroups(annotationId, request.body) } yield Ok @@ -57,7 +57,7 @@ class TSAnnotationController @Inject()( newestVersion: Option[Long] = None, oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { updateLog <- annotationService.updateActionLog(annotationId, newestVersion, oldestVersion) } yield Ok(updateLog) @@ -68,7 +68,7 @@ class TSAnnotationController @Inject()( def newestVersion(token: Option[String], annotationId: String): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { newestVersion <- annotationService.currentMaterializableVersion(annotationId) } yield JsonOk(Json.obj("version" -> newestVersion)) @@ -79,7 +79,7 @@ class TSAnnotationController @Inject()( def updateActionStatistics(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { statistics <- annotationService.updateActionStatistics(tracingId) } yield Ok(statistics) @@ -91,7 +91,7 @@ class TSAnnotationController @Inject()( 
Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.readAnnotation(annotationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { annotationProto <- annotationService.get(annotationId, version) } yield Ok(annotationProto.toByteArray).as(protobufMimeType) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 4125161aaf1..8bc7d999ea1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -44,7 +44,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def save(token: Option[String]): Action[T] = Action.async(validateProto[T]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val tracing = request.body tracingService.save(tracing, None, 0).map { newId => Ok(Json.toJson(newId)) @@ -57,7 +57,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def saveMultiple(token: Option[String]): Action[Ts] = Action.async(validateProto[Ts]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[T] => tracingOpt match { case Some(tracing) => tracingService.save(tracing, None, 
0).map(Some(_)) @@ -73,7 +73,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( "tracing.notFound") @@ -85,7 +85,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def getMultiple(token: Option[String]): Action[List[Option[TracingSelector]]] = Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { tracings <- tracingService.findMultiple(request.body, applyUpdates = true) } yield { @@ -98,7 +98,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def mergedFromIds(token: Option[String], persist: Boolean): Action[List[Option[TracingSelector]]] = Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { tracingOpts <- tracingService.findMultiple(request.body, applyUpdates = true) ?~> Messages( "tracing.notFound") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 7d3d5871167..ddba192d7bb 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -86,7 +86,7 @@ class VolumeTracingController @Inject()( Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") @@ -104,7 +104,7 @@ class VolumeTracingController @Inject()( def mergedFromContents(token: Option[String], persist: Boolean): Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { _ <- Fox.successful(()) tracings = request.body @@ -124,7 +124,7 @@ class VolumeTracingController @Inject()( Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") @@ -147,7 +147,7 @@ class VolumeTracingController @Inject()( voxelSizeUnit: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- 
tracingService.find(annotationId, tracingId, version) ?~> Messages("tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox @@ -169,7 +169,7 @@ class VolumeTracingController @Inject()( def data(token: Option[String], annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") (data, indices) <- if (tracing.getHasEditableMapping) @@ -198,7 +198,7 @@ class VolumeTracingController @Inject()( boundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") _ = logger.info(s"Duplicating volume tracing $tracingId...") @@ -237,7 +237,7 @@ class VolumeTracingController @Inject()( tracingId: String): Action[MultipartFormData[TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox @@ -258,7 +258,7 @@ class VolumeTracingController @Inject()( dryRun: Boolean): Action[AnyContent] = Action.async { implicit request => log() { - 
accessTokenService.validateAccess(UserAccessRequest.webknossos) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- annotationService.currentMaterializableVersion(tracingId) @@ -284,7 +284,7 @@ class VolumeTracingController @Inject()( newestVersion: Option[Long] = None, oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { updateLog <- tracingService.updateActionLog(tracingId, newestVersion, oldestVersion) } yield Ok(updateLog) @@ -296,7 +296,7 @@ class VolumeTracingController @Inject()( annotationId: String, tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three // consecutive 3D points (i.e., nine floats) form a triangle. 
@@ -316,7 +316,7 @@ class VolumeTracingController @Inject()( def loadFullMeshStl(token: Option[String], annotationId: String, tracingId: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { data: Array[Byte] <- fullMeshService.loadFor(annotationId, tracingId, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) @@ -331,7 +331,7 @@ class VolumeTracingController @Inject()( def findData(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { positionOpt <- tracingService.findData(annotationId, tracingId) } yield { @@ -345,7 +345,7 @@ class VolumeTracingController @Inject()( tracingId: String, agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" @@ -362,7 +362,7 @@ class VolumeTracingController @Inject()( annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) mappingName <- 
tracingService.baseMappingName(tracing) @@ -382,7 +382,7 @@ class VolumeTracingController @Inject()( annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) mappingName <- tracingService.baseMappingName(tracing) @@ -403,7 +403,7 @@ class VolumeTracingController @Inject()( tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { fallbackLayer <- tracingService.getFallbackLayer(annotationId, tracingId) tracing <- tracingService.find(annotationId, tracingId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 856e0545ecb..488c0c14c54 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -62,7 +62,7 @@ class VolumeTracingZarrStreamingController @Inject()( tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- 
tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) @@ -84,7 +84,7 @@ class VolumeTracingZarrStreamingController @Inject()( tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto(_).toMagLiteral(allowScalar = true)) @@ -101,7 +101,7 @@ class VolumeTracingZarrStreamingController @Inject()( mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND @@ -125,7 +125,7 @@ class VolumeTracingZarrStreamingController @Inject()( mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) @@ -138,7 +138,7 @@ class VolumeTracingZarrStreamingController @Inject()( def zArray(token: Option[String], annotationId: String, tracingId: String, mag: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) @@ -172,7 +172,7 @@ class VolumeTracingZarrStreamingController @Inject()( def zarrJsonForMag(token: Option[String], annotationId: String, tracingId: String, mag: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND @@ -218,7 +218,7 @@ class VolumeTracingZarrStreamingController @Inject()( def zGroup(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) } } @@ -233,7 +233,7 @@ class VolumeTracingZarrStreamingController @Inject()( annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND @@ -251,7 +251,7 @@ class VolumeTracingZarrStreamingController @Inject()( annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND @@ -272,7 +272,7 @@ class VolumeTracingZarrStreamingController @Inject()( tracingName: Option[String], zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND @@ -297,7 +297,7 @@ class VolumeTracingZarrStreamingController @Inject()( coordinates: String): Action[AnyContent] = Action.async { implicit request => { - accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 7d2f76dc0ed..c48e6ec3abe 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -271,6 +271,7 @@ class VolumeTracingService @Inject()( bool2Fox(mag.isIsotropic) } + /* // TODO private def revertToVolumeVersion(annotationId: String, tracingId: String, @@ -332,6 +333,7 @@ class VolumeTracingService @Inject()( _ <- segmentIndexBuffer.flush() } yield sourceTracing } + */ def initializeWithDataMultiple(annotationId: String, tracingId: String, tracing: 
VolumeTracing, initialData: File)( implicit mp: MessagesProvider, From 8883306f7d0a86226b23163f64e39696ea6c7219 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 18 Sep 2024 11:46:23 +0200 Subject: [PATCH 066/361] remove token from parameter lists --- .../controllers/BinaryDataController.scala | 50 ++--- .../controllers/DSMeshController.scala | 36 ++- .../controllers/DataSourceController.scala | 206 +++++++++--------- .../controllers/ExportsController.scala | 2 +- .../controllers/ZarrStreamingController.scala | 153 ++++++------- ....scalableminds.webknossos.datastore.routes | 190 ++++++++-------- .../EditableMappingController.scala | 57 +++-- .../SkeletonTracingController.scala | 5 +- .../controllers/TSAnnotationController.scala | 39 ++-- .../controllers/TracingController.scala | 10 +- .../controllers/VolumeTracingController.scala | 66 ++---- ...VolumeTracingZarrStreamingController.scala | 40 +--- ...alableminds.webknossos.tracingstore.routes | 118 +++++----- 13 files changed, 445 insertions(+), 527 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index 514ea193faa..02a48218fef 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -48,12 +48,12 @@ class BinaryDataController @Inject()( val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService def requestViaWebknossos( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { logTime(slackNotificationService.noticeSlowRequest) { val t = Instant.now for { @@ -77,7 +77,6 @@ class BinaryDataController @Inject()( * Handles requests for raw binary data via HTTP GET. */ def requestRawCuboid( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, @@ -95,7 +94,8 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -114,12 +114,12 @@ class BinaryDataController @Inject()( } def requestRawCuboidPost( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -132,8 +132,7 @@ class BinaryDataController @Inject()( /** * Handles a request for raw binary data via a HTTP GET. Used by knossos. 
*/ - def requestViaKnossos(token: Option[String], - organizationId: String, + def requestViaKnossos(organizationId: String, datasetName: String, dataLayerName: String, resolution: Int, @@ -141,7 +140,8 @@ class BinaryDataController @Inject()( y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -160,8 +160,7 @@ class BinaryDataController @Inject()( } } - def thumbnailJpeg(token: Option[String], - organizationId: String, + def thumbnailJpeg(organizationId: String, datasetName: String, dataLayerName: String, x: Int, @@ -175,7 +174,8 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -216,13 +216,13 @@ class BinaryDataController @Inject()( } def mappingJson( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + 
UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -237,12 +237,12 @@ class BinaryDataController @Inject()( /** * Handles ad-hoc mesh requests. */ - def requestAdHocMesh(token: Option[String], - organizationId: String, + def requestAdHocMesh(organizationId: String, datasetName: String, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -278,12 +278,10 @@ class BinaryDataController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", ") + "]" - def findData(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[AnyContent] = + def findData(organizationId: String, datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -296,12 +294,10 @@ class BinaryDataController @Inject()( } } - def histogram(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[AnyContent] = + def histogram(organizationId: String, 
datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index 0ee4cbfcd67..eed1704178e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -23,32 +23,30 @@ class DSMeshController @Inject()( override def allowRemoteOrigin: Boolean = true - def listMeshFiles(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[AnyContent] = + def listMeshFiles(organizationId: String, datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetName, dataLayerName) } yield Ok(Json.toJson(meshFiles)) } } - def listMeshChunksForSegment(token: Option[String], - organizationId: String, + def listMeshChunksForSegment(organizationId: String, datasetName: String, dataLayerName: String, /* If targetMappingName is set, assume that meshfile contains meshes for - the 
oversegmentation. Collect mesh chunks of all *unmapped* segment ids - belonging to the supplied agglomerate id. - If it is not set, use meshfile as is, assume passed id is present in meshfile - Note: in case of an editable mapping, targetMappingName is its baseMapping name. + the oversegmentation. Collect mesh chunks of all *unmapped* segment ids + belonging to the supplied agglomerate id. + If it is not set, use meshfile as is, assume passed id is present in meshfile + Note: in case of an editable mapping, targetMappingName is its baseMapping name. */ targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationId, @@ -74,12 +72,12 @@ class DSMeshController @Inject()( } } - def readMeshChunk(token: Option[String], - organizationId: String, + def readMeshChunk(organizationId: String, datasetName: String, dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (data, encoding) <- meshFileService.readMeshChunk(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" } yield { @@ -90,12 +88,10 @@ class DSMeshController @Inject()( } } - def loadFullMeshStl(token: Option[String], - organizationId: String, - 
datasetName: String, - dataLayerName: String): Action[FullMeshRequest] = + def loadFullMeshStl(organizationId: String, datasetName: String, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { data: Array[Byte] <- fullMeshService.loadFor(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 24b2c4f511f..7d130381413 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -65,7 +65,7 @@ class DataSourceController @Inject()( override def allowRemoteOrigin: Boolean = true - def readInboxDataSource(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = + def readInboxDataSource(organizationId: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => { accessTokenService.validateAccessFromTokenContextForSyncBlock( @@ -80,7 +80,7 @@ class DataSourceController @Inject()( } } - def triggerInboxCheckBlocking(token: Option[String]): Action[AnyContent] = Action.async { implicit request => + def triggerInboxCheckBlocking(): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { _ <- dataSourceService.checkInbox(verbose = true) @@ -88,9 +88,10 @@ class 
DataSourceController @Inject()( } } - def reserveUpload(token: Option[String]): Action[ReserveUploadInformation] = + def reserveUpload(): Action[ReserveUploadInformation] = Action.async(validateJson[ReserveUploadInformation]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(request.body.organization)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organization)) { for { isKnownUpload <- uploadService.isKnownUpload(request.body.uploadId) _ <- if (!isKnownUpload) { @@ -101,7 +102,7 @@ class DataSourceController @Inject()( } } - def getUnfinishedUploads(token: Option[String], organizationName: String): Action[AnyContent] = + def getUnfinishedUploads(organizationName: String): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationName)) { for { @@ -113,9 +114,10 @@ class DataSourceController @Inject()( // To be called by people with disk access but not DatasetManager role. 
This way, they can upload a dataset manually on disk, // and it can be put in a webknossos folder where they have access - def reserveManualUpload(token: Option[String]): Action[ReserveManualUploadInformation] = + def reserveManualUpload(): Action[ReserveManualUploadInformation] = Action.async(validateJson[ReserveManualUploadInformation]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(request.body.organization)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organization)) { for { _ <- remoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( @@ -147,7 +149,7 @@ class DataSourceController @Inject()( - As GET parameter: - token (string): datastore token identifying the uploading user */ - def uploadChunk(token: Option[String]): Action[MultipartFormData[Files.TemporaryFile]] = + def uploadChunk(): Action[MultipartFormData[Files.TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => val uploadForm = Form( tuple( @@ -166,7 +168,8 @@ class DataSourceController @Inject()( for { dataSourceId <- uploadService.getDataSourceIdByUploadId( uploadService.extractDatasetUploadId(uploadFileId)) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { + result <- accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.writeDataSource(dataSourceId)) { for { isKnownUpload <- uploadService.isKnownUploadByFileId(uploadFileId) _ <- bool2Fox(isKnownUpload) ?~> "dataset.upload.validation.failed" @@ -183,7 +186,7 @@ class DataSourceController @Inject()( ) } - def testChunk(token: Option[String], resumableChunkNumber: Int, resumableIdentifier: String): Action[AnyContent] = + def testChunk(resumableChunkNumber: Int, resumableIdentifier: String): Action[AnyContent] = Action.async { implicit request => for { 
dataSourceId <- uploadService.getDataSourceIdByUploadId( @@ -198,26 +201,25 @@ class DataSourceController @Inject()( } yield result } - def finishUpload(token: Option[String]): Action[UploadInformation] = Action.async(validateJson[UploadInformation]) { - implicit request => - log() { - for { - dataSourceId <- uploadService - .getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" - result <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { - for { - (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "finishUpload.failed" - _ <- remoteWebknossosClient.reportUpload(dataSourceId, - datasetSizeBytes, - request.body.needsConversion.getOrElse(false), - viaAddRoute = false) ?~> "reportUpload.failed" - } yield Ok - } - } yield result - } + def finishUpload(): Action[UploadInformation] = Action.async(validateJson[UploadInformation]) { implicit request => + log() { + for { + dataSourceId <- uploadService + .getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" + result <- accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(dataSourceId)) { + for { + (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "finishUpload.failed" + _ <- remoteWebknossosClient.reportUpload(dataSourceId, + datasetSizeBytes, + request.body.needsConversion.getOrElse(false), + viaAddRoute = false) ?~> "reportUpload.failed" + } yield Ok + } + } yield result + } } - def cancelUpload(token: Option[String]): Action[CancelUploadInformation] = + def cancelUpload(): Action[CancelUploadInformation] = Action.async(validateJson[CancelUploadInformation]) { implicit request => val dataSourceIdFox = uploadService.isKnownUpload(request.body.uploadId).flatMap { case false => Fox.failure("dataset.upload.validation.failed") @@ -234,7 +236,6 @@ class DataSourceController @Inject()( } def listMappings( - token: 
Option[String], organizationId: String, datasetName: String, dataLayerName: String @@ -247,12 +248,12 @@ class DataSourceController @Inject()( } def listAgglomerates( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateList = agglomerateService.exploreAgglomerates(organizationId, datasetName, dataLayerName) @@ -261,14 +262,14 @@ class DataSourceController @Inject()( } def generateAgglomerateSkeleton( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox skeleton <- agglomerateService.generateSkeleton(organizationId, @@ -281,14 +282,14 @@ class DataSourceController @Inject()( } def agglomerateGraph( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + 
UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateGraph <- agglomerateService.generateAgglomerateGraph( @@ -299,14 +300,14 @@ class DataSourceController @Inject()( } def positionForSegmentViaAgglomerateFile( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox position <- agglomerateService.positionForSegmentId( @@ -317,13 +318,13 @@ class DataSourceController @Inject()( } def largestAgglomerateId( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox largestAgglomerateId: Long <- agglomerateService @@ -341,13 +342,13 @@ class DataSourceController @Inject()( } def agglomerateIdsForSegmentIds( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Seq[Long] <- agglomerateService @@ -366,13 +367,13 @@ class DataSourceController @Inject()( } def agglomerateIdsForAllSegmentIds( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateIds: Array[Long] <- agglomerateService @@ -389,9 +390,10 @@ class DataSourceController @Inject()( } } - def update(token: Option[String], organizationId: String, datasetName: String): Action[DataSource] = + def update(organizationId: String, datasetName: String): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) dataSource <- dataSourceRepository.find(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( @@ -402,10 +404,7 @@ class DataSourceController @Inject()( } // Stores a remote dataset in the database. 
- def add(token: Option[String], - organizationId: String, - datasetName: String, - folderId: Option[String]): Action[DataSource] = + def add(organizationId: String, datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { @@ -433,21 +432,19 @@ class DataSourceController @Inject()( } } - def createOrganizationDirectory(token: Option[String], organizationId: String): Action[AnyContent] = Action.async { - implicit request => - accessTokenService.validateAccessFromTokenContextForSyncBlock(UserAccessRequest.administrateDataSources(organizationId)) { - val newOrganizationDirectory = new File(f"${dataSourceService.dataBaseDir}/$organizationId") - newOrganizationDirectory.mkdirs() - if (newOrganizationDirectory.isDirectory) - Ok - else - BadRequest - } + def createOrganizationDirectory(organizationId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.administrateDataSources(organizationId)) { + val newOrganizationDirectory = new File(f"${dataSourceService.dataBaseDir}/$organizationId") + newOrganizationDirectory.mkdirs() + if (newOrganizationDirectory.isDirectory) + Ok + else + BadRequest + } } - def measureUsedStorage(token: Option[String], - organizationId: String, - datasetName: Option[String] = None): Action[AnyContent] = + def measureUsedStorage(organizationId: String, datasetName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { @@ -465,10 +462,7 @@ class DataSourceController @Inject()( } } - def reload(token: Option[String], - organizationId: String, - datasetName: String, - layerName: Option[String] = None): Action[AnyContent] = + def 
reload(organizationId: String, datasetName: String, layerName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { val (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) = @@ -490,7 +484,7 @@ class DataSourceController @Inject()( } } - def deleteOnDisk(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = + def deleteOnDisk(organizationId: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => val dataSourceId = DataSourceId(datasetName, organizationId) accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { @@ -504,9 +498,10 @@ class DataSourceController @Inject()( } } - def compose(token: Option[String]): Action[ComposeRequest] = + def compose(): Action[ComposeRequest] = Action.async(validateJson[ComposeRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(request.body.organizationId)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organizationId)) { for { _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)(id => accessTokenService.assertUserAccess( @@ -517,12 +512,10 @@ class DataSourceController @Inject()( } } - def listConnectomeFiles(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[AnyContent] = + def listConnectomeFiles(organizationId: String, datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + 
UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { val connectomeFileNames = connectomeFileService.exploreConnectomeFiles(organizationId, datasetName, dataLayerName) for { @@ -538,12 +531,12 @@ class DataSourceController @Inject()( } } - def getSynapsesForAgglomerates(token: Option[String], - organizationId: String, + def getSynapsesForAgglomerates(organizationId: String, datasetName: String, dataLayerName: String): Action[ByAgglomerateIdsRequest] = Action.async(validateJson[ByAgglomerateIdsRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -553,13 +546,13 @@ class DataSourceController @Inject()( } } - def getSynapticPartnerForSynapses(token: Option[String], - organizationId: String, + def getSynapticPartnerForSynapses(organizationId: String, datasetName: String, dataLayerName: String, direction: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -571,12 +564,12 @@ class DataSourceController @Inject()( } } - def getSynapsePositions(token: Option[String], - organizationId: String, + def getSynapsePositions(organizationId: String, datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -586,12 +579,10 @@ class DataSourceController @Inject()( } } - def getSynapseTypes(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String): Action[BySynapseIdsRequest] = + def getSynapseTypes(organizationId: String, datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { meshFilePath <- Fox.successful( connectomeFileService @@ -601,12 +592,10 @@ class DataSourceController @Inject()( } } - def checkSegmentIndexFile(token: Option[String], - organizationId: String, - dataSetName: String, - dataLayerName: String): Action[AnyContent] = + def checkSegmentIndexFile(organizationId: String, dataSetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId))) { val segmentIndexFileOpt = segmentIndexFileService.getSegmentIndexFile(organizationId, dataSetName, dataLayerName).toOption Future.successful(Ok(Json.toJson(segmentIndexFileOpt.isDefined))) @@ -617,13 +606,13 @@ class DataSourceController @Inject()( * Query the segment index file for a single segment * @return 
List of bucketPositions as positions (not indices) of 32³ buckets in mag */ - def getSegmentIndex(token: Option[String], - organizationId: String, + def getSegmentIndex(organizationId: String, datasetName: String, dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { segmentIds <- segmentIdsForAgglomerateIdIfNeeded( organizationId, @@ -655,12 +644,12 @@ class DataSourceController @Inject()( * Query the segment index file for multiple segments * @return List of bucketPositions as indices of 32³ buckets */ - def querySegmentIndex(token: Option[String], - organizationId: String, + def querySegmentIndex(organizationId: String, datasetName: String, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => for { @@ -672,7 +661,7 @@ class DataSourceController @Inject()( request.body.editableMappingTracingId, segmentOrAgglomerateId, mappingNameForMeshFile = None, - omitMissing = true, // assume agglomerate ids not present in the mapping belong to user-brushed segments + omitMissing = true // assume agglomerate ids not present in the mapping belong to user-brushed segments ) fileMag <- segmentIndexFileService.readFileMag(organizationId, 
datasetName, dataLayerName) topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => @@ -687,12 +676,12 @@ class DataSourceController @Inject()( } } - def getSegmentVolume(token: Option[String], - organizationId: String, + def getSegmentVolume(organizationId: String, datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) volumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -709,12 +698,12 @@ class DataSourceController @Inject()( } } - def getSegmentBoundingBox(token: Option[String], - organizationId: String, + def getSegmentBoundingBox(organizationId: String, datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) boxes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -730,9 +719,10 @@ class DataSourceController @Inject()( } // Called directly by wk side - def exploreRemoteDataset(token: Option[String]): Action[ExploreRemoteDatasetRequest] = + def exploreRemoteDataset(): Action[ExploreRemoteDatasetRequest] = Action.async(validateJson[ExploreRemoteDatasetRequest]) { 
implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(request.body.organizationId)) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.administrateDataSources(request.body.organizationId)) { val reportMutable = ListBuffer[String]() val hasLocalFilesystemRequest = request.body.layerParameters.exists(param => new URI(param.remoteUri).getScheme == DataVaultService.schemeFile) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala index efdac2ed607..e801528a4b3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala @@ -35,7 +35,7 @@ class ExportsController @Inject()(webknossosClient: DSRemoteWebknossosClient, override def allowRemoteOrigin: Boolean = true - def download(token: Option[String], jobId: String): Action[AnyContent] = Action.async { implicit request => + def download(jobId: String): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.downloadJobExport(jobId)) { for { exportProperties <- webknossosClient.getJobExportProperties(jobId) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index a04a7d81c40..358c74eb442 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -51,12 +51,12 @@ class ZarrStreamingController @Inject()( * Uses the 
OME-NGFF standard (see https://ngff.openmicroscopy.org/latest/) */ def requestZAttrs( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -68,12 +68,12 @@ class ZarrStreamingController @Inject()( } def requestZarrJson( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, @@ -88,9 +88,7 @@ class ZarrStreamingController @Inject()( } } - def zAttrsWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String = ""): Action[AnyContent] = + def zAttrsWithAnnotationPrivateLink(accessToken: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( accessToken, @@ -113,9 +111,7 @@ class ZarrStreamingController @Inject()( ) } - def zarrJsonWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String = ""): Action[AnyContent] = + def zarrJsonWithAnnotationPrivateLink(accessToken: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( 
accessToken, @@ -146,12 +142,12 @@ class ZarrStreamingController @Inject()( * Note that the result here is not necessarily equal to the file used in the underlying storage. */ def requestDataSource( - token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ~> NOT_FOUND dataLayers = dataSource.dataLayers @@ -198,9 +194,7 @@ class ZarrStreamingController @Inject()( } } - def dataSourceWithAnnotationPrivateLink(token: Option[String], - accessToken: String, - zarrVersion: Int): Action[AnyContent] = + def dataSourceWithAnnotationPrivateLink(accessToken: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) ~> NOT_FOUND @@ -225,20 +219,19 @@ class ZarrStreamingController @Inject()( } def requestRawZarrCube( - token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { rawZarrCube(organizationId, datasetName, dataLayerName, mag, coordinates) } } - def rawZarrCubePrivateLink(token: Option[String], - accessToken: String, + def rawZarrCubePrivateLink(accessToken: String, dataLayerName: String, mag: String, coordinates: String): 
Action[AnyContent] = @@ -292,13 +285,14 @@ class ZarrStreamingController @Inject()( _ <- bool2Fox(notFoundIndices.isEmpty) ~> "zarr.chunkNotFound" ~> NOT_FOUND } yield Ok(data) - def requestZArray(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String, - mag: String, + def requestZArray( + organizationId: String, + datasetName: String, + dataLayerName: String, + mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { zArray(organizationId, datasetName, dataLayerName, mag) } } @@ -313,13 +307,14 @@ class ZarrStreamingController @Inject()( zarrHeader = ZarrHeader.fromLayer(dataLayer, magParsed) } yield Ok(Json.toJson(zarrHeader)) - def requestZarrJsonForMag(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String, - mag: String, + def requestZarrJsonForMag( + organizationId: String, + datasetName: String, + dataLayerName: String, + mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { zarrJsonForMag(organizationId, datasetName, dataLayerName, mag) } } @@ -334,36 +329,32 @@ class ZarrStreamingController @Inject()( zarrHeader = Zarr3ArrayHeader.fromDataLayer(dataLayer) } yield Ok(Json.toJson(zarrHeader)) - def zArrayPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String, - mag: String): Action[AnyContent] = Action.async { implicit request => - ifIsAnnotationLayerOrElse( - accessToken, - dataLayerName, - 
ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => - remoteTracingstoreClient - .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) - .map(z => Ok(Json.toJson(z))), - orElse = - annotationSource => zArray(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) - ) + def zArrayPrivateLink(accessToken: String, dataLayerName: String, mag: String): Action[AnyContent] = Action.async { + implicit request => + ifIsAnnotationLayerOrElse( + accessToken, + dataLayerName, + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => + remoteTracingstoreClient + .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) + .map(z => Ok(Json.toJson(z))), + orElse = + annotationSource => zArray(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) + ) } - def zarrJsonPrivateLink(token: Option[String], - accessToken: String, - dataLayerName: String, - mag: String): Action[AnyContent] = Action.async { implicit request => - ifIsAnnotationLayerOrElse( - accessToken, - dataLayerName, - ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => - remoteTracingstoreClient - .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) - .map(z => Ok(Json.toJson(z))), - orElse = annotationSource => - zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) - ) + def zarrJsonPrivateLink(accessToken: String, dataLayerName: String, mag: String): Action[AnyContent] = Action.async { + implicit request => + ifIsAnnotationLayerOrElse( + accessToken, + dataLayerName, + ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => + remoteTracingstoreClient + .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) + .map(z => 
Ok(Json.toJson(z))), + orElse = annotationSource => + zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) + ) } private def ifIsAnnotationLayerOrElse( @@ -382,14 +373,14 @@ class ZarrStreamingController @Inject()( } } yield result - def requestDataLayerMagFolderContents(token: Option[String], - organizationId: String, + def requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { dataLayerMagFolderContents(organizationId, datasetName, dataLayerName, mag, zarrVersion) } } @@ -413,8 +404,7 @@ class ZarrStreamingController @Inject()( additionalEntries )).withHeaders() - def dataLayerMagFolderContentsPrivateLink(token: Option[String], - accessToken: String, + def dataLayerMagFolderContentsPrivateLink(accessToken: String, dataLayerName: String, mag: String, zarrVersion: Int): Action[AnyContent] = @@ -445,12 +435,12 @@ class ZarrStreamingController @Inject()( ) } - def requestDataLayerFolderContents(token: Option[String], - organizationId: String, + def requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { dataLayerFolderContents(organizationId, datasetName, dataLayerName, zarrVersion) } } @@ -474,8 +464,7 @@ class ZarrStreamingController @Inject()( 
additionalFiles ++ mags.map(_.toMagLiteral(allowScalar = true)) )).withHeaders() - def dataLayerFolderContentsPrivateLink(token: Option[String], - accessToken: String, + def dataLayerFolderContentsPrivateLink(accessToken: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => @@ -502,12 +491,12 @@ class ZarrStreamingController @Inject()( ) } - def requestDataSourceFolderContents(token: Option[String], - organizationId: String, + def requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { for { dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND @@ -523,9 +512,7 @@ class ZarrStreamingController @Inject()( } } - def dataSourceFolderContentsPrivateLink(token: Option[String], - accessToken: String, - zarrVersion: Int): Action[AnyContent] = + def dataSourceFolderContentsPrivateLink(accessToken: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) @@ -550,19 +537,17 @@ class ZarrStreamingController @Inject()( )) } - def requestZGroup(token: Option[String], - organizationId: String, - datasetName: String, - dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContextForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { - Ok(zGroupJson) + def requestZGroup(organizationId: String, datasetName: String, dataLayerName: String = ""): Action[AnyContent] = 
+ Action.async { implicit request => + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId))) { + Ok(zGroupJson) + } } - } private def zGroupJson: JsValue = Json.toJson(NgffGroupHeader(zarr_format = 2)) - def zGroupPrivateLink(token: Option[String], accessToken: String, dataLayerName: String): Action[AnyContent] = + def zGroupPrivateLink(accessToken: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( accessToken, diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index a4dca523cde..6d8ae4626fc 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -5,122 +5,122 @@ GET /health @com.scalableminds.webknossos.datastore.controllers.Application.health # Read image data -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) -GET 
/datasets/:organizationId/:datasetName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(organizationId: String, datasetName: String, 
dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(organizationId: String, datasetName: String, dataLayerName: String) # Knossos compatible routes -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mag:resolution/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, resolution: Int, x: Int, y: Int, z: Int, cubeSize: Int) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mag:resolution/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(organizationId: String, datasetName: String, dataLayerName: String, resolution: Int, x: Int, y: Int, z: Int, cubeSize: Int) # Zarr2 compatible routes -GET /zarr/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/.zgroup 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetName: String, dataLayerName="") -GET /zarr/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: 
String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) - -GET /annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName="") -GET /annotations/zarr/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, 
dataLayerName: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zAttrsWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zArrayPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String) -GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetName: 
String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetName: String, dataLayerName="") +GET /zarr/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(organizationId: String, datasetName: String, dataLayerName: String) +GET /zarr/:organizationId/:datasetName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetName: String, dataLayerName: String) +GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET 
/zarr/:organizationId/:datasetName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(organizationId: String, datasetName: String, dataLayerName: String, mag: String) +GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) + +GET /annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(accessTokenOrId: String, dataLayerName="") +GET /annotations/zarr/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zAttrsWithAnnotationPrivateLink(accessTokenOrId: String, dataLayerName: 
String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zGroupPrivateLink(accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zArrayPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String) +GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Zarr3 compatible routes -GET /zarr3_experimental/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 
3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetName: 
String, dataLayerName: String, mag: String, coordinates: String) - -GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/ 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr3_experimental/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(organizationId: String, datasetName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: 
Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(organizationId: String, datasetName: String, dataLayerName: String) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(organizationId: String, datasetName: String, dataLayerName: String, mag: String) +GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) + +GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/datasource-properties.json 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonWithAnnotationPrivateLink(accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonPrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Segmentation mappings -GET 
/datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(organizationId: String, datasetName: String, dataLayerName: String) # Agglomerate files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) 
-GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, segmentId: Long) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET 
/datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, segmentId: Long) # Mesh files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks 
@com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(organizationId: String, datasetName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(organizationId: String, datasetName: String, dataLayerName: String) # Connectome files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/types @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, direction: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/types 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(organizationId: String, datasetName: String, dataLayerName: String, direction: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(organizationId: String, datasetName: String, dataLayerName: String) # Ad-Hoc Meshing -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(organizationId: String, datasetName: String, dataLayerName: String) # Segment-Index files -GET /datasets/:organizationId/:dataSetName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(token: Option[String], organizationId: String, 
dataSetName: String, dataLayerName: String, segmentId: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) +GET /datasets/:organizationId/:dataSetName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(organizationId: String, dataSetName: String, dataLayerName: String) +POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(organizationId: String, dataSetName: String, dataLayerName: String) +POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(organizationId: String, dataSetName: String, dataLayerName: String, segmentId: String) +POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(organizationId: String, dataSetName: String, dataLayerName: String) +POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(organizationId: String, dataSetName: String, dataLayerName: String) # DataSource management -GET /datasets 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(token: Option[String], resumableChunkNumber: Int, resumableIdentifier: String) -POST /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.uploadChunk(token: Option[String]) -GET /datasets/getUnfinishedUploads @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getUnfinishedUploads(token: Option[String], organizationName: String) -POST /datasets/reserveUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveUpload(token: Option[String]) -POST /datasets/reserveManualUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveManualUpload(token: Option[String]) -POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload(token: Option[String]) -POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload(token: Option[String]) -GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(token: Option[String], organizationId: String, datasetName: Option[String]) -GET /datasets/:organizationId/:datasetName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(token: Option[String], organizationId: String, datasetName: String) -POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(token: Option[String], organizationId: String, datasetName: String) -PUT /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetName: String, folderId: Option[String]) -DELETE /datasets/:organizationId/:datasetName/deleteOnDisk 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationId: String, datasetName: String) -POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose(token: Option[String]) -POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset(token: Option[String]) +GET /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(resumableChunkNumber: Int, resumableIdentifier: String) +POST /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.uploadChunk() +GET /datasets/getUnfinishedUploads @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getUnfinishedUploads(organizationName: String) +POST /datasets/reserveUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveUpload() +POST /datasets/reserveManualUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveManualUpload() +POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload() +POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload() +GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(organizationId: String, datasetName: Option[String]) +GET /datasets/:organizationId/:datasetName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(organizationId: String, datasetName: String) +POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(organizationId: String, datasetName: String) +PUT /datasets/:organizationId/:datasetName 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(organizationId: String, datasetName: String, folderId: Option[String]) +DELETE /datasets/:organizationId/:datasetName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(organizationId: String, datasetName: String) +POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose() +POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset() # Actions -POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking(token: Option[String]) -POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(token: Option[String], organizationId: String) -POST /triggers/reload/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(token: Option[String], organizationId: String, datasetName: String, layerName: Option[String]) +POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking() +POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(organizationId: String) +POST /triggers/reload/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(organizationId: String, datasetName: String, layerName: Option[String]) # Exports -GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(token: Option[String], jobId: String) +GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(jobId: String) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 711748b2dda..6ab093d315a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -37,7 +37,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer bodyParsers: PlayBodyParsers) extends Controller { - def makeMappingEditable(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = + def makeMappingEditable(annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { @@ -77,7 +77,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer /*// TODO integrate all of this into annotation update - def updateEditableMapping(token: Option[String], + def updateEditableMapping( annotationId: String, tracingId: String): Action[List[UpdateActionGroup]] = Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => @@ -106,10 +106,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } */ - def editableMappingInfo(token: Option[String], - annotationId: String, - tracingId: String, - version: Option[Long]): Action[AnyContent] = + def editableMappingInfo(annotationId: String, tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { @@ -123,31 +120,29 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } } - def 
segmentIdsForAgglomerate(token: Option[String], - annotationId: String, - tracingId: String, - agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - for { - tracing <- volumeTracingService.find(annotationId, tracingId) - _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService - .getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer) - .futureBox - segmentIds <- agglomerateGraphBox match { - case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) - case Empty => Fox.successful(List.empty) - case f: Failure => f.toFox - } - agglomerateIdIsPresent = agglomerateGraphBox.isDefined - } yield Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) + def segmentIdsForAgglomerate(annotationId: String, tracingId: String, agglomerateId: Long): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + tracing <- volumeTracingService.find(annotationId, tracingId) + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService + .getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer) + .futureBox + segmentIds <- agglomerateGraphBox match { + case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) + case Empty => Fox.successful(List.empty) + case f: Failure => f.toFox + } + agglomerateIdIsPresent = agglomerateGraphBox.isDefined + } yield 
Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) + } } } - } - def agglomerateIdsForSegments(token: Option[String], annotationId: String, tracingId: String): Action[ListOfLong] = + def agglomerateIdsForSegments(annotationId: String, tracingId: String): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { @@ -171,7 +166,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } } - def agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String): Action[MinCutParameters] = + def agglomerateGraphMinCut(annotationId: String, tracingId: String): Action[MinCutParameters] = Action.async(validateJson[MinCutParameters]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { @@ -185,9 +180,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } } - def agglomerateGraphNeighbors(token: Option[String], - annotationId: String, - tracingId: String): Action[NeighborsParameters] = + def agglomerateGraphNeighbors(annotationId: String, tracingId: String): Action[NeighborsParameters] = Action.async(validateJson[NeighborsParameters]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 86feebacce9..79ebfb8138b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -35,7 +35,7 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer implicit def unpackMultiple(tracings: SkeletonTracings): List[Option[SkeletonTracing]] = tracings.tracings.toList.map(_.tracing) - def mergedFromContents(token: Option[String], persist: Boolean): Action[SkeletonTracings] = + def mergedFromContents(persist: Boolean): Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -49,8 +49,7 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer } } - def duplicate(token: Option[String], - annotationId: String, + def duplicate(annotationId: String, tracingId: String, version: Option[Long], fromTask: Option[Boolean], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index e1ab8ec7070..f4fe393cc77 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -26,7 +26,7 @@ class TSAnnotationController @Inject()( extends Controller with KeyValueStoreImplicits { - def save(token: Option[String], annotationId: String): Action[AnnotationProto] = + def save(annotationId: String): Action[AnnotationProto] = Action.async(validateProto[AnnotationProto]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -39,7 +39,7 @@ class TSAnnotationController @Inject()( } } - def update(token: Option[String], annotationId: String): 
Action[List[UpdateActionGroup]] = + def update(annotationId: String): Action[List[UpdateActionGroup]] = Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { @@ -52,8 +52,7 @@ class TSAnnotationController @Inject()( } } - def updateActionLog(token: Option[String], - annotationId: String, + def updateActionLog(annotationId: String, newestVersion: Option[Long] = None, oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => log() { @@ -65,29 +64,27 @@ class TSAnnotationController @Inject()( } } - def newestVersion(token: Option[String], annotationId: String): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { - for { - newestVersion <- annotationService.currentMaterializableVersion(annotationId) - } yield JsonOk(Json.obj("version" -> newestVersion)) - } + def newestVersion(annotationId: String): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { + for { + newestVersion <- annotationService.currentMaterializableVersion(annotationId) + } yield JsonOk(Json.obj("version" -> newestVersion)) } + } } - def updateActionStatistics(token: Option[String], tracingId: String): Action[AnyContent] = Action.async { - implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - for { - statistics <- annotationService.updateActionStatistics(tracingId) - } yield Ok(statistics) - } + def updateActionStatistics(tracingId: String): Action[AnyContent] = Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + statistics <- annotationService.updateActionStatistics(tracingId) + } yield 
Ok(statistics) } + } } - def get(token: Option[String], annotationId: String, version: Option[Long]): Action[AnyContent] = + def get(annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 8bc7d999ea1..a8c1b04dd60 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -41,7 +41,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C override def allowRemoteOrigin: Boolean = true - def save(token: Option[String]): Action[T] = Action.async(validateProto[T]) { implicit request => + def save(): Action[T] = Action.async(validateProto[T]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -54,7 +54,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def saveMultiple(token: Option[String]): Action[Ts] = Action.async(validateProto[Ts]) { implicit request => + def saveMultiple(): Action[Ts] = Action.async(validateProto[Ts]) { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -70,7 +70,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]): Action[AnyContent] = + def get(annotationId: String, tracingId: String, version: Option[Long]): 
Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { @@ -82,7 +82,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def getMultiple(token: Option[String]): Action[List[Option[TracingSelector]]] = + def getMultiple: Action[List[Option[TracingSelector]]] = Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -95,7 +95,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def mergedFromIds(token: Option[String], persist: Boolean): Action[List[Option[TracingSelector]]] = + def mergedFromIds(persist: Boolean): Action[List[Option[TracingSelector]]] = Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index ddba192d7bb..d0cd69fd7cf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -78,8 +78,7 @@ class VolumeTracingController @Inject()( implicit def unpackMultiple(tracings: VolumeTracings): List[Option[VolumeTracing]] = tracings.tracings.toList.map(_.tracing) - def initialData(token: Option[String], - annotationId: String, + def initialData(annotationId: String, tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]): Action[AnyContent] = @@ -101,7 +100,7 @@ class VolumeTracingController @Inject()( } } - def 
mergedFromContents(token: Option[String], persist: Boolean): Action[VolumeTracings] = + def mergedFromContents(persist: Boolean): Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -120,7 +119,7 @@ class VolumeTracingController @Inject()( } } - def initialDataMultiple(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = + def initialDataMultiple(annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { @@ -138,8 +137,7 @@ class VolumeTracingController @Inject()( } } - def allDataZip(token: Option[String], - annotationId: String, + def allDataZip(annotationId: String, tracingId: String, volumeDataZipFormat: String, version: Option[Long], @@ -166,7 +164,7 @@ class VolumeTracingController @Inject()( } } - def data(token: Option[String], annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = + def data(annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { @@ -186,8 +184,7 @@ class VolumeTracingController @Inject()( private def formatMissingBucketList(indices: List[Int]): String = "[" + indices.mkString(", ") + "]" - def duplicate(token: Option[String], - annotationId: String, + def duplicate(annotationId: String, tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], @@ -232,9 +229,7 @@ class VolumeTracingController @Inject()( } } - def importVolumeData(token: Option[String], - annotationId: String, - tracingId: String): Action[MultipartFormData[TemporaryFile]] = + def importVolumeData(annotationId: String, tracingId: String): 
Action[MultipartFormData[TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeTracing(tracingId)) { @@ -252,10 +247,7 @@ class VolumeTracingController @Inject()( } } - def addSegmentIndex(token: Option[String], - annotationId: String, - tracingId: String, - dryRun: Boolean): Action[AnyContent] = + def addSegmentIndex(annotationId: String, tracingId: String, dryRun: Boolean): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -279,8 +271,7 @@ class VolumeTracingController @Inject()( } } - def updateActionLog(token: Option[String], - tracingId: String, + def updateActionLog(tracingId: String, newestVersion: Option[Long] = None, oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => log() { @@ -292,9 +283,7 @@ class VolumeTracingController @Inject()( } } - def requestAdHocMesh(token: Option[String], - annotationId: String, - tracingId: String): Action[WebknossosAdHocMeshRequest] = + def requestAdHocMesh(annotationId: String, tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -314,7 +303,7 @@ class VolumeTracingController @Inject()( } } - def loadFullMeshStl(token: Option[String], annotationId: String, tracingId: String): Action[FullMeshRequest] = + def loadFullMeshStl(annotationId: String, tracingId: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -329,21 +318,17 @@ class VolumeTracingController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", 
") + "]" - def findData(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { - implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - for { - positionOpt <- tracingService.findData(annotationId, tracingId) - } yield { - Ok(Json.obj("position" -> positionOpt, "resolution" -> positionOpt.map(_ => Vec3Int.ones))) - } + def findData(annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + positionOpt <- tracingService.findData(annotationId, tracingId) + } yield { + Ok(Json.obj("position" -> positionOpt, "resolution" -> positionOpt.map(_ => Vec3Int.ones))) } + } } - def agglomerateSkeleton(token: Option[String], - annotationId: String, - tracingId: String, - agglomerateId: Long): Action[AnyContent] = + def agglomerateSkeleton(annotationId: String, tracingId: String, agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -358,9 +343,7 @@ class VolumeTracingController @Inject()( } } - def getSegmentVolume(token: Option[String], - annotationId: String, - tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentVolume(annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -378,9 +361,7 @@ class VolumeTracingController @Inject()( } } - def getSegmentBoundingBox(token: Option[String], - annotationId: String, - tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentBoundingBox(annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = 
Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -398,10 +379,7 @@ class VolumeTracingController @Inject()( } } - def getSegmentIndex(token: Option[String], - annotationId: String, - tracingId: String, - segmentId: Long): Action[GetSegmentIndexParameters] = + def getSegmentIndex(annotationId: String, tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 488c0c14c54..b1f58de065d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -57,10 +57,7 @@ class VolumeTracingZarrStreamingController @Inject()( override def defaultErrorCode: Int = NOT_FOUND - def volumeTracingFolderContent(token: Option[String], - annotationId: String, - tracingId: String, - zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -79,10 +76,7 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingFolderContentJson(token: Option[String], - annotationId: String, - tracingId: String, - zarrVersion: Int): Action[AnyContent] = + def 
volumeTracingFolderContentJson(annotationId: String, tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -95,8 +89,7 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingMagFolderContent(token: Option[String], - annotationId: String, + def volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = @@ -119,8 +112,7 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingMagFolderContentJson(token: Option[String], - annotationId: String, + def volumeTracingMagFolderContentJson(annotationId: String, tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = @@ -136,7 +128,7 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zArray(token: Option[String], annotationId: String, tracingId: String, mag: String): Action[AnyContent] = + def zArray(annotationId: String, tracingId: String, mag: String): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -170,7 +162,7 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zarrJsonForMag(token: Option[String], annotationId: String, tracingId: String, mag: String): Action[AnyContent] = + def zarrJsonForMag(annotationId: String, tracingId: String, mag: String): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { @@ -216,11 +208,10 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zGroup(token: Option[String], annotationId: String, tracingId: String): Action[AnyContent] = Action.async { - implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - 
Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) - } + def zGroup(annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) + } } /** @@ -229,7 +220,6 @@ class VolumeTracingZarrStreamingController @Inject()( * Used by zarr-streaming. */ def zAttrs( - token: Option[String], annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => @@ -247,7 +237,6 @@ class VolumeTracingZarrStreamingController @Inject()( } def zarrJson( - token: Option[String], annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => @@ -266,8 +255,7 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zarrSource(token: Option[String], - annotationId: String, + def zarrSource(annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int): Action[AnyContent] = @@ -290,11 +278,7 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def rawZarrCube(token: Option[String], - annotationId: String, - tracingId: String, - mag: String, - coordinates: String): Action[AnyContent] = + def rawZarrCube(annotationId: String, tracingId: String, mag: String, coordinates: String): Action[AnyContent] = Action.async { implicit request => { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index f9a1e371dcd..8787b4d32e9 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -6,76 +6,76 @@ GET /health 
@com.scalableminds.webknossos.tracingstore.controllers.Application.health # Annotations (concerns AnnotationProto, not annotation info as stored in postgres) -POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(token: Option[String], annotationId: String) -GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(token: Option[String], annotationId: String, version: Option[Long]) -POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(token: Option[String], annotationId: String) -GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(token: Option[String], annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) -GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(token: Option[String], annotationId: String) -GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(token: Option[String], annotationId: String) +POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(annotationId: String) +GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(annotationId: String, version: Option[Long]) +POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(annotationId: String) +GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) +GET /annotation/:annotationId/updateActionStatistics 
@com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) +GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) # Volume tracings -POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(token: Option[String]) -POST /volume/:annotationId/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(token: Option[String], annotationId: String, tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) -POST /volume/:annotationId/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(token: Option[String], annotationId: String, tracingId: String) -GET /volume/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) -GET /volume/:annotationId/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(token: Option[String], annotationId: String, tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) -POST /volume/:annotationId/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(token: Option[String], annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(token: Option[String], annotationId: String, tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], 
boundingBox: Option[String]) -POST /volume/:annotationId/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(token: Option[String], annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(token: Option[String], annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(token: Option[String], annotationId: String, tracingId: String, segmentId: Long) -POST /volume/:annotationId/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(token: Option[String], annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(token: Option[String], annotationId: String, tracingId: String, dryRun: Boolean) -GET /volume/:annotationId/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(token: Option[String], annotationId: String, tracingId: String) -GET /volume/:annotationId/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateSkeleton(token: Option[String], annotationId: String, tracingId: String, agglomerateId: Long) -POST /volume/:annotationId/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(token: Option[String], annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(token: 
Option[String], annotationId: String, tracingId: String) -POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple(token: Option[String]) -POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(token: Option[String], persist: Boolean) -POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(token: Option[String], persist: Boolean) +POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() +POST /volume/:annotationId/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(annotationId: String, tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) +POST /volume/:annotationId/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(annotationId: String, tracingId: String) +GET /volume/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(annotationId: String, tracingId: String, version: Option[Long]) +GET /volume/:annotationId/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(annotationId: String, tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) +POST /volume/:annotationId/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(annotationId: String, tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: 
Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /volume/:annotationId/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(annotationId: String, tracingId: String, segmentId: Long) +POST /volume/:annotationId/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(annotationId: String, tracingId: String, dryRun: Boolean) +GET /volume/:annotationId/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(annotationId: String, tracingId: String) +GET /volume/:annotationId/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateSkeleton(annotationId: String, tracingId: String, agglomerateId: Long) +POST /volume/:annotationId/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(annotationId: String, tracingId: String) +POST /volume/:annotationId/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(annotationId: String, tracingId: String) +POST /volume/getMultiple 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple +POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(persist: Boolean) +POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(persist: Boolean) # Editable Mappings # todo adapt frontend to mapping route prefix -POST /mapping/:annotationId/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(token: Option[String], annotationId: String, tracingId: String) -GET /mapping/:annotationId/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) -GET /mapping/:annotationId/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(token: Option[String], annotationId: String, tracingId: String, agglomerateId: Long) -POST /mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(token: Option[String], annotationId: String, tracingId: String) +POST /mapping/:annotationId/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(annotationId: String, tracingId: String) +GET /mapping/:annotationId/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(annotationId: String, tracingId: String, version: Option[Long]) +GET /mapping/:annotationId/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(annotationId: String, 
tracingId: String, agglomerateId: Long) +POST /mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(annotationId: String, tracingId: String) # todo adapt frontend to mapping route prefix -POST /mapping/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(token: Option[String], annotationId: String, tracingId: String) +POST /mapping/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(annotationId: String, tracingId: String) # todo adapt frontend to mapping route prefix -POST /mapping/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(token: Option[String], annotationId: String, tracingId: String) +POST /mapping/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(annotationId: String, tracingId: String) # Zarr endpoints for volume annotations # Zarr version 2 -GET /volume/zarr/json/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/json/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: 
Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/.zgroup @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(token: Option[String], annotationId: String, tracingId: String) -GET /volume/zarr/:annotationId/:tracingId/.zattrs @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(token: Option[String], annotationId: String, tracingId: String) -GET /volume/zarr/:annotationId/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/:mag/.zarray @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(token: Option[String], annotationId: String, tracingId: String, mag: String) -GET /volume/zarr/:annotationId/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], 
annotationId: String, tracingId: String, mag: String, coordinates: String) +GET /volume/zarr/json/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(annotationId: String, tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/json/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/.zgroup @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(annotationId: String, tracingId: String) +GET /volume/zarr/:annotationId/:tracingId/.zattrs @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(annotationId: String, tracingId: String) +GET /volume/zarr/:annotationId/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/:mag/ 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:annotationId/:tracingId/:mag/.zarray @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(annotationId: String, tracingId: String, mag: String) +GET /volume/zarr/:annotationId/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(annotationId: String, tracingId: String, mag: String, coordinates: String) # Zarr version 3 -GET /volume/zarr3_experimental/json/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/json/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(token: Option[String], annotationId: String, tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(token: Option[String], annotationId: String, tracingId: String, tracingName: 
Option[String], zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(token: Option[String], annotationId: String, tracingId: String) -GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(token: Option[String], annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(token: Option[String], annotationId: String, tracingId: String, mag: String) -GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(token: Option[String], annotationId: String, tracingId: String, mag: String, coordinates: String) +GET /volume/zarr3_experimental/json/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(annotationId: String, tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/json/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(annotationId: String, tracingId: String) +GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(annotationId: String, tracingId: String, mag: String) +GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(annotationId: String, tracingId: String, mag: String, coordinates: String) # Skeleton tracings -POST /skeleton/save 
@com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save(token: Option[String]) -POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple(token: Option[String]) -POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(token: Option[String], persist: Boolean) -POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(token: Option[String], persist: Boolean) -GET /skeleton/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(token: Option[String], annotationId: String, tracingId: String, version: Option[Long]) -POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple(token: Option[String]) -POST /skeleton/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(token: Option[String], annotationId: String, tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() +POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() +POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(persist: Boolean) +POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(persist: Boolean) +GET /skeleton/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(annotationId: String, tracingId: String, 
version: Option[Long]) +POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple +POST /skeleton/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(annotationId: String, tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) From 2dc833e1795987fa0d81068d8b1582c70bb2fefe Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 18 Sep 2024 11:58:27 +0200 Subject: [PATCH 067/361] apply editable mapping updates --- .../tracingstore/annotation/AnnotationWithTracings.scala | 7 +++++-- .../tracingstore/annotation/TSAnnotationService.scala | 1 + .../editablemapping/EditableMappingUpdateActions.scala | 6 ++++-- .../tracings/editablemapping/EditableMappingUpdater.scala | 2 +- 4 files changed, 11 insertions(+), 5 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index ced731c2e80..a0ba363d1cb 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing @@ -119,6 +120,8 @@ case class AnnotationWithTracings( def applyEditableMappingAction(a: 
EditableMappingUpdateAction)( implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { - updater <- getEditableMappingUpdater("tracingId") // TODO editable mapping update actions need tracing id - } yield this // TODO + updater: EditableMappingUpdater <- getEditableMappingUpdater(a.actionTracingId).toFox // TODO editable mapping update actions need tracing id + info <- getEditableMappingInfo(a.actionTracingId).toFox + _ <- updater.applyOneUpdate(info, a) + } yield this // TODO replace info } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index eb422d9fff1..13e0eab006a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -281,6 +281,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl else { for { updated <- updateIter(Some(annotation), updates) + // TODO flush editable mapping updaters } yield updated.withVersion(targetVersion) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 7f037238097..2bbbda3643a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -1,11 +1,11 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.scalableminds.util.geometry.Vec3Int -import 
com.scalableminds.webknossos.tracingstore.annotation.UpdateAction +import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} import play.api.libs.json.Format.GenericFormat import play.api.libs.json._ -trait EditableMappingUpdateAction extends UpdateAction +trait EditableMappingUpdateAction extends LayerUpdateAction // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. @@ -15,6 +15,7 @@ case class SplitAgglomerateUpdateAction(agglomerateId: Long, segmentId1: Option[Long], segmentId2: Option[Long], mag: Vec3Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -40,6 +41,7 @@ case class MergeAgglomerateUpdateAction(agglomerateId1: Long, segmentId1: Option[Long], segmentId2: Option[Long], mag: Vec3Int, + actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index d0baa685453..8334d5138e1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -110,7 +110,7 @@ class EditableMappingUpdater( mappingFox } - private def applyOneUpdate(mapping: EditableMappingInfo, update: UpdateAction)( + def applyOneUpdate(mapping: EditableMappingInfo, update: UpdateAction)( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = update match { case splitAction: SplitAgglomerateUpdateAction => 
From 0dda12ad26c0ba7778138fb75f31b9caa1a1639f Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 19 Sep 2024 11:47:42 +0200 Subject: [PATCH 068/361] update editable mapping info in AnnotationWithTracings --- .../annotation/AnnotationWithTracings.scala | 40 ++++++------------- 1 file changed, 13 insertions(+), 27 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index a0ba363d1cb..bf5cc93b7fc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -59,54 +59,38 @@ case class AnnotationWithTracings( def version: Long = annotation.version def addTracing(a: AddLayerAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings( - annotation.copy( + this.copy( + annotation = annotation.copy( layers = annotation.layers :+ AnnotationLayerProto( a.tracingId, a.layerParameters.name.getOrElse(AnnotationLayer.defaultNameForType(a.layerParameters.typ)), `type` = AnnotationLayerType.toProto(a.layerParameters.typ) - )), - tracingsById, - editableMappingsByTracingId + )) ) def deleteTracing(a: DeleteLayerAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)), - tracingsById, - editableMappingsByTracingId) + this.copy(annotation = annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId))) def updateLayerMetadata(a: UpdateLayerMetadataAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(layers = annotation.layers.map(l => - if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l)), - tracingsById, - editableMappingsByTracingId) 
+ this.copy(annotation = annotation.copy(layers = annotation.layers.map(l => + if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l))) def updateMetadata(a: UpdateMetadataAnnotationUpdateAction): AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(name = a.name, description = a.description), - tracingsById, - editableMappingsByTracingId) - - def incrementVersion: AnnotationWithTracings = - AnnotationWithTracings(annotation.copy(version = annotation.version + 1L), - tracingsById, - editableMappingsByTracingId) + this.copy(annotation = annotation.copy(name = a.name, description = a.description)) def withVersion(newVersion: Long): AnnotationWithTracings = { val tracingsUpdated = tracingsById.view.mapValues { case Left(t: SkeletonTracing) => Left(t.withVersion(newVersion)) case Right(t: VolumeTracing) => Right(t.withVersion(newVersion)) } - AnnotationWithTracings(annotation.copy(version = newVersion), tracingsUpdated.toMap, editableMappingsByTracingId) + this.copy(annotation = annotation.copy(version = newVersion), tracingsById = tracingsUpdated.toMap) } def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { skeletonTracing <- getSkeleton(a.actionTracingId) updated = a.applyOn(skeletonTracing) - } yield - AnnotationWithTracings(annotation, - tracingsById.updated(a.actionTracingId, Left(updated)), - editableMappingsByTracingId) + } yield this.copy(tracingsById = tracingsById.updated(a.actionTracingId, Left(updated))) def applyVolumeAction(a: ApplyableVolumeUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { @@ -122,6 +106,8 @@ case class AnnotationWithTracings( for { updater: EditableMappingUpdater <- getEditableMappingUpdater(a.actionTracingId).toFox // TODO editable mapping update actions need tracing id info <- getEditableMappingInfo(a.actionTracingId).toFox - _ <- updater.applyOneUpdate(info, a) - } yield this // TODO replace info + updated <- 
updater.applyOneUpdate(info, a) + } yield + this.copy( + editableMappingsByTracingId = editableMappingsByTracingId.updated(a.actionTracingId, (updated, updater))) } From 520d54b4f66acbc51e431e401cc5e25e8b443816 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 19 Sep 2024 13:57:41 +0200 Subject: [PATCH 069/361] iterate on editable mappings --- frontend/javascripts/admin/admin_rest_api.ts | 8 +++++--- .../oxalis/model/sagas/proofread_saga.ts | 8 ++++++-- .../webknossos/datastore/rpc/RPCRequest.scala | 3 ++- .../annotation/TSAnnotationService.scala | 20 +++++++++++-------- .../EditableMappingController.scala | 15 +++++++------- .../EditableMappingService.scala | 10 ++++++---- ...alableminds.webknossos.tracingstore.routes | 3 --- 7 files changed, 39 insertions(+), 28 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index e341bf573a1..75faa599ae8 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1619,7 +1619,7 @@ export function makeMappingEditable( ): Promise { return doWithToken((token) => Request.receiveJSON( - `${tracingStoreUrl}/tracings/volume/${annotationId}/${tracingId}/makeMappingEditable?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/makeMappingEditable?token=${token}`, { method: "POST", }, @@ -2310,6 +2310,7 @@ type MinCutTargetEdge = { }; export async function getEdgesForAgglomerateMinCut( tracingStoreUrl: string, + annotationId: string, tracingId: string, segmentsInfo: { segmentId1: NumberLike; @@ -2321,7 +2322,7 @@ export async function getEdgesForAgglomerateMinCut( ): Promise> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateGraphMinCut?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/agglomerateGraphMinCut?token=${token}`, { data: { ...segmentsInfo, @@ -2343,6 +2344,7 @@ 
export type NeighborInfo = { export async function getNeighborsForAgglomerateNode( tracingStoreUrl: string, + annotationId: string, tracingId: string, segmentInfo: { segmentId: NumberLike; mag: Vector3; @@ -2352,7 +2354,7 @@ export async function getNeighborsForAgglomerateNode( ): Promise { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateGraphNeighbors?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/agglomerateGraphNeighbors?token=${token}`, { data: { ...segmentInfo, diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index e4e911bf1c6..b9776143517 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -284,10 +284,10 @@ function* createEditableMapping(): Saga { ); // The server increments the volume tracing's version by 1 when switching the mapping to an editable one yield* put(setVersionNumberAction(upToDateVolumeTracing.version + 1, "volume", volumeTracingId)); - yield* put(setMappingNameAction(layerName, serverEditableMapping.tracingId, "HDF5")); + yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); yield* put(setHasEditableMappingAction()); yield* put(initializeEditableMappingAction(serverEditableMapping)); - return serverEditableMapping.tracingId; + return volumeTracingId; } function* ensureHdf5MappingIsEnabled(layerName: string): Saga { @@ -546,6 +546,7 @@ function* performMinCut( } const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); + const annotationId = yield* select((state) => state.tracing.annotationId); const segmentsInfo = { segmentId1: sourceSegmentId, segmentId2: targetSegmentId, @@ -557,6 +558,7 @@ function* performMinCut( const edgesToRemove = yield* call( getEdgesForAgglomerateMinCut, tracingStoreUrl, + annotationId, 
volumeTracingId, segmentsInfo, ); @@ -607,6 +609,7 @@ function* performCutFromNeighbors( { didCancel: false; neighborInfo: NeighborInfo } | { didCancel: true; neighborInfo?: null } > { const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); + const annotationId = yield* select((state) => state.tracing.annotationId); const segmentsInfo = { segmentId, mag: agglomerateFileMag, @@ -617,6 +620,7 @@ function* performCutFromNeighbors( const neighborInfo = yield* call( getNeighborsForAgglomerateNode, tracingStoreUrl, + annotationId, volumeTracingId, segmentsInfo, ); diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index d26675b7e6d..4bd9e2872eb 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -11,6 +11,7 @@ import play.api.libs.ws._ import scalapb.{GeneratedMessage, GeneratedMessageCompanion} import java.io.File +import java.nio.charset.StandardCharsets import scala.concurrent.ExecutionContext import scala.concurrent.duration._ @@ -203,7 +204,7 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: Full(result) } else { val errorMsg = s"Unsuccessful WS request to $url (ID: $id)." + - s"Status: ${result.status}. Response: ${result.bodyAsBytes.map(_.toChar).mkString.take(2000)}" + s"Status: ${result.status}. 
Response: ${new String(result.bodyAsBytes.toArray, StandardCharsets.UTF_8).take(2000)}" logger.error(errorMsg) Failure(errorMsg.take(400)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 13e0eab006a..0cbe103f4d9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox -import com.scalableminds.util.tools.Fox.option2Fox +import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing @@ -23,6 +23,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ApplyableVolumeUpdateAction, BucketMutatingVolumeUpdateAction, + UpdateMappingNameVolumeAction, VolumeUpdateAction } import com.scalableminds.webknossos.tracingstore.tracings.{ @@ -94,13 +95,16 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl Fox.successful(annotationWithTracings.updateMetadata(a)) case a: SkeletonUpdateAction => annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" + case a: UpdateMappingNameVolumeAction if a.isEditable.contains(true) => + + ??? // TODO in case mapping is made editable, add it to the AnnotationWithTracings object here + case a: 
ApplyableVolumeUpdateAction => annotationWithTracings.applyVolumeAction(a) case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) case _: BucketMutatingVolumeUpdateAction => Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. - // TODO make Mapping Editable // Note: UpdateBucketVolumeActions are not handled here, but instead eagerly on saving. case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") } @@ -152,8 +156,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl implicit ec: ExecutionContext, tc: TokenContext): Fox[EditableMappingInfo] = for { - annotation <- getWithTracings(annotationId, version, List(tracingId), List.empty) - tracing <- annotation.getEditableMappingInfo(tracingId) + annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId)) + tracing <- annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" } yield tracing private def applyPendingUpdates(annotation: AnnotationProto, @@ -183,6 +187,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl currentMaterializedVersion: Long, targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { val volumeIdsWithEditableMapping = annotationWithTracings.volumesIdsThatHaveEditableMapping + logger.info(s"fetching editable mappings ${volumeIdsWithEditableMapping.mkString(",")}") // TODO intersect with editable mapping updates? 
for { editableMappingInfos <- Fox.serialCombined(volumeIdsWithEditableMapping) { volumeTracingId => @@ -236,9 +241,8 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl case u: VolumeUpdateAction => Some(u.actionTracingId) case _ => None } ++ requestedVolumeTracingIds).distinct - // TODO fetch editable mappings + instantiate editableMappingUpdaters/buffers if there are updates for them - val editableMappingsMap: Map[String, (EditableMappingInfo, EditableMappingUpdater)] = Map.empty - logger.info(s"fetching volumes ${volumeTracingIds} and skeletons $skeletonTracingIds") + + logger.info(s"fetching volumes $volumeTracingIds and skeletons $skeletonTracingIds") for { skeletonTracings <- Fox.serialCombined(skeletonTracingIds)( id => @@ -255,7 +259,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl volumeTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = volumeTracingIds .zip(volumeTracings.map(versioned => Right[SkeletonTracing, VolumeTracing](versioned.value))) .toMap - } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, editableMappingsMap) + } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, Map.empty) } private def applyUpdates(annotation: AnnotationWithTracings, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 6ab093d315a..f61fc5bcf59 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -6,11 +6,8 @@ import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import 
com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.controllers.Controller -import com.scalableminds.webknossos.datastore.services.{ - AccessTokenService, - EditableMappingSegmentListResult, - UserAccessRequest -} +import com.scalableminds.webknossos.datastore.services.{EditableMappingSegmentListResult, UserAccessRequest} +import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService import com.scalableminds.webknossos.tracingstore.annotation.{ AnnotationTransactionService, TSAnnotationService, @@ -30,7 +27,7 @@ import scala.concurrent.ExecutionContext class EditableMappingController @Inject()(volumeTracingService: VolumeTracingService, annotationService: TSAnnotationService, - accessTokenService: AccessTokenService, + accessTokenService: TracingStoreAccessTokenService, editableMappingService: EditableMappingService, annotationTransactionService: AnnotationTransactionService)( implicit ec: ExecutionContext, @@ -174,7 +171,11 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - edges <- editableMappingService.agglomerateGraphMinCut(tracingId, request.body, remoteFallbackLayer) + editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) + edges <- editableMappingService.agglomerateGraphMinCut(tracingId, + editableMappingInfo, + request.body, + remoteFallbackLayer) } yield Ok(Json.toJson(edges)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 3345d9d94be..19c7aeb5660 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -581,12 +581,14 @@ class EditableMappingService @Inject()( } } yield agglomerateGraph - def agglomerateGraphMinCut(tracingId: String, parameters: MinCutParameters, remoteFallbackLayer: RemoteFallbackLayer)( - implicit tc: TokenContext): Fox[List[EdgeWithPositions]] = + def agglomerateGraphMinCut( + tracingId: String, + editableMappingInfo: EditableMappingInfo, + parameters: MinCutParameters, + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[List[EdgeWithPositions]] = for { // called here to ensure updates are applied - mapping <- getInfo(tracingId, version = None, remoteFallbackLayer) - agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, + agglomerateGraph <- getAgglomerateGraphForIdWithFallback(editableMappingInfo, tracingId, None, parameters.agglomerateId, diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 8787b4d32e9..5cb35653209 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -35,14 +35,11 @@ POST /volume/mergedFromIds @c POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(persist: Boolean) # Editable Mappings -# todo adapt frontend to mapping route prefix POST /mapping/:annotationId/:tracingId/makeMappingEditable 
@com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(annotationId: String, tracingId: String) GET /mapping/:annotationId/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(annotationId: String, tracingId: String, version: Option[Long]) GET /mapping/:annotationId/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(annotationId: String, tracingId: String, agglomerateId: Long) POST /mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(annotationId: String, tracingId: String) -# todo adapt frontend to mapping route prefix POST /mapping/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(annotationId: String, tracingId: String) -# todo adapt frontend to mapping route prefix POST /mapping/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(annotationId: String, tracingId: String) # Zarr endpoints for volume annotations From 10206508ac3df4e1f1bb7d0651e11e15fd51cbf0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 23 Sep 2024 10:27:55 +0200 Subject: [PATCH 070/361] =?UTF-8?q?add=20editable=20mapping=20to=20Annotat?= =?UTF-8?q?ionWithTracings=20when=20it=E2=80=99s=20created?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../annotation/AnnotationWithTracings.scala | 11 ++++- .../annotation/TSAnnotationService.scala | 49 +++++++++++++------ 2 files changed, 45 insertions(+), 15 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index bf5cc93b7fc..aa721910fba 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -12,7 +12,10 @@ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingUpdater } import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction -import com.scalableminds.webknossos.tracingstore.tracings.volume.ApplyableVolumeUpdateAction +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ApplyableVolumeUpdateAction, + UpdateMappingNameVolumeAction +} import net.liftweb.common.{Box, Failure, Full} import scala.concurrent.ExecutionContext @@ -86,6 +89,12 @@ case class AnnotationWithTracings( this.copy(annotation = annotation.copy(version = newVersion), tracingsById = tracingsUpdated.toMap) } + def addEditableMapping(volumeTracingId: String, + editableMappingInfo: EditableMappingInfo, + updater: EditableMappingUpdater): AnnotationWithTracings = + this.copy(editableMappingsByTracingId = + editableMappingsByTracingId.updated(volumeTracingId, (editableMappingInfo, updater))) + def applySkeletonAction(a: SkeletonUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { skeletonTracing <- getSkeleton(a.actionTracingId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 0cbe103f4d9..f2c14bb679b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -29,7 +29,8 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, RemoteFallbackLayer, - TracingDataStore + TracingDataStore, + VersionedKeyValuePair } import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, @@ -81,8 +82,11 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield updateActionGroups.reverse.flatten } - private def applyUpdate(annotationWithTracings: AnnotationWithTracings, updateAction: UpdateAction)( - implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + private def applyUpdate( + annotationWithTracings: AnnotationWithTracings, + updateAction: UpdateAction, + targetVersion: Long // Note: this is not the target version of this one update, but of all pending + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { updated <- updateAction match { case a: AddLayerAnnotationUpdateAction => @@ -96,16 +100,16 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl case a: SkeletonUpdateAction => annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" case a: UpdateMappingNameVolumeAction if a.isEditable.contains(true) => - - TODO in case mapping is made editable, add it to the AnnotationWithTracings object here - + for { + withNewEditableMapping <- addEditableMapping(annotationWithTracings, a, targetVersion) + withApplyedVolumeAction <- withNewEditableMapping.applyVolumeAction(a) + } yield withApplyedVolumeAction case a: ApplyableVolumeUpdateAction => annotationWithTracings.applyVolumeAction(a) case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) case _: BucketMutatingVolumeUpdateAction => Fox.successful(annotationWithTracings) // No-op, as bucket-mutating 
actions are performed eagerly, so not here. - // Note: UpdateBucketVolumeActions are not handled here, but instead eagerly on saving. case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") } } yield updated @@ -160,6 +164,18 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl tracing <- annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" } yield tracing + // move the functions that construct the AnnotationWithTracigns elsewhere? + private def addEditableMapping(annotationWithTracings: AnnotationWithTracings, + action: UpdateMappingNameVolumeAction, + targetVersion: Long)(implicit tc: TokenContext): Fox[AnnotationWithTracings] = + for { + editableMappingInfo <- getEditableMappingInfoFromStore(action.actionTracingId, annotationWithTracings.version) + updater = editableMappingUpdaterFor(action.actionTracingId, + editableMappingInfo.value, + annotationWithTracings.version, + targetVersion) + } yield annotationWithTracings.addEditableMapping(action.actionTracingId, editableMappingInfo.value, updater) + private def applyPendingUpdates(annotation: AnnotationProto, annotationId: String, targetVersionOpt: Option[Long], @@ -191,8 +207,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl // TODO intersect with editable mapping updates? 
for { editableMappingInfos <- Fox.serialCombined(volumeIdsWithEditableMapping) { volumeTracingId => - tracingDataStore.editableMappingsInfo.get(volumeTracingId, version = Some(annotationWithTracings.version))( - fromProtoBytes[EditableMappingInfo]) + getEditableMappingInfoFromStore(volumeTracingId, annotationWithTracings.version) } } yield annotationWithTracings.copy( @@ -208,6 +223,11 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl .toMap) } + private def getEditableMappingInfoFromStore(volumeTracingId: String, + version: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] = + tracingDataStore.editableMappingsInfo.get(volumeTracingId, version = Some(version))( + fromProtoBytes[EditableMappingInfo]) + private def editableMappingUpdaterFor(tracingId: String, editableMappingInfo: EditableMappingInfo, currentMaterializedVersion: Long, @@ -262,10 +282,11 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, Map.empty) } - private def applyUpdates(annotation: AnnotationWithTracings, - annotationId: String, - updates: List[UpdateAction], - targetVersion: Long)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { + private def applyUpdates( + annotation: AnnotationWithTracings, + annotationId: String, + updates: List[UpdateAction], + targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = @@ -276,7 +297,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl case List() => Fox.successful(annotationWithTracings) case RevertToVersionUpdateAction(sourceVersion, _, _, _) :: tail => ??? 
- case update :: tail => updateIter(applyUpdate(annotationWithTracings, update), tail) + case update :: tail => updateIter(applyUpdate(annotationWithTracings, update, targetVersion), tail) } case _ => annotationWithTracingsFox } From e21bc6e8e85a05e6e2bd540809db9dfeb53c0285 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 23 Sep 2024 10:58:45 +0200 Subject: [PATCH 071/361] fix agglomeratesForSegments route --- frontend/javascripts/admin/admin_rest_api.ts | 3 ++- frontend/javascripts/oxalis/model/sagas/mapping_saga.ts | 1 + frontend/javascripts/oxalis/model/sagas/proofread_saga.ts | 6 ++++-- .../tracingstore/annotation/TSAnnotationService.scala | 2 +- .../controllers/EditableMappingController.scala | 7 ++----- 5 files changed, 10 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 75faa599ae8..9e3e4ebda6a 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -2122,6 +2122,7 @@ export async function getAgglomeratesForSegmentsFromDatastore( tracingStoreUrl: string, + annotationId: string, tracingId: string, segmentIds: Array, ): Promise { @@ -2131,7 +2132,7 @@ export async function getAgglomeratesForSegmentsFromTracingstore Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomeratesForSegments?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/agglomeratesForSegments?token=${token}`, { method: "POST", body: segmentIdBuffer, diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index 1a0044c4370..ddfb14597ca 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -455,6 +455,7 @@ function* updateLocalHdf5Mapping( ? 
yield* call( getAgglomeratesForSegmentsFromTracingstore, annotation.tracingStore.url, + annotation.annotationId, editableMapping.tracingId, Array.from(newSegmentIds), ) diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index b9776143517..653f5f5919a 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -1282,12 +1282,14 @@ function* splitAgglomerateInMapping( .filter(([_segmentId, agglomerateId]) => agglomerateId === comparableSourceAgglomerateId) .map(([segmentId, _agglomerateId]) => segmentId); - const tracingStoreHost = yield* select((state) => state.tracing.tracingStore.url); + const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); + const annotationId = yield* select((state) => state.tracing.annotationId); // Ask the server to map the (split) segment ids. This creates a partial mapping // that only contains these ids. 
const mappingAfterSplit = yield* call( getAgglomeratesForSegmentsFromTracingstore, - tracingStoreHost, + tracingStoreUrl, + annotationId, volumeTracingId, splitSegmentIds, ); diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index f2c14bb679b..c5e9fb6fb13 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -160,7 +160,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl implicit ec: ExecutionContext, tc: TokenContext): Fox[EditableMappingInfo] = for { - annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId)) + annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId)) ?~> "getWithTracings.failed" tracing <- annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" } yield tracing diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index f61fc5bcf59..77fe91f0be8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -147,14 +147,11 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, 
tracingId) - (editableMappingInfo, editableMappingVersion) <- editableMappingService.getInfoAndActualVersion( - tracingId, - requestedVersion = None, - remoteFallbackLayer = remoteFallbackLayer) + editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId, version = None) relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( request.body.items.toSet, editableMappingInfo, - editableMappingVersion, + tracing.version, tracingId, remoteFallbackLayer) agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) From f92d9330453666601d526b387c81305defbbbce4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 23 Sep 2024 11:39:06 +0200 Subject: [PATCH 072/361] fix mincut route --- .../annotation/AnnotationWithTracings.scala | 2 +- .../EditableMappingController.scala | 21 ++++++++++++- .../controllers/VolumeTracingController.scala | 15 ---------- .../EditableMappingService.scala | 30 +++++++++---------- .../EditableMappingUpdater.scala | 14 ++++----- ...alableminds.webknossos.tracingstore.routes | 3 +- 6 files changed, 45 insertions(+), 40 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index aa721910fba..a20643fa51a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -54,7 +54,7 @@ case class AnnotationWithTracings( (info, _) <- editableMappingsByTracingId.get(tracingId) } yield info - def getEditableMappingUpdater(tracingId: String): Option[EditableMappingUpdater] = + private def getEditableMappingUpdater(tracingId: String): Option[EditableMappingUpdater] = for { (_, updater) <- 
editableMappingsByTracingId.get(tracingId) } yield updater diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 77fe91f0be8..e93196c3b12 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -126,7 +126,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService - .getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer) + .getAgglomerateGraphForId(tracingId, tracing.version, agglomerateId, remoteFallbackLayer) .futureBox segmentIds <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) @@ -170,6 +170,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) edges <- editableMappingService.agglomerateGraphMinCut(tracingId, + tracing.version, editableMappingInfo, request.body, remoteFallbackLayer) @@ -187,10 +188,28 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(tracingId, 
+ tracing.version, request.body, remoteFallbackLayer) } yield Ok(Json.obj("segmentId" -> segmentId, "neighbors" -> Json.toJson(edges))) } } } + + def agglomerateSkeleton(annotationId: String, tracingId: String, agglomerateId: Long): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + tracing <- volumeTracingService.find(annotationId, tracingId) + _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" + editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) + remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback(tracingId, + tracing.version, + editableMappingInfo, + remoteFallbackLayer, + agglomerateId) + } yield Ok(agglomerateSkeletonBytes) + } + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index d0cd69fd7cf..8a2c81a3006 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -328,21 +328,6 @@ class VolumeTracingController @Inject()( } } - def agglomerateSkeleton(annotationId: String, tracingId: String, agglomerateId: Long): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - for { - tracing <- tracingService.find(annotationId, tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" - 
mappingName <- tracing.mappingName ?~> "annotation.agglomerateSkeleton.noMappingSet" - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback(mappingName, - remoteFallbackLayer, - agglomerateId) - } yield Ok(agglomerateSkeletonBytes) - } - } - def getSegmentVolume(annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 19c7aeb5660..0ff3d9d9922 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -416,12 +416,12 @@ class EditableMappingService @Inject()( } yield editableMappingForSegmentIds ++ baseMappingSubset def getAgglomerateSkeletonWithFallback(tracingId: String, + version: Long, + editableMappingInfo: EditableMappingInfo, remoteFallbackLayer: RemoteFallbackLayer, agglomerateId: Long)(implicit tc: TokenContext): Fox[Array[Byte]] = for { - // called here to ensure updates are applied - editableMappingInfo <- getInfo(tracingId, version = None, remoteFallbackLayer) - agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId, remoteFallbackLayer).futureBox skeletonBytes <- agglomerateGraphBox match { case Full(agglomerateGraph) => 
Fox.successful(agglomerateGraphToSkeleton(tracingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) @@ -546,19 +546,17 @@ class EditableMappingService @Inject()( } yield result def getAgglomerateGraphForId( - mappingId: String, + tracingId: String, + version: Long, agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer, - requestedVersion: Option[Long] = None)(implicit tc: TokenContext): Fox[AgglomerateGraph] = + remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[AgglomerateGraph] = for { - // called here to ensure updates are applied - (_, version) <- getInfoAndActualVersion(mappingId, requestedVersion, remoteFallbackLayer) agglomerateGraph <- agglomerateToGraphCache.getOrLoad( - (mappingId, agglomerateId, version), + (tracingId, agglomerateId, version), _ => for { graphBytes: VersionedKeyValuePair[Array[Byte]] <- tracingDataStore.editableMappingsAgglomerateToGraph - .get(agglomerateGraphKey(mappingId, agglomerateId), Some(version), mayBeEmpty = Some(true)) + .get(agglomerateGraphKey(tracingId, agglomerateId), Some(version), mayBeEmpty = Some(true)) graphParsed <- if (isRevertedElement(graphBytes.value)) Fox.empty else fromProtoBytes[AgglomerateGraph](graphBytes.value).toFox } yield graphParsed @@ -568,11 +566,11 @@ class EditableMappingService @Inject()( def getAgglomerateGraphForIdWithFallback( mapping: EditableMappingInfo, tracingId: String, - version: Option[Long], + version: Long, agglomerateId: Long, remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[AgglomerateGraph] = for { - agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, agglomerateId, remoteFallbackLayer, version).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId, remoteFallbackLayer).futureBox agglomerateGraph <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph) case Empty => @@ -583,6 +581,7 @@ class EditableMappingService @Inject()( def 
agglomerateGraphMinCut( tracingId: String, + version: Long, editableMappingInfo: EditableMappingInfo, parameters: MinCutParameters, remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[List[EdgeWithPositions]] = @@ -590,9 +589,9 @@ class EditableMappingService @Inject()( // called here to ensure updates are applied agglomerateGraph <- getAgglomerateGraphForIdWithFallback(editableMappingInfo, tracingId, - None, + version, parameters.agglomerateId, - remoteFallbackLayer) + remoteFallbackLayer) ?~> "getAgglomerateGraph.failed" edgesToCut <- minCut(agglomerateGraph, parameters.segmentId1, parameters.segmentId2) ?~> "Could not calculate min-cut on agglomerate graph." edgesWithPositions = annotateEdgesWithPositions(edgesToCut, agglomerateGraph) } yield edgesWithPositions @@ -651,6 +650,7 @@ class EditableMappingService @Inject()( def agglomerateGraphNeighbors( tracingId: String, + version: Long, parameters: NeighborsParameters, remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[(Long, Seq[NodeWithPosition])] = for { @@ -658,7 +658,7 @@ class EditableMappingService @Inject()( mapping <- getInfo(tracingId, version = None, remoteFallbackLayer) agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, tracingId, - None, + version, parameters.agglomerateId, remoteFallbackLayer) neighborNodes = neighbors(agglomerateGraph, parameters.segmentId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 8334d5138e1..3d5a2d0b5d3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -235,9 +235,11 @@ class 
EditableMappingUpdater( val key = agglomerateGraphKey(tracingId, agglomerateId) val fromBufferOpt = getFromAgglomerateToGraphBuffer(key) fromBufferOpt.map(Fox.successful(_)).getOrElse { - editableMappingService - .getAgglomerateGraphForIdWithFallback(mapping, tracingId, Some(oldVersion), agglomerateId, remoteFallbackLayer, - )(tokenContext) + editableMappingService.getAgglomerateGraphForIdWithFallback(mapping, + tracingId, + oldVersion, + agglomerateId, + remoteFallbackLayer)(tokenContext) } } @@ -441,10 +443,8 @@ class EditableMappingUpdater( for { agglomerateId <- agglomerateIdFromAgglomerateGraphKey(graphKey) _ <- editableMappingService - .getAgglomerateGraphForId(tracingId, - agglomerateId, - remoteFallbackLayer, - Some(revertAction.sourceVersion))(tokenContext) + .getAgglomerateGraphForId(tracingId, revertAction.sourceVersion, agglomerateId, remoteFallbackLayer)( + tokenContext) .futureBox .map { case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 5cb35653209..a7b9619e365 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -27,7 +27,6 @@ POST /volume/:annotationId/:tracingId/segmentIndex/:segmentId @c POST /volume/:annotationId/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(annotationId: String, tracingId: String) POST /volume/:annotationId/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(annotationId: String, tracingId: String, dryRun: Boolean) GET /volume/:annotationId/:tracingId/findData 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(annotationId: String, tracingId: String) -GET /volume/:annotationId/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.agglomerateSkeleton(annotationId: String, tracingId: String, agglomerateId: Long) POST /volume/:annotationId/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(annotationId: String, tracingId: String) POST /volume/:annotationId/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(annotationId: String, tracingId: String) POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple @@ -41,6 +40,8 @@ GET /mapping/:annotationId/:tracingId/segmentsForAgglomerate @c POST /mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(annotationId: String, tracingId: String) POST /mapping/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(annotationId: String, tracingId: String) POST /mapping/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(annotationId: String, tracingId: String) +# TODO rename +GET /volume/:annotationId/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(annotationId: String, tracingId: String, agglomerateId: Long) # Zarr endpoints for volume annotations # Zarr version 2 From 19f39e5b96b6386572b6feb3681e80522190528d Mon Sep 17 00:00:00 2001 From: Florian M 
Date: Mon, 23 Sep 2024 11:49:07 +0200 Subject: [PATCH 073/361] wip build correct remoteFallbackLayer for updater --- .../annotation/TSAnnotationService.scala | 61 +++++++++++-------- 1 file changed, 35 insertions(+), 26 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c5e9fb6fb13..ef59199f212 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -27,6 +27,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeUpdateAction } import com.scalableminds.webknossos.tracingstore.tracings.{ + FallbackDataHelper, KeyValueStoreImplicits, RemoteFallbackLayer, TracingDataStore, @@ -49,6 +50,7 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl remoteDatastoreClient: TSRemoteDatastoreClient, tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits + with FallbackDataHelper with LazyLogging { def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit tc: TokenContext): Fox[Unit] = @@ -165,15 +167,18 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl } yield tracing // move the functions that construct the AnnotationWithTracigns elsewhere? 
- private def addEditableMapping(annotationWithTracings: AnnotationWithTracings, - action: UpdateMappingNameVolumeAction, - targetVersion: Long)(implicit tc: TokenContext): Fox[AnnotationWithTracings] = + private def addEditableMapping( + annotationWithTracings: AnnotationWithTracings, + action: UpdateMappingNameVolumeAction, + targetVersion: Long)(implicit tc: TokenContext, ec: ExecutionContext): Fox[AnnotationWithTracings] = for { editableMappingInfo <- getEditableMappingInfoFromStore(action.actionTracingId, annotationWithTracings.version) - updater = editableMappingUpdaterFor(action.actionTracingId, - editableMappingInfo.value, - annotationWithTracings.version, - targetVersion) + volumeTracing <- annotationWithTracings.getVolume(action.actionTracingId).toFox + updater <- editableMappingUpdaterFor(action.actionTracingId, + volumeTracing, + editableMappingInfo.value, + annotationWithTracings.version, + targetVersion) } yield annotationWithTracings.addEditableMapping(action.actionTracingId, editableMappingInfo.value, updater) private def applyPendingUpdates(annotation: AnnotationProto, @@ -216,7 +221,9 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl keyValuePair => (keyValuePair.key, (keyValuePair.value, + // TODO this returns Fox now editableMappingUpdaterFor(keyValuePair.key, + // TODO fetch volume tracing keyValuePair.value, currentMaterializedVersion, targetVersion)))) @@ -228,25 +235,27 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl tracingDataStore.editableMappingsInfo.get(volumeTracingId, version = Some(version))( fromProtoBytes[EditableMappingInfo]) - private def editableMappingUpdaterFor(tracingId: String, - editableMappingInfo: EditableMappingInfo, - currentMaterializedVersion: Long, - targetVersion: Long)(implicit tc: TokenContext): EditableMappingUpdater = { - val remoteFallbackLayer - : RemoteFallbackLayer = RemoteFallbackLayer("todo", "todo", "todo", ElementClassProto.uint8) // 
TODO - new EditableMappingUpdater( - tracingId, - editableMappingInfo.baseMappingName, - currentMaterializedVersion, - targetVersion, - remoteFallbackLayer, - tc, - remoteDatastoreClient, - editableMappingService, - tracingDataStore, - relyOnAgglomerateIds = false // TODO - ) - } + private def editableMappingUpdaterFor( + tracingId: String, + volumeTracing: VolumeTracing, + editableMappingInfo: EditableMappingInfo, + currentMaterializedVersion: Long, + targetVersion: Long)(implicit tc: TokenContext, ec: ExecutionContext): Fox[EditableMappingUpdater] = + for { + remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(volumeTracing, tracingId) + } yield + new EditableMappingUpdater( + tracingId, + editableMappingInfo.baseMappingName, + currentMaterializedVersion, + targetVersion, + remoteFallbackLayer, + tc, + remoteDatastoreClient, + editableMappingService, + tracingDataStore, + relyOnAgglomerateIds = false // TODO should we? + ) private def findTracingsForUpdates( annotation: AnnotationProto, From 831bf7a3f710c4eda37df75d7978e30cab20d295 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 23 Sep 2024 13:01:12 +0200 Subject: [PATCH 074/361] pass volumetracing info to updater --- .../annotation/AnnotationWithTracings.scala | 4 +-- .../annotation/TSAnnotationService.scala | 35 ++++++++----------- 2 files changed, 17 insertions(+), 22 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index a20643fa51a..d0435b1b35e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -43,9 +43,9 @@ case class AnnotationWithTracings( } } yield volumeTracing - def volumesIdsThatHaveEditableMapping: 
List[String] = + def volumesThatHaveEditableMapping: List[(VolumeTracing, String)] = tracingsById.view.flatMap { - case (id, Right(vt: VolumeTracing)) if vt.getHasEditableMapping => Some(id) + case (id, Right(vt: VolumeTracing)) if vt.getHasEditableMapping => Some((vt, id)) case _ => None }.toList diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index ef59199f212..e1b3ae531dd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -45,9 +45,9 @@ import play.api.libs.json.{JsObject, JsValue, Json} import javax.inject.Inject import scala.concurrent.ExecutionContext -class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosClient, +class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknossosClient, editableMappingService: EditableMappingService, - remoteDatastoreClient: TSRemoteDatastoreClient, + val remoteDatastoreClient: TSRemoteDatastoreClient, tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits with FallbackDataHelper @@ -207,27 +207,22 @@ class TSAnnotationService @Inject()(remoteWebknossosClient: TSRemoteWebknossosCl updates: List[UpdateAction], currentMaterializedVersion: Long, targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { - val volumeIdsWithEditableMapping = annotationWithTracings.volumesIdsThatHaveEditableMapping - logger.info(s"fetching editable mappings ${volumeIdsWithEditableMapping.mkString(",")}") + val volumeWithEditableMapping = annotationWithTracings.volumesThatHaveEditableMapping + logger.info(s"fetching editable mappings ${volumeWithEditableMapping.map(_._2).mkString(",")}") // TODO intersect with editable 
mapping updates? for { - editableMappingInfos <- Fox.serialCombined(volumeIdsWithEditableMapping) { volumeTracingId => - getEditableMappingInfoFromStore(volumeTracingId, annotationWithTracings.version) + idInfoUpdaterTuples <- Fox.serialCombined(volumeWithEditableMapping) { + case (volumeTracing, volumeTracingId) => + for { + editableMappingInfo <- getEditableMappingInfoFromStore(volumeTracingId, annotationWithTracings.version) + updater <- editableMappingUpdaterFor(volumeTracingId, + volumeTracing, + editableMappingInfo.value, + currentMaterializedVersion, + targetVersion) + } yield (editableMappingInfo.key, (editableMappingInfo.value, updater)) } - } yield - annotationWithTracings.copy( - editableMappingsByTracingId = editableMappingInfos - .map( - keyValuePair => - (keyValuePair.key, - (keyValuePair.value, - // TODO this returns Fox now - editableMappingUpdaterFor(keyValuePair.key, - // TODO fetch volume tracing - keyValuePair.value, - currentMaterializedVersion, - targetVersion)))) - .toMap) + } yield annotationWithTracings.copy(editableMappingsByTracingId = idInfoUpdaterTuples.toMap) } private def getEditableMappingInfoFromStore(volumeTracingId: String, From e5cd917c3ccf9a704feb45ae1cf471be7ae7b603 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 23 Sep 2024 14:47:07 +0200 Subject: [PATCH 075/361] flush editable mapping updater --- .../services/DSRemoteTracingstoreClient.scala | 2 ++ .../annotation/AnnotationWithTracings.scala | 8 ++++++++ .../tracingstore/annotation/TSAnnotationService.scala | 3 ++- .../editablemapping/EditableMappingUpdater.scala | 10 ++++++++-- 4 files changed, 20 insertions(+), 3 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala index 5bd69d4d7c9..42473d29a21 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala @@ -70,9 +70,11 @@ class DSRemoteTracingstoreClient @Inject()( rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId").withTokenFromContext .getWithJsonResponse[List[String]] + // TODO annotation id def getZGroup(tracingId: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[JsObject] = rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zgroup").withTokenFromContext.getWithJsonResponse[JsObject] + // TODO annotation id def getEditableMappingSegmentIdsForAgglomerate(tracingStoreUri: String, tracingId: String, agglomerateId: Long)( implicit tc: TokenContext): Fox[EditableMappingSegmentListResult] = rpc(s"$tracingStoreUri/tracings/mapping/$tracingId/segmentsForAgglomerate") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index d0435b1b35e..01022928517 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -119,4 +119,12 @@ case class AnnotationWithTracings( } yield this.copy( editableMappingsByTracingId = editableMappingsByTracingId.updated(a.actionTracingId, (updated, updater))) + + def flushBufferedUpdates()(implicit ec: ExecutionContext): Fox[Unit] = { + val updaters = editableMappingsByTracingId.values.map(_._2).toList + for { + _ <- Fox.serialCombined(updaters)(updater => updater.flushBuffersToFossil()) + } yield () + } + } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index e1b3ae531dd..09842a764ba 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -310,7 +310,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss else { for { updated <- updateIter(Some(annotation), updates) - // TODO flush editable mapping updaters + _ <- updated.flushBufferedUpdates() + // todo: save materialized tracings + editable mapping info } yield updated.withVersion(targetVersion) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 3d5a2d0b5d3..e40475db5e5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -60,13 +60,19 @@ class EditableMappingUpdater( dry: Boolean = false)(implicit ec: ExecutionContext): Fox[EditableMappingInfo] = for { updatedEditableMappingInfo: EditableMappingInfo <- updateIter(Some(existingEditabeMappingInfo), updates) - _ <- Fox.runIf(!dry)(flushToFossil(updatedEditableMappingInfo)) + _ <- Fox.runIf(!dry)(flushBuffersToFossil()) + _ <- Fox.runIf(!dry)(flushUpdatedInfoToFossil(updatedEditableMappingInfo)) } yield updatedEditableMappingInfo - private def flushToFossil(updatedEditableMappingInfo: EditableMappingInfo)(implicit ec: ExecutionContext): Fox[Unit] = + def 
flushBuffersToFossil()(implicit ec: ExecutionContext): Fox[Unit] = for { _ <- Fox.serialCombined(segmentToAgglomerateBuffer.keys.toList)(flushSegmentToAgglomerateChunk) _ <- Fox.serialCombined(agglomerateToGraphBuffer.keys.toList)(flushAgglomerateGraph) + } yield () + + private def flushUpdatedInfoToFossil(updatedEditableMappingInfo: EditableMappingInfo)( + implicit ec: ExecutionContext): Fox[Unit] = + for { _ <- tracingDataStore.editableMappingsInfo.put(tracingId, newVersion, updatedEditableMappingInfo) } yield () From e4825a604cbb455d512de33dd0d5c39cbba399bc Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 23 Sep 2024 14:58:47 +0200 Subject: [PATCH 076/361] imports --- .../tracingstore/annotation/AnnotationWithTracings.scala | 5 +---- .../tracingstore/annotation/TSAnnotationService.scala | 2 -- .../editablemapping/EditableMappingService.scala | 9 ++++----- .../editablemapping/EditableMappingUpdater.scala | 3 +-- .../tracings/volume/VolumeUpdateActions.scala | 1 - 5 files changed, 6 insertions(+), 14 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 01022928517..e8c59a7010d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -12,10 +12,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingUpdater } import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - ApplyableVolumeUpdateAction, - UpdateMappingNameVolumeAction -} +import com.scalableminds.webknossos.tracingstore.tracings.volume.ApplyableVolumeUpdateAction import 
net.liftweb.common.{Box, Failure, Full} import scala.concurrent.ExecutionContext diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 09842a764ba..984cfb6cef9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -8,7 +8,6 @@ import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingService, EditableMappingUpdateAction, @@ -29,7 +28,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, KeyValueStoreImplicits, - RemoteFallbackLayer, TracingDataStore, VersionedKeyValuePair } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 0ff3d9d9922..86dd974e376 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -545,11 +545,10 @@ class EditableMappingService @Inject()( 
result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield result - def getAgglomerateGraphForId( - tracingId: String, - version: Long, - agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[AgglomerateGraph] = + def getAgglomerateGraphForId(tracingId: String, + version: Long, + agglomerateId: Long, + remoteFallbackLayer: RemoteFallbackLayer): Fox[AgglomerateGraph] = for { agglomerateGraph <- agglomerateToGraphCache.getOrLoad( (tracingId, agglomerateId, version), diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index e40475db5e5..97ee1af37d8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -70,8 +70,7 @@ class EditableMappingUpdater( _ <- Fox.serialCombined(agglomerateToGraphBuffer.keys.toList)(flushAgglomerateGraph) } yield () - private def flushUpdatedInfoToFossil(updatedEditableMappingInfo: EditableMappingInfo)( - implicit ec: ExecutionContext): Fox[Unit] = + private def flushUpdatedInfoToFossil(updatedEditableMappingInfo: EditableMappingInfo): Fox[Unit] = for { _ <- tracingDataStore.editableMappingsInfo.put(tracingId, newVersion, updatedEditableMappingInfo) } yield () diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index d00a7707d10..7cfc4b44a09 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -1,6 +1,5 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume -import java.util.Base64 import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.webknossos.datastore.VolumeTracing.{Segment, SegmentGroup, VolumeTracing} import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto From 4cc4647ed636e38b338dec70c908de8bbeca453d Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 24 Sep 2024 13:51:07 +0200 Subject: [PATCH 077/361] fix segment stats request uri --- .../right-border-tabs/segments_tab/segments_view_helper.tsx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx index 0440554ef95..818b990e681 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx @@ -50,7 +50,8 @@ export function getVolumeRequestUrl( return `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.name}/layers/${visibleSegmentationLayer.name}`; } else { const tracingStoreHost = tracing?.tracingStore.url; - return `${tracingStoreHost}/tracings/volume/${tracingId}`; + const annotationId = tracing?.annotationId; + return `${tracingStoreHost}/tracings/volume/${annotationId}/${tracingId}`; } } From 8c11da294dccf98e84c26fec1918a8064f17faa8 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 24 Sep 2024 14:23:57 +0200 Subject: [PATCH 078/361] wip: fix some more functions --- .../annotation/TSAnnotationService.scala | 21 ++- .../EditableMappingController.scala | 2 + .../controllers/VolumeTracingController.scala | 5 +- .../EditableMappingService.scala | 138 
++---------------- .../EditableMappingUpdater.scala | 11 +- .../volume/VolumeTracingService.scala | 2 +- 6 files changed, 41 insertions(+), 138 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 984cfb6cef9..0e77b6177e7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -82,7 +82,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield updateActionGroups.reverse.flatten } + // TODO option to dry apply? private def applyUpdate( + annotationId: String, annotationWithTracings: AnnotationWithTracings, updateAction: UpdateAction, targetVersion: Long // Note: this is not the target version of this one update, but of all pending @@ -101,7 +103,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" case a: UpdateMappingNameVolumeAction if a.isEditable.contains(true) => for { - withNewEditableMapping <- addEditableMapping(annotationWithTracings, a, targetVersion) + withNewEditableMapping <- addEditableMapping(annotationId, annotationWithTracings, a, targetVersion) withApplyedVolumeAction <- withNewEditableMapping.applyVolumeAction(a) } yield withApplyedVolumeAction case a: ApplyableVolumeUpdateAction => @@ -166,13 +168,15 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss // move the functions that construct the AnnotationWithTracigns elsewhere? 
private def addEditableMapping( + annotationId: String, annotationWithTracings: AnnotationWithTracings, action: UpdateMappingNameVolumeAction, targetVersion: Long)(implicit tc: TokenContext, ec: ExecutionContext): Fox[AnnotationWithTracings] = for { editableMappingInfo <- getEditableMappingInfoFromStore(action.actionTracingId, annotationWithTracings.version) volumeTracing <- annotationWithTracings.getVolume(action.actionTracingId).toFox - updater <- editableMappingUpdaterFor(action.actionTracingId, + updater <- editableMappingUpdaterFor(annotationId, + action.actionTracingId, volumeTracing, editableMappingInfo.value, annotationWithTracings.version, @@ -193,7 +197,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss updates, requestedSkeletonTracingIds, requestedVolumeTracingIds) ?~> "findTracingsForUpdates.failed" - annotationWithTracingsAndMappings <- findEditableMappingsForUpdates(annotationWithTracings, + annotationWithTracingsAndMappings <- findEditableMappingsForUpdates(annotationId, + annotationWithTracings, updates, annotation.version, targetVersion) @@ -201,6 +206,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield updated private def findEditableMappingsForUpdates( // TODO integrate with findTracings? 
+ annotationId: String, annotationWithTracings: AnnotationWithTracings, updates: List[UpdateAction], currentMaterializedVersion: Long, @@ -213,7 +219,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case (volumeTracing, volumeTracingId) => for { editableMappingInfo <- getEditableMappingInfoFromStore(volumeTracingId, annotationWithTracings.version) - updater <- editableMappingUpdaterFor(volumeTracingId, + updater <- editableMappingUpdaterFor(annotationId, + volumeTracingId, volumeTracing, editableMappingInfo.value, currentMaterializedVersion, @@ -229,6 +236,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss fromProtoBytes[EditableMappingInfo]) private def editableMappingUpdaterFor( + annotationId: String, tracingId: String, volumeTracing: VolumeTracing, editableMappingInfo: EditableMappingInfo, @@ -238,6 +246,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(volumeTracing, tracingId) } yield new EditableMappingUpdater( + annotationId, tracingId, editableMappingInfo.baseMappingName, currentMaterializedVersion, @@ -246,6 +255,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tc, remoteDatastoreClient, editableMappingService, + this, tracingDataStore, relyOnAgglomerateIds = false // TODO should we? ) @@ -299,7 +309,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case List() => Fox.successful(annotationWithTracings) case RevertToVersionUpdateAction(sourceVersion, _, _, _) :: tail => ??? 
- case update :: tail => updateIter(applyUpdate(annotationWithTracings, update, targetVersion), tail) + case update :: tail => + updateIter(applyUpdate(annotationId, annotationWithTracings, update, targetVersion), tail) } case _ => annotationWithTracingsFox } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index e93196c3b12..24e41a83f88 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -187,7 +187,9 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(tracingId, + editableMappingInfo, tracing.version, request.body, remoteFallbackLayer) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 8a2c81a3006..b8233ba03cd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -207,8 +207,9 @@ class VolumeTracingController @Inject()( remoteFallbackLayerOpt <- 
Fox.runIf(tracing.getHasEditableMapping)( tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) newTracingId = tracingService.generateTracingId - _ <- Fox.runIf(tracing.getHasEditableMapping)( - editableMappingService.duplicate(tracingId, newTracingId, version = None, remoteFallbackLayerOpt)) + // TODO + /*_ <- Fox.runIf(tracing.getHasEditableMapping)( + editableMappingService.duplicate(tracingId, newTracingId, version = None, remoteFallbackLayerOpt))*/ (newId, newTracing) <- tracingService.duplicate( annotationId, tracingId, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 86dd974e376..c48834d0992 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -108,6 +108,7 @@ class EditableMappingService @Inject()( adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) private val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService + // TODO private lazy val materializedInfoCache: AlfuCache[(String, Long), EditableMappingInfo] = AlfuCache(maxCapacity = 100) private lazy val segmentToAgglomerateChunkCache: AlfuCache[(String, Long, Long), Seq[(Long, Long)]] = @@ -135,6 +136,7 @@ class EditableMappingService @Inject()( } yield newEditableMappingInfo } + /* TODO def duplicate(sourceTracingId: String, newTracingId: String, version: Option[Long], @@ -153,6 +155,8 @@ class EditableMappingService @Inject()( } } yield () + */ + private def duplicateSegmentToAgglomerate(sourceTracingId: String, newId: String, newVersion: Long): Fox[Unit] = { val iterator = new 
VersionedFossilDbIterator(sourceTracingId, @@ -187,135 +191,11 @@ class EditableMappingService @Inject()( } yield () } - def getInfo(tracingId: String, version: Option[Long] = None, remoteFallbackLayer: RemoteFallbackLayer)( - implicit tc: TokenContext): Fox[EditableMappingInfo] = - for { - (info, _) <- getInfoAndActualVersion(tracingId, version, remoteFallbackLayer) - } yield info - def assertTracingHasEditableMapping(tracing: VolumeTracing)(implicit ec: ExecutionContext): Fox[Unit] = bool2Fox(tracing.getHasEditableMapping) ?~> "annotation.volume.noEditableMapping" def getBaseMappingName(tracingId: String): Fox[Option[String]] = - for { - desiredVersion <- getClosestMaterializableVersionOrZero(tracingId, None) - infoBox <- getClosestMaterialized(tracingId, desiredVersion).futureBox - } yield - infoBox match { - case Full(info) => Some(info.value.baseMappingName) - case _ => None - } - - def getInfoAndActualVersion( - tracingId: String, - requestedVersion: Option[Long] = None, - remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[(EditableMappingInfo, Long)] = - for { - desiredVersion <- getClosestMaterializableVersionOrZero(tracingId, requestedVersion) - materializedInfo <- materializedInfoCache.getOrLoad( - (tracingId, desiredVersion), - _ => applyPendingUpdates(tracingId, desiredVersion, remoteFallbackLayer)) - } yield (materializedInfo, desiredVersion) - - def update(tracingId: String, - updateActionGroup: UpdateActionGroup, - newVersion: Long, - remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Unit] = - for { - actionsWithTimestamp <- Fox.successful(updateActionGroup.actions.map(_.addTimestamp(updateActionGroup.timestamp))) - _ <- dryApplyUpdates(tracingId, newVersion, actionsWithTimestamp, remoteFallbackLayer) ?~> "editableMapping.dryUpdate.failed" - _ <- tracingDataStore.editableMappingUpdates.put(tracingId, newVersion, actionsWithTimestamp) - } yield () - - private def dryApplyUpdates(tracingId: String, - 
newVersion: Long, - updates: List[UpdateAction], - remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[Unit] = - for { - (previousInfo, previousVersion) <- getInfoAndActualVersion(tracingId, None, remoteFallbackLayer) - updater = new EditableMappingUpdater( - tracingId, - previousInfo.baseMappingName, - previousVersion, - newVersion, - remoteFallbackLayer, - tc, - remoteDatastoreClient, - this, - tracingDataStore, - relyOnAgglomerateIds = updates.length <= 1 - ) - _ <- updater.applyUpdatesAndSave(previousInfo, updates, dry = true) ?~> "editableMapping.update.failed" - } yield () - - def applyPendingUpdates(tracingId: String, desiredVersion: Long, remoteFallbackLayer: RemoteFallbackLayer)( - implicit tc: TokenContext): Fox[EditableMappingInfo] = - for { - closestMaterializedWithVersion <- getClosestMaterialized(tracingId, desiredVersion) - updatedEditableMappingInfo: EditableMappingInfo <- if (desiredVersion == closestMaterializedWithVersion.version) - Fox.successful(closestMaterializedWithVersion.value) - else - for { - pendingUpdates <- getPendingUpdates(tracingId, closestMaterializedWithVersion.version, desiredVersion) - updater = new EditableMappingUpdater( - tracingId, - closestMaterializedWithVersion.value.baseMappingName, - closestMaterializedWithVersion.version, - desiredVersion, - remoteFallbackLayer, - tc, - remoteDatastoreClient, - this, - tracingDataStore, - relyOnAgglomerateIds = pendingUpdates.length <= 1 - ) - updated <- updater.applyUpdatesAndSave(closestMaterializedWithVersion.value, pendingUpdates) - } yield updated - } yield updatedEditableMappingInfo - - private def getClosestMaterialized(tracingId: String, - desiredVersion: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] = - tracingDataStore.editableMappingsInfo.get(tracingId, version = Some(desiredVersion))( - fromProtoBytes[EditableMappingInfo]) - - def getClosestMaterializableVersionOrZero(tracingId: String, desiredVersion: Option[Long]): Fox[Long] = - 
tracingDataStore.editableMappingUpdates.getVersion(tracingId, - version = desiredVersion, - mayBeEmpty = Some(true), - emptyFallback = Some(0L)) - - private def getPendingUpdates(tracingId: String, - closestMaterializedVersion: Long, - closestMaterializableVersion: Long): Fox[List[UpdateAction]] = - if (closestMaterializableVersion == closestMaterializedVersion) { - Fox.successful(List.empty) - } else { - for { - updates <- getUpdateActionsWithVersions(tracingId, - newestVersion = closestMaterializableVersion, - oldestVersion = closestMaterializedVersion + 1L) - } yield updates.map(_._2).reverse.flatten - } - - private def getUpdateActionsWithVersions(tracingId: String, - newestVersion: Long, - oldestVersion: Long): Fox[List[(Long, List[UpdateAction])]] = { - val batchRanges = batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) - for { - updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => - val batchFrom = batchRange._1 - val batchTo = batchRange._2 - for { - res <- tracingDataStore.editableMappingUpdates.getMultipleVersionsAsVersionValueTuple[List[UpdateAction]]( - tracingId, - Some(batchTo), - Some(batchFrom) - )(fromJsonBytes[List[UpdateAction]]) - } yield res - } ?~> "Failed to fetch editable mapping update actions from fossilDB" - flat = updateActionBatches.flatten - } yield flat - } + Fox.successful(None) def findSegmentIdAtPositionIfNeeded(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], @@ -649,13 +529,12 @@ class EditableMappingService @Inject()( def agglomerateGraphNeighbors( tracingId: String, + editableMappingInfo: EditableMappingInfo, version: Long, parameters: NeighborsParameters, remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[(Long, Seq[NodeWithPosition])] = for { - // called here to ensure updates are applied - mapping <- getInfo(tracingId, version = None, remoteFallbackLayer) - agglomerateGraph <- getAgglomerateGraphForIdWithFallback(mapping, + 
agglomerateGraph <- getAgglomerateGraphForIdWithFallback(editableMappingInfo, tracingId, version, parameters.agglomerateId, @@ -674,6 +553,7 @@ class EditableMappingService @Inject()( neighborNodes } + /* def merge(newTracingId: String, tracingIds: List[String], remoteFallbackLayer: RemoteFallbackLayer)( implicit tc: TokenContext): Fox[Unit] = for { @@ -713,6 +593,8 @@ class EditableMappingService @Inject()( } } yield () + */ + private def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = (0L to ((to - from) / batchSize)).map { batchIndex => val batchFrom = batchIndex * batchSize + from diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index bb33e48e366..5dc1e3017ba 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -10,7 +10,11 @@ import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentToAgglomerateChunkProto } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient -import com.scalableminds.webknossos.tracingstore.annotation.{RevertToVersionUpdateAction, UpdateAction} +import com.scalableminds.webknossos.tracingstore.annotation.{ + RevertToVersionUpdateAction, + TSAnnotationService, + UpdateAction +} import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, @@ -32,6 +36,7 @@ import scala.jdk.CollectionConverters.CollectionHasAsScala // this results in only one version increment in the db per update group class EditableMappingUpdater( + annotationId: String, 
tracingId: String, baseMappingName: String, oldVersion: Long, @@ -40,6 +45,7 @@ class EditableMappingUpdater( tokenContext: TokenContext, remoteDatastoreClient: TSRemoteDatastoreClient, editableMappingService: EditableMappingService, + annotationService: TSAnnotationService, tracingDataStore: TracingDataStore, relyOnAgglomerateIds: Boolean // False during merge and in case of multiple actions. Then, look up all agglomerate ids at positions ) extends KeyValueStoreImplicits @@ -418,7 +424,8 @@ class EditableMappingUpdater( implicit ec: ExecutionContext): Fox[EditableMappingInfo] = for { _ <- bool2Fox(revertAction.sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" - oldInfo <- editableMappingService.getInfo(tracingId, Some(revertAction.sourceVersion), remoteFallbackLayer)( + oldInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId, Some(revertAction.sourceVersion))( + ec, tokenContext) _ = segmentToAgglomerateBuffer.clear() _ = agglomerateToGraphBuffer.clear() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index c48e6ec3abe..e6a635c70d2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -995,7 +995,7 @@ class VolumeTracingService @Inject()( remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" - _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), 
remoteFallbackLayer) + // TODO_ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) } yield () } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty From 2a888999aee5ec5438552431645399d8c18d9e40 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 25 Sep 2024 09:23:04 +0200 Subject: [PATCH 079/361] construct editableMappingLayer already in annotationservice --- .../annotation/TSAnnotationService.scala | 19 ++++++ .../controllers/VolumeTracingController.scala | 14 +++-- ...VolumeTracingZarrStreamingController.scala | 9 ++- .../EditableMappingLayer.scala | 14 +++-- .../EditableMappingService.scala | 58 +++++++------------ .../tracings/volume/TSFullMeshService.scala | 9 ++- .../VolumeSegmentStatisticsService.scala | 28 ++++++--- .../volume/VolumeTracingService.scala | 34 +++++------ 8 files changed, 104 insertions(+), 81 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 0e77b6177e7..ff40b6ffa9d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -8,7 +8,9 @@ import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingLayer, EditableMappingService, EditableMappingUpdateAction, 
EditableMappingUpdater @@ -49,6 +51,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits with FallbackDataHelper + with ProtoGeometryImplicits with LazyLogging { def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit tc: TokenContext): Fox[Unit] = @@ -365,4 +368,20 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } ) } + + def editableMappingLayer(annotationId: String, tracingId: String, tracing: VolumeTracing)( + implicit tc: TokenContext): EditableMappingLayer = + EditableMappingLayer( + tracingId, + tracing.boundingBox, + resolutions = tracing.resolutions.map(vec3IntFromProto).toList, + largestSegmentId = Some(0L), + elementClass = tracing.elementClass, + tc, + tracing = tracing, + annotationId = annotationId, + tracingId = tracingId, + annotationService = this, + editableMappingService = editableMappingService + ) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index b8233ba03cd..98a3a903e61 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -170,9 +170,10 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") - (data, indices) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, request.body) - else tracingService.data(tracingId, tracing, request.body) + (data, indices) <- if 
(tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.volumeData(mappingLayer, request.body) + } else tracingService.data(tracingId, tracing, request.body) } yield Ok(data).withHeaders(getMissingBucketsHeaders(indices): _*) } } @@ -292,9 +293,10 @@ class VolumeTracingController @Inject()( // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") - (vertices, neighbors) <- if (tracing.getHasEditableMapping) - editableMappingService.createAdHocMesh(tracing, tracingId, request.body) - else tracingService.createAdHocMesh(annotationId, tracingId, request.body) + (vertices: Array[Float], neighbors: List[Int]) <- if (tracing.getHasEditableMapping) { + val editableMappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.createAdHocMesh(editableMappingLayer, request.body) + } else tracingService.createAdHocMesh(tracingId, tracing, request.body) } yield { // We need four bytes for each float val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index b1f58de065d..66ff9cad10e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -31,6 +31,7 @@ import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import 
com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataFormat, DataLayer, ElementClass} import com.scalableminds.webknossos.datastore.services.UserAccessRequest +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import com.scalableminds.webknossos.tracingstore.{ @@ -48,6 +49,7 @@ class VolumeTracingZarrStreamingController @Inject()( tracingService: VolumeTracingService, accessTokenService: TracingStoreAccessTokenService, editableMappingService: EditableMappingService, + annotationService: TSAnnotationService, remoteDataStoreClient: TSRemoteDatastoreClient, remoteWebknossosClient: TSRemoteWebknossosClient)(implicit ec: ExecutionContext) extends ExtendedController @@ -303,9 +305,10 @@ class VolumeTracingZarrStreamingController @Inject()( version = None, additionalCoordinates = additionalCoordinates ) - (data, missingBucketIndices) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, List(wkRequest)) - else tracingService.data(tracingId, tracing, List(wkRequest)) + (data, missingBucketIndices) <- if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.volumeData(mappingLayer, List(wkRequest)) + } else tracingService.data(tracingId, tracing, List(wkRequest)) dataWithFallback <- getFallbackLayerDataIfEmpty(tracing, tracingId, data, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index 
c46b0ffc44b..867e21192f5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -22,6 +22,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ import ucar.ma2.{Array => MultiArray} import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import scala.concurrent.ExecutionContext @@ -35,10 +36,9 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP remoteFallbackLayer <- layer.editableMappingService .remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.tracingId) // called here to ensure updates are applied - (editableMappingInfo, editableMappingVersion) <- layer.editableMappingService.getInfoAndActualVersion( - tracingId, - requestedVersion = None, - remoteFallbackLayer = remoteFallbackLayer)(layer.tokenContext) + editableMappingInfo <- layer.annotationService.getEditableMappingInfo(layer.annotationId, + tracingId, + Some(layer.version))(ec, layer.tokenContext) dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, @@ -56,7 +56,7 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP relevantMapping <- layer.editableMappingService.generateCombinedMappingForSegmentIds( segmentIds, editableMappingInfo, - editableMappingVersion, + layer.version, tracingId, remoteFallbackLayer)(layer.tokenContext) mappedData: Array[Byte] <- layer.editableMappingService.mapData(unmappedDataTyped, @@ -73,7 +73,9 @@ case class EditableMappingLayer(name: String, elementClass: ElementClass.Value, tokenContext: 
TokenContext, tracing: VolumeTracing, + annotationId: String, tracingId: String, + annotationService: TSAnnotationService, editableMappingService: EditableMappingService) extends SegmentationLayer { override val mags: List[MagLocator] = List.empty // MagLocators do not apply for annotation layers @@ -98,4 +100,6 @@ case class EditableMappingLayer(name: String, override def adminViewConfiguration: Option[LayerViewConfiguration] = None override def additionalAxes: Option[Seq[AdditionalAxis]] = None + + def version: Long = tracing.version } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index c48834d0992..779425710a2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -217,13 +217,14 @@ class EditableMappingService @Inject()( voxelAsLong <- voxelAsLongArray.headOption } yield voxelAsLong - def volumeData(tracing: VolumeTracing, tracingId: String, dataRequests: DataRequestCollection)( - implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { - - val dataLayer = editableMappingLayer(tracingId, tracing, tracingId) - val requests = dataRequests.map(r => - DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) - + def volumeData(editableMappingLayer: EditableMappingLayer, + dataRequests: DataRequestCollection): Fox[(Array[Byte], List[Int])] = { + val requests = dataRequests.map( + r => + DataServiceDataRequest(null, + editableMappingLayer, + r.cuboid(editableMappingLayer), + r.settings.copy(appliedAgglomerate = None))) binaryDataService.handleDataRequests(requests) } @@ -393,37 +394,20 @@ 
class EditableMappingService @Inject()( bytes = UnsignedIntegerArray.toByteArray(unsignedIntArray, elementClass) } yield bytes - private def editableMappingLayer(mappingName: String, tracing: VolumeTracing, tracingId: String)( - implicit tc: TokenContext): EditableMappingLayer = - EditableMappingLayer( - mappingName, - tracing.boundingBox, - resolutions = tracing.resolutions.map(vec3IntFromProto).toList, - largestSegmentId = Some(0L), - elementClass = tracing.elementClass, - tc, - tracing = tracing, - tracingId = tracingId, - editableMappingService = this + def createAdHocMesh(editableMappingLayer: EditableMappingLayer, + request: WebknossosAdHocMeshRequest): Fox[(Array[Float], List[Int])] = { + val adHocMeshRequest = AdHocMeshRequest( + dataSource = None, + dataLayer = editableMappingLayer, + cuboid = request.cuboid(editableMappingLayer), + segmentId = request.segmentId, + voxelSizeFactor = request.voxelSizeFactorInUnit, + mapping = None, + mappingType = None, + findNeighbors = request.findNeighbors ) - - def createAdHocMesh(tracing: VolumeTracing, tracingId: String, request: WebknossosAdHocMeshRequest)( - implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = - for { - mappingName <- tracing.mappingName.toFox - segmentationLayer = editableMappingLayer(mappingName, tracing, tracingId) - adHocMeshRequest = AdHocMeshRequest( - dataSource = None, - dataLayer = segmentationLayer, - cuboid = request.cuboid(segmentationLayer), - segmentId = request.segmentId, - voxelSizeFactor = request.voxelSizeFactorInUnit, - mapping = None, - mappingType = None, - findNeighbors = request.findNeighbors - ) - result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) - } yield result + adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + } def getAgglomerateGraphForId(tracingId: String, version: Long, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index 381f3420b5a..d51248458cd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -16,6 +16,7 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.services.{FullMeshHelper, FullMeshRequest} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.FallbackDataHelper import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} @@ -26,6 +27,7 @@ import scala.concurrent.ExecutionContext class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, editableMappingService: EditableMappingService, + annotationService: TSAnnotationService, volumeSegmentIndexService: VolumeSegmentIndexService, val remoteDatastoreClient: TSRemoteDatastoreClient, val remoteWebknossosClient: TSRemoteWebknossosClient) @@ -177,7 +179,8 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, adHocMeshRequest: WebknossosAdHocMeshRequest, annotationId: String, tracingId: String)(implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = - if (tracing.getHasEditableMapping) - editableMappingService.createAdHocMesh(tracing, tracingId, adHocMeshRequest) - else volumeTracingService.createAdHocMesh(annotationId, tracingId, adHocMeshRequest) + if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.createAdHocMesh(mappingLayer, adHocMeshRequest) + } else 
volumeTracingService.createAdHocMesh(tracingId, tracing, adHocMeshRequest) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index 6ad05f26680..82109a461a5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -10,12 +10,14 @@ import com.scalableminds.webknossos.datastore.models.{UnsignedInteger, UnsignedI import com.scalableminds.webknossos.datastore.models.datasource.DataLayer import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import javax.inject.Inject import scala.concurrent.ExecutionContext class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTracingService, + annotationService: TSAnnotationService, volumeSegmentIndexService: VolumeSegmentIndexService, editableMappingService: EditableMappingService) extends ProtoGeometryImplicits @@ -59,7 +61,12 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext) = for { tracing <- volumeTracingService.find(annotationId, tracingId) ?~> "tracing.notFound" - bucketData <- getVolumeDataForPositions(tracing, tracingId, mag, Seq(bucketPosition), additionalCoordinates) + bucketData <- getVolumeDataForPositions(annotationId, + tracingId, + tracing, + mag, + Seq(bucketPosition), + additionalCoordinates) 
dataTyped: Array[UnsignedInteger] = UnsignedIntegerArray.fromByteArray( bucketData, elementClassFromProto(tracing.elementClass)) @@ -89,11 +96,13 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci ) } yield allBucketPositions - private def getVolumeDataForPositions(tracing: VolumeTracing, - tracingId: String, - mag: Vec3Int, - bucketPositions: Seq[Vec3Int], - additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext): Fox[Array[Byte]] = { + private def getVolumeDataForPositions( + annotationId: String, + tracingId: String, + tracing: VolumeTracing, + mag: Vec3Int, + bucketPositions: Seq[Vec3Int], + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext): Fox[Array[Byte]] = { val dataRequests = bucketPositions.map { position => WebknossosDataRequest( @@ -107,9 +116,10 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci ) }.toList for { - (data, _) <- if (tracing.getHasEditableMapping) - editableMappingService.volumeData(tracing, tracingId, dataRequests) - else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true) + (data, _) <- if (tracing.getHasEditableMapping) { + val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) + editableMappingService.volumeData(mappingLayer, dataRequests) + } else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true) } yield data } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index e6a635c70d2..f3ab813b6d3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -661,24 +661,22 @@ class VolumeTracingService @Inject()( def volumeBucketsAreEmpty(tracingId: String): Boolean = volumeDataStore.getMultipleKeys(None, Some(tracingId), limit = Some(1))(toBox).isEmpty - def createAdHocMesh(annotationId: String, tracingId: String, request: WebknossosAdHocMeshRequest)( - implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = - for { - tracing <- find(annotationId: String, tracingId) ?~> "tracing.notFound" - segmentationLayer = volumeTracingLayer(tracingId, tracing, includeFallbackDataIfAvailable = true) - adHocMeshRequest = AdHocMeshRequest( - None, - segmentationLayer, - request.cuboid(segmentationLayer), - request.segmentId, - request.voxelSizeFactorInUnit, - None, - None, - request.additionalCoordinates, - request.findNeighbors - ) - result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) - } yield result + def createAdHocMesh(tracingId: String, tracing: VolumeTracing, request: WebknossosAdHocMeshRequest)( + implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = { + val volumeLayer = volumeTracingLayer(tracingId, tracing, includeFallbackDataIfAvailable = true) + val adHocMeshRequest = AdHocMeshRequest( + None, + volumeLayer, + request.cuboid(volumeLayer), + request.segmentId, + request.voxelSizeFactorInUnit, + None, + None, + request.additionalCoordinates, + request.findNeighbors + ) + adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + } def findData(annotationId: String, tracingId: String)(implicit tc: TokenContext): Fox[Option[Vec3Int]] = for { From d57b4192fc50a32f3f3255904221786d6668dc69 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 25 Sep 2024 11:02:15 +0200 Subject: [PATCH 080/361] fix baseMappingName --- .../annotation/TSAnnotationService.scala | 9 ++++++++ .../controllers/VolumeTracingController.scala | 6 ++--- .../EditableMappingService.scala | 1 + 
.../tracings/volume/TSFullMeshService.scala | 22 ++++++++++--------- .../volume/VolumeTracingDownsampling.scala | 4 ++-- .../volume/VolumeTracingService.scala | 20 ++++++++--------- 6 files changed, 36 insertions(+), 26 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index ff40b6ffa9d..6cb417d0b86 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -384,4 +384,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationService = this, editableMappingService = editableMappingService ) + + def baseMappingName(annotationId: String, tracingId: String, tracing: VolumeTracing)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Option[String]] = + if (tracing.getHasEditableMapping) + for { + editableMappingInfo <- getEditableMappingInfo(annotationId, tracingId) + } yield Some(editableMappingInfo.baseMappingName) + else Fox.successful(tracing.mappingName) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 98a3a903e61..1679838400c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -336,7 +336,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- 
tracingService.find(annotationId, tracingId) - mappingName <- tracingService.baseMappingName(tracing) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentVolume(annotationId, tracingId, @@ -354,7 +354,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { tracing <- tracingService.find(annotationId, tracingId) - mappingName <- tracingService.baseMappingName(tracing) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentBoundingBox(annotationId, tracingId, @@ -373,7 +373,7 @@ class VolumeTracingController @Inject()( for { fallbackLayer <- tracingService.getFallbackLayer(annotationId, tracingId) tracing <- tracingService.find(annotationId, tracingId) - mappingName <- tracingService.baseMappingName(tracing) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) _ <- bool2Fox(DataLayer.bucketSize <= request.body.cubeSize) ?~> "cubeSize must be at least one bucket (32³)" bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 779425710a2..b8ea620e1b3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -195,6 +195,7 @@ class EditableMappingService @Inject()( bool2Fox(tracing.getHasEditableMapping) ?~> "annotation.volume.noEditableMapping" def getBaseMappingName(tracingId: String): Fox[Option[String]] = + // TODO Fox.successful(None) def findSegmentIdAtPositionIfNeeded(remoteFallbackLayer: RemoteFallbackLayer, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index d51248458cd..541b0ed519a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -42,16 +42,18 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, for { tracing <- volumeTracingService.find(annotationId, tracingId) ?~> "tracing.notFound" data <- if (fullMeshRequest.meshFileName.isDefined) - loadFullMeshFromMeshfile(tracing, tracingId, fullMeshRequest) - else loadFullMeshFromAdHoc(tracing, annotationId, tracingId, fullMeshRequest) + loadFullMeshFromMeshfile(annotationId, tracingId, tracing, fullMeshRequest) + else loadFullMeshFromAdHoc(annotationId, tracingId, tracing, fullMeshRequest) } yield data - private def loadFullMeshFromMeshfile(tracing: VolumeTracing, tracingId: String, fullMeshRequest: FullMeshRequest)( - implicit ec: ExecutionContext, - tc: TokenContext): Fox[Array[Byte]] = + private def loadFullMeshFromMeshfile( + annotationId: String, + tracingId: String, + tracing: VolumeTracing, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] = for { remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - 
baseMappingName <- volumeTracingService.baseMappingName(tracing) + baseMappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) fullMeshRequestAdapted = if (tracing.getHasEditableMapping) fullMeshRequest.copy(mappingName = baseMappingName, editableMappingTracingId = Some(tracingId), @@ -61,9 +63,9 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, } yield array private def loadFullMeshFromAdHoc( - tracing: VolumeTracing, annotationId: String, tracingId: String, + tracing: VolumeTracing, fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] = for { mag <- fullMeshRequest.mag.toFox ?~> "mag.neededForAdHoc" @@ -71,7 +73,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, before = Instant.now voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId) ?~> "voxelSize.failedToFetch" verticesForChunks <- if (tracing.hasSegmentIndex.getOrElse(false)) - getAllAdHocChunksWithSegmentIndex(annotationId, tracing, tracingId, mag, voxelSize, fullMeshRequest) + getAllAdHocChunksWithSegmentIndex(annotationId, tracingId, tracing, mag, voxelSize, fullMeshRequest) else getAllAdHocChunksWithNeighborLogic( tracing, @@ -90,14 +92,14 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, private def getAllAdHocChunksWithSegmentIndex( annotationId: String, - tracing: VolumeTracing, tracingId: String, + tracing: VolumeTracing, mag: Vec3Int, voxelSize: VoxelSize, fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Array[Float]]] = for { fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId) - mappingName <- volumeTracingService.baseMappingName(tracing) + mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService 
.getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( fallbackLayer, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 97cf49ed688..5a85c53a8f8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -74,7 +74,7 @@ trait VolumeTracingDownsampling protected def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] - protected def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] + protected def selectMappingName(tracing: VolumeTracing): Fox[Option[String]] protected def volumeSegmentIndexClient: FossilDBClient @@ -119,7 +119,7 @@ trait VolumeTracingDownsampling _ <- Fox.serialCombined(updatedBucketsMutable.toList) { bucketPosition: BucketPosition => for { _ <- saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), tracing.version) - mappingName <- baseMappingName(tracing) + mappingName <- selectMappingName(tracing) _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index f3ab813b6d3..31b40df0961 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -26,7 +26,6 @@ import com.scalableminds.webknossos.datastore.services._ import 
com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, @@ -57,7 +56,6 @@ class VolumeTracingService @Inject()( implicit val ec: ExecutionContext, val handledGroupIdStore: TracingStoreRedisStore, val uncommittedUpdatesStore: TracingStoreRedisStore, - editableMappingService: EditableMappingService, val temporaryTracingIdStore: TracingStoreRedisStore, val remoteDatastoreClient: TSRemoteDatastoreClient, val annotationService: TSAnnotationService, @@ -163,7 +161,7 @@ class VolumeTracingService @Inject()( dataLayer = volumeTracingLayer(tracingId, volumeTracing) actionBucketData <- action.base64Data.map(Base64.getDecoder.decode).toFox _ <- saveBucket(dataLayer, bucketPosition, actionBucketData, updateGroupVersion) ?~> "failed to save bucket" - mappingName <- baseMappingName(volumeTracing) + mappingName <- selectMappingName(volumeTracing) _ <- Fox.runIfOptionTrue(volumeTracing.hasSegmentIndex) { for { previousBucketBytes <- loadBucket(dataLayer, bucketPosition, Some(updateGroupVersion - 1L)).futureBox @@ -197,9 +195,9 @@ class VolumeTracingService @Inject()( override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = if (tracing.getHasEditableMapping) Some(tracingId) else None - override def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] = + def selectMappingName(tracing: VolumeTracing): Fox[Option[String]] = if (tracing.getHasEditableMapping) - tracing.mappingName.map(editableMappingService.getBaseMappingName).getOrElse(Fox.successful(None)) + Fox.failure("mappingName 
called on volumeTracing with editableMapping!") else Fox.successful(tracing.mappingName) private def deleteSegmentData(annotationId: String, @@ -217,7 +215,7 @@ class VolumeTracingService @Inject()( } else { possibleAdditionalCoordinates.toList } - mappingName <- baseMappingName(volumeTracing) + mappingName <- selectMappingName(volumeTracing) _ <- Fox.serialCombined(volumeTracing.resolutions.toList)(resolution => Fox.serialCombined(additionalCoordinateList)(additionalCoordinates => { val mag = vec3IntFromProto(resolution) @@ -350,7 +348,7 @@ class VolumeTracingService @Inject()( _ = if (resolutionSet.nonEmpty) resolutionSets.add(resolutionSet) } yield () } - mappingName <- baseMappingName(tracing) + mappingName <- selectMappingName(tracing) resolutions <- // if none of the tracings contained any volume data do not save buckets, use full resolution list, as already initialized on wk-side if (resolutionSets.isEmpty) @@ -416,7 +414,7 @@ class VolumeTracingService @Inject()( val savedResolutions = new mutable.HashSet[Vec3Int]() for { fallbackLayer <- getFallbackLayer(annotationId, tracingId) - mappingName <- baseMappingName(tracing) + mappingName <- selectMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -578,7 +576,7 @@ class VolumeTracingService @Inject()( AdditionalAxis.fromProtosAsOpt(sourceTracing.additionalAxes), tc ) - mappingName <- baseMappingName(sourceTracing) + mappingName <- selectMappingName(sourceTracing) _ <- Fox.serialCombined(buckets) { case (bucketPosition, bucketData) => if (destinationTracing.resolutions.contains(vec3IntToProto(bucketPosition.mag))) { @@ -862,7 +860,7 @@ class VolumeTracingService @Inject()( sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() fallbackLayer <- getFallbackLayer(annotationId, tracingId) - mappingName <- baseMappingName(tracing) + 
mappingName <- selectMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, currentVersion + 1L, @@ -938,7 +936,7 @@ class VolumeTracingService @Inject()( tracing.elementClass) dataLayer = volumeTracingLayer(tracingId, tracing) fallbackLayer <- getFallbackLayer(annotationId, tracingId) - mappingName <- baseMappingName(tracing) + mappingName <- selectMappingName(tracing) segmentIndexBuffer <- Fox.successful( new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, From 401c7f95580c29300ad50378fd92c684b62a1f12 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 25 Sep 2024 11:41:31 +0200 Subject: [PATCH 081/361] wip: fix segmentsForAgglomerate --- .../WKRemoteTracingStoreController.scala | 10 ++++++++++ conf/webknossos.latest.routes | 1 + .../tracingstore/TSRemoteWebknossosClient.scala | 13 +++++++++++++ .../controllers/EditableMappingController.scala | 6 ++++-- ...com.scalableminds.webknossos.tracingstore.routes | 2 +- 5 files changed, 29 insertions(+), 3 deletions(-) diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 2931937f09d..1476dba7c28 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -114,6 +114,16 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } } + def annotationIdForTracing(name: String, key: String, tracingId: String): Action[AnyContent] = + Action.async { implicit request => + tracingStoreService.validateAccess(name, key) { _ => + implicit val ctx: DBAccessContext = GlobalAccessContext + for { + annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" + } yield Ok(Json.toJson(annotation._id)) + } + } + def dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], diff --git a/conf/webknossos.latest.routes 
b/conf/webknossos.latest.routes index c250497ba6c..5b9ad059207 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -125,6 +125,7 @@ POST /tracingstores/:name/validateUserAccess PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForTracing(name: String, key: String, tracingId: String) +GET /tracingstores/:name/annotationId controllers.WKRemoteTracingStoreController.annotationIdForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataStoreUri/:datasetName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetName: String) # User access tokens for datastore authentication diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 09e3e019e8c..08198fe4ff7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -19,6 +19,7 @@ import play.api.libs.json.{JsObject, Json, OFormat} import play.api.libs.ws.WSResponse import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt case class TracingUpdatesReport(annotationId: String, // TODO stats per tracing id? 
@@ -44,6 +45,7 @@ class TSRemoteWebknossosClient @Inject()( private val webknossosUri: String = config.Tracingstore.WebKnossos.uri private lazy val dataSourceIdByTracingIdCache: AlfuCache[String, DataSourceId] = AlfuCache() + private lazy val annotationIdByTracingIdCache: AlfuCache[String, String] = AlfuCache(timeToLive = 5 minutes) def reportTracingUpdates(tracingUpdatesReport: TracingUpdatesReport): Fox[WSResponse] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") @@ -74,6 +76,17 @@ class TSRemoteWebknossosClient @Inject()( .getWithJsonResponse[DataSourceId] ) + // TODO what about temporary/compound tracings? + def getAnnotationIdForTracing(tracingId: String)(implicit ec: ExecutionContext): Fox[String] = + annotationIdByTracingIdCache.getOrLoad( + tracingId, + tracingId => + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/annotationId") + .addQueryString("tracingId" -> tracingId) + .addQueryString("key" -> tracingStoreKey) + .getWithJsonResponse[String] + ) + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") .addQueryString("key" -> tracingStoreKey) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 24e41a83f88..cf095dbd421 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import 
com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.{EditableMappingSegmentListResult, UserAccessRequest} -import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} import com.scalableminds.webknossos.tracingstore.annotation.{ AnnotationTransactionService, TSAnnotationService, @@ -27,6 +27,7 @@ import scala.concurrent.ExecutionContext class EditableMappingController @Inject()(volumeTracingService: VolumeTracingService, annotationService: TSAnnotationService, + remoteWebknossosClient: TSRemoteWebknossosClient, accessTokenService: TracingStoreAccessTokenService, editableMappingService: EditableMappingService, annotationTransactionService: AnnotationTransactionService)( @@ -117,11 +118,12 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } } - def segmentIdsForAgglomerate(annotationId: String, tracingId: String, agglomerateId: Long): Action[AnyContent] = + def segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index a7b9619e365..8d074e46e96 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ 
b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -36,7 +36,7 @@ POST /volume/mergedFromContents @c # Editable Mappings POST /mapping/:annotationId/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(annotationId: String, tracingId: String) GET /mapping/:annotationId/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(annotationId: String, tracingId: String, version: Option[Long]) -GET /mapping/:annotationId/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(annotationId: String, tracingId: String, agglomerateId: Long) +GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) POST /mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(annotationId: String, tracingId: String) POST /mapping/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(annotationId: String, tracingId: String) POST /mapping/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(annotationId: String, tracingId: String) From 5f71bfc3a8d234339a7e4af86bcc997f8807cfe6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 25 Sep 2024 13:22:02 +0200 Subject: [PATCH 082/361] flush --- .../annotation/AnnotationWithTracings.scala | 18 +++++++++++ .../annotation/TSAnnotationService.scala | 30 +++++++++++++++++-- .../tracings/volume/VolumeUpdateActions.scala | 5 ++-- 3 files changed, 47 insertions(+), 6 
deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index e8c59a7010d..37b17dfb8f5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -31,6 +31,24 @@ case class AnnotationWithTracings( } } yield skeletonTracing + def getVolumes: List[(String, VolumeTracing)] = + tracingsById.view.flatMap { + case (id, Right(vt: VolumeTracing)) => Some(id, vt) + case _ => None + }.toList + + def getSkeletons: List[(String, SkeletonTracing)] = + tracingsById.view.flatMap { + case (id, Left(st: SkeletonTracing)) => Some(id, st) + case _ => None + }.toList + + def getEditableMappingsInfo: List[(String, EditableMappingInfo)] = + editableMappingsByTracingId.view.flatMap { + case (id, (info: EditableMappingInfo, _)) => Some(id, info) + case _ => None + }.toList + def getVolume(tracingId: String): Box[VolumeTracing] = for { tracingEither <- tracingsById.get(tracingId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 6cb417d0b86..6f3a995bcb8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -322,12 +322,36 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss else { for { updated <- updateIter(Some(annotation), updates) - _ <- updated.flushBufferedUpdates() - // todo: save materialized tracings + editable mapping 
info - } yield updated.withVersion(targetVersion) + updatedWithNewVerson = updated.withVersion(targetVersion) + _ <- updatedWithNewVerson.flushBufferedUpdates() + _ <- flushUpdatedTracings(updatedWithNewVerson) + _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) + } yield updatedWithNewVerson } } + private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings)(implicit ec: ExecutionContext) = + // TODO skip some flushes to save disk space (e.g. skeletons only nth version, or only if requested?) + for { + _ <- Fox.serialCombined(annotationWithTracings.getVolumes) { + case (volumeTracingId, volumeTracing) => + tracingDataStore.volumes.put(volumeTracingId, volumeTracing.version, volumeTracing) + } + _ <- Fox.serialCombined(annotationWithTracings.getSkeletons) { + case (skeletonTracingId, skeletonTracing: SkeletonTracing) => + tracingDataStore.skeletons.put(skeletonTracingId, skeletonTracing.version, skeletonTracing) + } + _ <- Fox.serialCombined(annotationWithTracings.getEditableMappingsInfo) { + case (volumeTracingId, editableMappingInfo) => + tracingDataStore.editableMappingsInfo.put(volumeTracingId, + annotationWithTracings.version, + editableMappingInfo) + } + } yield () + + private def flushAnnotationInfo(annotationId: String, annotationWithTracings: AnnotationWithTracings) = + tracingDataStore.annotations.put(annotationId, annotationWithTracings.version, annotationWithTracings.annotation) + private def determineTargetVersion(annotation: AnnotationProto, annotationId: String, targetVersionOpt: Option[Long]): Fox[Long] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index f2d791d2973..04833f3dbcc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -269,8 +269,7 @@ case class DeleteSegmentVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends BucketMutatingVolumeUpdateAction - with ApplyableVolumeUpdateAction { + extends ApplyableVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) @@ -288,7 +287,7 @@ case class DeleteSegmentDataVolumeAction(id: Long, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) - extends VolumeUpdateAction { + extends BucketMutatingVolumeUpdateAction { override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) From 7f4a575228355ffc38cc6c779f4b9ded21020479 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 26 Sep 2024 13:42:05 +0200 Subject: [PATCH 083/361] look up annotation id by tracing id rather than expanding the routes --- conf/messages | 1 + frontend/javascripts/admin/admin_rest_api.ts | 19 +-- .../bucket_data_handling/wkstore_adapter.ts | 3 +- .../oxalis/model/sagas/mapping_saga.ts | 1 - .../oxalis/model/sagas/proofread_saga.ts | 14 +- .../oxalis/model_initialization.ts | 4 +- .../segments_tab/segments_view_helper.tsx | 3 +- .../TSRemoteWebknossosClient.scala | 5 +- .../EditableMappingController.scala | 18 ++- .../SkeletonTracingController.scala | 4 +- .../controllers/TracingController.scala | 3 +- .../controllers/VolumeTracingController.scala | 44 ++++--- ...VolumeTracingZarrStreamingController.scala | 43 +++--- .../tracings/TracingService.scala | 14 +- .../skeleton/SkeletonTracingService.scala | 4 +- ...alableminds.webknossos.tracingstore.routes | 123 +++++++++--------- 16 files changed, 
148 insertions(+), 155 deletions(-) diff --git a/conf/messages b/conf/messages index be5596c7627..191c8e6627c 100644 --- a/conf/messages +++ b/conf/messages @@ -252,6 +252,7 @@ annotation.deleteLayer.explorationalsOnly=Could not delete a layer because it is annotation.deleteLayer.onlyLayer=Could not delete layer because it is the only layer in this annotation. annotation.layer.notFound=Layer could not be found. annotation.getNewestVersion.failed=Could not get the newest version information for this annotation layer +annotation.idForTracing.failed=Could not find the annotation id for this tracing id. mesh.notFound=Mesh could not be found mesh.write.failed=Failed to convert mesh info to json diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 9e3e4ebda6a..89d6c5fe1cf 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -896,7 +896,7 @@ export async function getTracingForAnnotationType( const possibleVersionString = version != null ? 
`&version=${version}` : ""; const tracingArrayBuffer = await doWithToken((token) => Request.receiveArraybuffer( - `${annotation.tracingStore.url}/tracings/${tracingType}/${annotation.id}/${tracingId}?token=${token}${possibleVersionString}`, + `${annotation.tracingStore.url}/tracings/${tracingType}/${tracingId}?token=${token}${possibleVersionString}`, { headers: { Accept: "application/x-protobuf", @@ -1614,12 +1614,11 @@ export function fetchMapping( export function makeMappingEditable( tracingStoreUrl: string, - annotationId: string, tracingId: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/makeMappingEditable?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/makeMappingEditable?token=${token}`, { method: "POST", }, @@ -1629,13 +1628,10 @@ export function makeMappingEditable( export function getEditableMappingInfo( tracingStoreUrl: string, - annotationId: string, tracingId: string, ): Promise { return doWithToken((token) => - Request.receiveJSON( - `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/info?token=${token}`, - ), + Request.receiveJSON(`${tracingStoreUrl}/tracings/mapping/${tracingId}/info?token=${token}`), ); } @@ -2122,7 +2118,6 @@ export async function getAgglomeratesForSegmentsFromDatastore( tracingStoreUrl: string, - annotationId: string, tracingId: string, segmentIds: Array, ): Promise { @@ -2132,7 +2127,7 @@ export async function getAgglomeratesForSegmentsFromTracingstore Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/agglomeratesForSegments?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomeratesForSegments?token=${token}`, { method: "POST", body: segmentIdBuffer, @@ -2311,7 +2306,6 @@ type MinCutTargetEdge = { }; export async function getEdgesForAgglomerateMinCut( tracingStoreUrl: string, - annotationId: string, tracingId: string, segmentsInfo: { 
segmentId1: NumberLike; @@ -2323,7 +2317,7 @@ export async function getEdgesForAgglomerateMinCut( ): Promise> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/agglomerateGraphMinCut?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateGraphMinCut?token=${token}`, { data: { ...segmentsInfo, @@ -2345,7 +2339,6 @@ export type NeighborInfo = { export async function getNeighborsForAgglomerateNode( tracingStoreUrl: string, tracingId: string, - annotationId: string, segmentInfo: { segmentId: NumberLike; mag: Vector3; @@ -2355,7 +2348,7 @@ export async function getNeighborsForAgglomerateNode( ): Promise { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${tracingStoreUrl}/tracings/mapping/${annotationId}/${tracingId}/agglomerateGraphNeighbors?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateGraphNeighbors?token=${token}`, { data: { ...segmentInfo, diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index 963ca797e1c..4db8385e75b 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -110,8 +110,7 @@ export async function requestWithFallback( optLayerName || layerInfo.name }`; - const getTracingStoreUrl = () => - `${tracingStoreHost}/tracings/volume/${state.tracing.annotationId}/${layerInfo.name}`; + const getTracingStoreUrl = () => `${tracingStoreHost}/tracings/volume/${layerInfo.name}`; const maybeVolumeTracing = "tracingId" in layerInfo && layerInfo.tracingId != null diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index ddfb14597ca..1a0044c4370 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ 
b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -455,7 +455,6 @@ function* updateLocalHdf5Mapping( ? yield* call( getAgglomeratesForSegmentsFromTracingstore, annotation.tracingStore.url, - annotation.annotationId, editableMapping.tracingId, Array.from(newSegmentIds), ) diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 653f5f5919a..de523f53795 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -265,7 +265,6 @@ function* createEditableMapping(): Saga { * name of the HDF5 mapping for which the editable mapping is about to be created. */ const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); - const annotationId = yield* select((state) => state.tracing.annotationId); // Save before making the mapping editable to make sure the correct mapping is activated in the backend yield* call([Model, Model.ensureSavedState]); // Get volume tracing again to make sure the version is up to date @@ -276,12 +275,7 @@ function* createEditableMapping(): Saga { const volumeTracingId = upToDateVolumeTracing.tracingId; const layerName = volumeTracingId; - const serverEditableMapping = yield* call( - makeMappingEditable, - tracingStoreUrl, - annotationId, - volumeTracingId, - ); + const serverEditableMapping = yield* call(makeMappingEditable, tracingStoreUrl, volumeTracingId); // The server increments the volume tracing's version by 1 when switching the mapping to an editable one yield* put(setVersionNumberAction(upToDateVolumeTracing.version + 1, "volume", volumeTracingId)); yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); @@ -546,7 +540,6 @@ function* performMinCut( } const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); - const annotationId = yield* select((state) => state.tracing.annotationId); const segmentsInfo = { segmentId1: 
sourceSegmentId, segmentId2: targetSegmentId, @@ -558,7 +551,6 @@ function* performMinCut( const edgesToRemove = yield* call( getEdgesForAgglomerateMinCut, tracingStoreUrl, - annotationId, volumeTracingId, segmentsInfo, ); @@ -609,7 +601,6 @@ function* performCutFromNeighbors( { didCancel: false; neighborInfo: NeighborInfo } | { didCancel: true; neighborInfo?: null } > { const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); - const annotationId = yield* select((state) => state.tracing.annotationId); const segmentsInfo = { segmentId, mag: agglomerateFileMag, @@ -620,7 +611,6 @@ function* performCutFromNeighbors( const neighborInfo = yield* call( getNeighborsForAgglomerateNode, tracingStoreUrl, - annotationId, volumeTracingId, segmentsInfo, ); @@ -1283,13 +1273,11 @@ function* splitAgglomerateInMapping( .map(([segmentId, _agglomerateId]) => segmentId); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); - const annotationId = yield* select((state) => state.tracing.annotationId); // Ask the server to map the (split) segment ids. This creates a partial mapping // that only contains these ids. 
const mappingAfterSplit = yield* call( getAgglomeratesForSegmentsFromTracingstore, tracingStoreUrl, - annotationId, volumeTracingId, splitSegmentIds, ); diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index adb09da6fb5..5ff7619bfab 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -213,7 +213,6 @@ export async function initialize( if (annotation != null) { const editableMappings = await fetchEditableMappings( annotation.tracingStore.url, - annotation.id, serverVolumeTracings, ); initializeTracing(annotation, serverTracings, editableMappings); @@ -249,12 +248,11 @@ async function fetchParallel( async function fetchEditableMappings( tracingStoreUrl: string, - annotationId: string, serverVolumeTracings: ServerVolumeTracing[], ): Promise { const promises = serverVolumeTracings .filter((tracing) => tracing.hasEditableMapping) - .map((tracing) => getEditableMappingInfo(tracingStoreUrl, annotationId, tracing.id)); + .map((tracing) => getEditableMappingInfo(tracingStoreUrl, tracing.id)); return Promise.all(promises); } diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx index 818b990e681..0440554ef95 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx @@ -50,8 +50,7 @@ export function getVolumeRequestUrl( return `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.name}/layers/${visibleSegmentationLayer.name}`; } else { const tracingStoreHost = tracing?.tracingStore.url; - const annotationId = tracing?.annotationId; - return `${tracingStoreHost}/tracings/volume/${annotationId}/${tracingId}`; + return 
`${tracingStoreHost}/tracings/volume/${tracingId}`; } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 08198fe4ff7..8024af01d16 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -45,7 +45,8 @@ class TSRemoteWebknossosClient @Inject()( private val webknossosUri: String = config.Tracingstore.WebKnossos.uri private lazy val dataSourceIdByTracingIdCache: AlfuCache[String, DataSourceId] = AlfuCache() - private lazy val annotationIdByTracingIdCache: AlfuCache[String, String] = AlfuCache(timeToLive = 5 minutes) + private lazy val annotationIdByTracingIdCache: AlfuCache[String, String] = + AlfuCache(maxCapacity = 10000, timeToLive = 5 minutes) def reportTracingUpdates(tracingUpdatesReport: TracingUpdatesReport): Fox[WSResponse] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") @@ -85,7 +86,7 @@ class TSRemoteWebknossosClient @Inject()( .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) .getWithJsonResponse[String] - ) + ) ?~> "annotation.idForTracing.failed" override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index cf095dbd421..cc781fe939d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -35,11 +35,12 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer bodyParsers: PlayBodyParsers) extends Controller { - def makeMappingEditable(annotationId: String, tracingId: String): Action[AnyContent] = + def makeMappingEditable(tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- volumeTracingService.find(annotationId, tracingId) tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" _ <- assertMappingIsNotLocked(tracing) @@ -104,11 +105,12 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } */ - def editableMappingInfo(annotationId: String, tracingId: String, version: Option[Long]): Action[AnyContent] = + def editableMappingInfo(tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId, version) @@ -141,11 +143,12 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } } - def agglomerateIdsForSegments(annotationId: String, tracingId: String): Action[ListOfLong] = + def agglomerateIdsForSegments(tracingId: String): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) @@ -162,11 +165,12 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } } - def agglomerateGraphMinCut(annotationId: String, tracingId: String): Action[MinCutParameters] = + def agglomerateGraphMinCut(tracingId: String): Action[MinCutParameters] = Action.async(validateJson[MinCutParameters]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) @@ -181,11 +185,12 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } } - def agglomerateGraphNeighbors(annotationId: String, tracingId: String): Action[NeighborsParameters] = + def agglomerateGraphNeighbors(tracingId: String): Action[NeighborsParameters] = Action.async(validateJson[NeighborsParameters]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) @@ -200,10 +205,11 @@ class 
EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer } } - def agglomerateSkeleton(annotationId: String, tracingId: String, agglomerateId: Long): Action[AnyContent] = + def agglomerateSkeleton(tracingId: String, agglomerateId: Long): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- volumeTracingService.find(annotationId, tracingId) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 79ebfb8138b..f74c54dc999 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -49,8 +49,7 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer } } - def duplicate(annotationId: String, - tracingId: String, + def duplicate(tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], @@ -60,6 +59,7 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( "tracing.notFound") editPositionParsed <- 
Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index a8c1b04dd60..a7ecc6fb09c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -70,11 +70,12 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C } } - def get(annotationId: String, tracingId: String, version: Option[Long]): Action[AnyContent] = + def get(tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( "tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 1679838400c..bdf301581d4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -78,15 +78,13 @@ class VolumeTracingController @Inject()( implicit def unpackMultiple(tracings: VolumeTracings): List[Option[VolumeTracing]] = tracings.tracings.toList.map(_.tracing) - def initialData(annotationId: String, - tracingId: String, - minResolution: 
Option[Int], - maxResolution: Option[Int]): Action[AnyContent] = + def initialData(tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) @@ -119,12 +117,13 @@ class VolumeTracingController @Inject()( } } - def initialDataMultiple(annotationId: String, tracingId: String): Action[AnyContent] = + def initialDataMultiple(tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") resolutions <- tracingService @@ -137,8 +136,7 @@ class VolumeTracingController @Inject()( } } - def allDataZip(annotationId: String, - tracingId: String, + def allDataZip(tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSizeFactor: Option[String], @@ -147,6 +145,7 @@ class VolumeTracingController @Inject()( log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId, version) ?~> Messages("tracing.notFound") volumeDataZipFormatParsed <- 
VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) @@ -164,11 +163,12 @@ class VolumeTracingController @Inject()( } } - def data(annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = + def data(tracingId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") (data, indices) <- if (tracing.getHasEditableMapping) { val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) @@ -185,8 +185,7 @@ class VolumeTracingController @Inject()( private def formatMissingBucketList(indices: List[Int]): String = "[" + indices.mkString(", ") + "]" - def duplicate(annotationId: String, - tracingId: String, + def duplicate(tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], @@ -198,6 +197,7 @@ class VolumeTracingController @Inject()( logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") _ = logger.info(s"Duplicating volume tracing $tracingId...") datasetBoundingBox = request.body.asJson.flatMap(_.validateOpt[BoundingBox].asOpt.flatten) @@ -231,11 +231,12 @@ class VolumeTracingController @Inject()( } } - def importVolumeData(annotationId: String, tracingId: String): Action[MultipartFormData[TemporaryFile]] = + def importVolumeData(tracingId: String): Action[MultipartFormData[TemporaryFile]] = 
Action.async(parse.multipartFormData) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox zipFile <- request.body.files.headOption.map(f => new File(f.ref.path.toString)).toFox @@ -249,11 +250,12 @@ class VolumeTracingController @Inject()( } } - def addSegmentIndex(annotationId: String, tracingId: String, dryRun: Boolean): Action[AnyContent] = + def addSegmentIndex(tracingId: String, dryRun: Boolean): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- annotationService.currentMaterializableVersion(tracingId) before = Instant.now @@ -285,13 +287,14 @@ class VolumeTracingController @Inject()( } } - def requestAdHocMesh(annotationId: String, tracingId: String): Action[WebknossosAdHocMeshRequest] = + def requestAdHocMesh(tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. 
+ annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") (vertices: Array[Float], neighbors: List[Int]) <- if (tracing.getHasEditableMapping) { val editableMappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) @@ -306,10 +309,11 @@ class VolumeTracingController @Inject()( } } - def loadFullMeshStl(annotationId: String, tracingId: String): Action[FullMeshRequest] = + def loadFullMeshStl(tracingId: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) data: Array[Byte] <- fullMeshService.loadFor(annotationId, tracingId, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } @@ -321,9 +325,10 @@ class VolumeTracingController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", ") + "]" - def findData(annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => + def findData(tracingId: String): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) positionOpt <- tracingService.findData(annotationId, tracingId) } yield { Ok(Json.obj("position" -> positionOpt, "resolution" -> positionOpt.map(_ => Vec3Int.ones))) @@ -331,10 +336,11 @@ class VolumeTracingController @Inject()( } } - def getSegmentVolume(annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentVolume(tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -349,10 +355,11 @@ class VolumeTracingController @Inject()( } } - def getSegmentBoundingBox(annotationId: String, tracingId: String): Action[SegmentStatisticsParameters] = + def getSegmentBoundingBox(tracingId: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => @@ -367,10 +374,11 @@ class VolumeTracingController @Inject()( } } - def getSegmentIndex(annotationId: String, tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = + def getSegmentIndex(tracingId: String, segmentId: Long): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) fallbackLayer <- tracingService.getFallbackLayer(annotationId, tracingId) tracing <- tracingService.find(annotationId, tracingId) mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 66ff9cad10e..7108a8104f6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -59,10 +59,11 @@ class VolumeTracingZarrStreamingController @Inject()( override def defaultErrorCode: Int = NOT_FOUND - def volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContent(tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) additionalFiles = if (zarrVersion == 2) @@ -78,10 +79,11 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingFolderContentJson(annotationId: String, tracingId: String, zarrVersion: Int): Action[AnyContent] = + def volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto(_).toMagLiteral(allowScalar = true)) 
additionalFiles = if (zarrVersion == 2) @@ -91,15 +93,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingMagFolderContent(annotationId: String, - tracingId: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = + def volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -114,13 +113,11 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def volumeTracingMagFolderContentJson(annotationId: String, - tracingId: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = + def volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -130,10 +127,11 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zArray(annotationId: String, tracingId: String, mag: String): Action[AnyContent] = + def zArray(tracingId: String, mag: String): 
Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -164,10 +162,11 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zarrJsonForMag(annotationId: String, tracingId: String, mag: String): Action[AnyContent] = + def zarrJsonForMag(tracingId: String, mag: String): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) @@ -210,7 +209,7 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zGroup(annotationId: String, tracingId: String): Action[AnyContent] = Action.async { implicit request => + def zGroup(tracingId: String): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { Future(Ok(Json.toJson(NgffGroupHeader(zarr_format = 2)))) } @@ -222,13 +221,12 @@ class VolumeTracingZarrStreamingController @Inject()( * Used by zarr-streaming. 
*/ def zAttrs( - annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - existingMags = tracing.resolutions.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(tracingId, @@ -239,13 +237,12 @@ class VolumeTracingZarrStreamingController @Inject()( } def zarrJson( - annotationId: String, tracingId: String, ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - existingMags = tracing.resolutions.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadataV0_5.fromNameVoxelSizeAndMags(tracingId, @@ -257,15 +254,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def zarrSource(annotationId: String, - tracingId: String, - tracingName: Option[String], - zarrVersion: Int): Action[AnyContent] = + def zarrSource(tracingId: String, tracingName: Option[String], zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND - zarrLayer = ZarrSegmentationLayer( name = 
tracingName.getOrElse(tracingId), largestSegmentId = tracing.largestSegmentId, @@ -280,11 +274,12 @@ class VolumeTracingZarrStreamingController @Inject()( } } - def rawZarrCube(annotationId: String, tracingId: String, mag: String, coordinates: String): Action[AnyContent] = + def rawZarrCube(tracingId: String, mag: String, coordinates: String): Action[AnyContent] = Action.async { implicit request => { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index ae23b35d8d4..01e87fa3a37 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -2,7 +2,8 @@ package com.scalableminds.webknossos.tracingstore.tracings import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} -import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import com.scalableminds.webknossos.datastore.services.RemoteWebknossosClient +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats @@ -38,6 +39,8 @@ trait TracingService[T <: GeneratedMessage] def 
temporaryTracingIdStore: TracingStoreRedisStore + def remoteWebknossosClient: TSRemoteWebknossosClient + def tracingMigrationService: TracingMigrationService[T] def annotationService: TSAnnotationService @@ -109,8 +112,6 @@ trait TracingService[T <: GeneratedMessage] } */ - def applyPendingUpdates(tracing: T, tracingId: String, targetVersion: Option[Long]): Fox[T] = Fox.successful(tracing) - def find(annotationId: String, tracingId: String, version: Option[Long] = None, @@ -121,8 +122,11 @@ trait TracingService[T <: GeneratedMessage] implicit tc: TokenContext): Fox[List[Option[T]]] = Fox.combined { selectors.map { - case Some(selector) => // TODO TracingSelector needs annotationIds too - find("dummyAnnotationid", selector.tracingId, selector.version, useCache, applyUpdates).map(Some(_)) + case Some(selector) => + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) + tracing <- find(annotationId, selector.tracingId, selector.version, useCache, applyUpdates).map(Some(_)) + } yield tracing case None => Fox.successful(None) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index e952eb33f6f..66c8ac9c9d9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -8,7 +8,8 @@ import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import 
com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import com.scalableminds.webknossos.datastore.services.RemoteWebknossosClient +import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats @@ -22,6 +23,7 @@ class SkeletonTracingService @Inject()( val temporaryTracingStore: TemporaryTracingStore[SkeletonTracing], val handledGroupIdStore: TracingStoreRedisStore, val temporaryTracingIdStore: TracingStoreRedisStore, + val remoteWebknossosClient: TSRemoteWebknossosClient, val uncommittedUpdatesStore: TracingStoreRedisStore, val annotationService: TSAnnotationService, val tracingMigrationService: SkeletonTracingMigrationService)(implicit val ec: ExecutionContext) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 8d074e46e96..6a4af8dd09d 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -1,79 +1,78 @@ -# Routes -# This file defines all application routes (Higher priority routes first) +# Defines tracingstore routes (Higher priority routes first) # ~~~~ # Health endpoint -GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health +GET /health @com.scalableminds.webknossos.tracingstore.controllers.Application.health # Annotations (concerns AnnotationProto, not annotation info as stored in postgres) -POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(annotationId: String) -GET /annotation/:annotationId 
@com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(annotationId: String, version: Option[Long]) -POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(annotationId: String) -GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) -GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) -GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) +POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(annotationId: String) +GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(annotationId: String, version: Option[Long]) +POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(annotationId: String) +GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) +GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) +GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) # Volume tracings -POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() -POST /volume/:annotationId/:tracingId/initialData 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(annotationId: String, tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) -POST /volume/:annotationId/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(annotationId: String, tracingId: String) -GET /volume/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(annotationId: String, tracingId: String, version: Option[Long]) -GET /volume/:annotationId/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(annotationId: String, tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) -POST /volume/:annotationId/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(annotationId: String, tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) -POST /volume/:annotationId/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(annotationId: String, tracingId: String, segmentId: 
Long) -POST /volume/:annotationId/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(annotationId: String, tracingId: String, dryRun: Boolean) -GET /volume/:annotationId/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(annotationId: String, tracingId: String) -POST /volume/:annotationId/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(annotationId: String, tracingId: String) -POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple -POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(persist: Boolean) -POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(persist: Boolean) +POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() +POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) +POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) +GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, 
version: Option[Long]) +GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) +POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) +POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) +POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) +POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(tracingId: String, segmentId: Long) +POST /volume/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(tracingId: String) +POST /volume/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(tracingId: String, dryRun: Boolean) +GET /volume/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(tracingId: String) +POST /volume/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(tracingId: String) +POST /volume/:tracingId/segmentStatistics/boundingBox 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(tracingId: String) +POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple +POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(persist: Boolean) +POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(persist: Boolean) # Editable Mappings -POST /mapping/:annotationId/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(annotationId: String, tracingId: String) -GET /mapping/:annotationId/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(annotationId: String, tracingId: String, version: Option[Long]) -GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) -POST /mapping/:annotationId/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(annotationId: String, tracingId: String) -POST /mapping/:annotationId/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(annotationId: String, tracingId: String) -POST /mapping/:annotationId/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(annotationId: String, tracingId: String) +POST /mapping/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(tracingId: String) +GET /mapping/:tracingId/info 
@com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, version: Option[Long]) +GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) +POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String) +POST /mapping/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(tracingId: String) +POST /mapping/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(tracingId: String) # TODO rename -GET /volume/:annotationId/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(annotationId: String, tracingId: String, agglomerateId: Long) +GET /volume/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) # Zarr endpoints for volume annotations # Zarr version 2 -GET /volume/zarr/json/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(annotationId: String, tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/json/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/.zgroup @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(annotationId: String, tracingId: String) -GET /volume/zarr/:annotationId/:tracingId/.zattrs @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(annotationId: String, tracingId: String) -GET /volume/zarr/:annotationId/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 2) -GET /volume/zarr/:annotationId/:tracingId/:mag/.zarray @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(annotationId: String, tracingId: String, mag: String) -GET /volume/zarr/:annotationId/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(annotationId: String, tracingId: String, mag: String, coordinates: String) +GET 
/volume/zarr/json/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/json/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/.zgroup @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zGroup(tracingId: String) +GET /volume/zarr/:tracingId/.zattrs @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zAttrs(tracingId: String) +GET /volume/zarr/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(tracingId: String, tracingName: Option[String], zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 2) +GET /volume/zarr/:tracingId/:mag/.zarray @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zArray(tracingId: String, mag: String) +GET /volume/zarr/:tracingId/:mag/:coordinates 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(tracingId: String, mag: String, coordinates: String) # Zarr version 3 -GET /volume/zarr3_experimental/json/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(annotationId: String, tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/json/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(annotationId: String, tracingId: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(annotationId: String, tracingId: String, tracingName: Option[String], zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(annotationId: String, tracingId: String) -GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/ 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(annotationId: String, tracingId: String, mag: String, zarrVersion: Int = 3) -GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(annotationId: String, tracingId: String, mag: String) -GET /volume/zarr3_experimental/:annotationId/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(annotationId: String, tracingId: String, mag: String, coordinates: String) +GET /volume/zarr3_experimental/json/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/json/:tracingId/:mag @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContentJson(tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContent(tracingId: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/zarrSource @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrSource(tracingId: String, tracingName: Option[String], zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJson(tracingId: String) +GET /volume/zarr3_experimental/:tracingId/:mag 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/:mag/ @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingMagFolderContent(tracingId: String, mag: String, zarrVersion: Int = 3) +GET /volume/zarr3_experimental/:tracingId/:mag/zarr.json @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.zarrJsonForMag(tracingId: String, mag: String) +GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.rawZarrCube(tracingId: String, mag: String, coordinates: String) # Skeleton tracings -POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() -POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() -POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(persist: Boolean) -POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(persist: Boolean) -GET /skeleton/:annotationId/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(annotationId: String, tracingId: String, version: Option[Long]) -POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple -POST /skeleton/:annotationId/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(annotationId: String, tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) 
+POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() +POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() +POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(persist: Boolean) +POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(persist: Boolean) +GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) +POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple +POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) From 1ff9b9102861d1b14c0ab83e94810bf96a28f586 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 26 Sep 2024 14:01:39 +0200 Subject: [PATCH 084/361] resolve some todos, close annotationUpdates grpc handle, remove unused functions --- .../WKRemoteTracingStoreClient.scala | 2 +- frontend/javascripts/admin/admin_rest_api.ts | 2 +- .../AnnotationTransactionService.scala | 11 +++++------ .../annotation/TSAnnotationService.scala | 4 ++-- .../controllers/VolumeTracingController.scala | 12 ------------ .../tracings/TracingDataStore.scala | 10 +--------- .../tracings/TracingSelector.scala | 2 +- .../EditableMappingService.scala | 7 +------ .../tracings/volume/VolumeTracingService.scala | 18 ------------------ ...calableminds.webknossos.tracingstore.routes | 3 +-- 10 files changed, 13 insertions(+), 58 deletions(-) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala 
b/app/models/annotation/WKRemoteTracingStoreClient.scala index d6bcf96bcdd..590d5862cc3 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -87,7 +87,7 @@ class WKRemoteTracingStoreClient( rpc(s"${tracingStore.url}/tracings/annotation/save") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("annotationId" -> annotationId.toString) - .postProto[AnnotationProto](annotationProto) // TODO why didn’t the failure bubble up? + .postProto[AnnotationProto](annotationProto) } def duplicateSkeletonTracing(skeletonTracingId: String, diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 89d6c5fe1cf..28204b26da5 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -2155,7 +2155,7 @@ export function getEditableAgglomerateSkeleton( ): Promise { return doWithToken((token) => Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateSkeleton/${agglomerateId}?token=${token}`, + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateSkeleton/${agglomerateId}?token=${token}`, // The webworker code cannot do proper error handling and always expects an array buffer from the server. // However, the server might send an error json instead of an array buffer. Therefore, don't use the webworker code. 
{ diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 63e700c8bcf..0bedbdab47a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -18,12 +18,11 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -class AnnotationTransactionService @Inject()( - handledGroupIdStore: TracingStoreRedisStore, // TODO: instantiate here rather than with injection, give fix namespace prefix? - uncommittedUpdatesStore: TracingStoreRedisStore, - volumeTracingService: VolumeTracingService, - tracingDataStore: TracingDataStore, - annotationService: TSAnnotationService) +class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRedisStore, + uncommittedUpdatesStore: TracingStoreRedisStore, + volumeTracingService: VolumeTracingService, + tracingDataStore: TracingDataStore, + annotationService: TSAnnotationService) extends KeyValueStoreImplicits with LazyLogging { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 6f3a995bcb8..10f8d15a8d3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -216,7 +216,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { val 
volumeWithEditableMapping = annotationWithTracings.volumesThatHaveEditableMapping logger.info(s"fetching editable mappings ${volumeWithEditableMapping.map(_._2).mkString(",")}") - // TODO intersect with editable mapping updates? + // TODO perf optimization: intersect with editable mapping updates? unless requested for { idInfoUpdaterTuples <- Fox.serialCombined(volumeWithEditableMapping) { case (volumeTracing, volumeTracingId) => @@ -373,7 +373,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def updateActionStatistics(tracingId: String): Fox[JsObject] = for { - updateActionGroups <- tracingDataStore.skeletonUpdates.getMultipleVersions(tracingId)( + updateActionGroups <- tracingDataStore.annotationUpdates.getMultipleVersions(tracingId)( fromJsonBytes[List[UpdateAction]]) updateActions = updateActionGroups.flatten } yield { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index bdf301581d4..243761dea06 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -275,18 +275,6 @@ class VolumeTracingController @Inject()( } } - def updateActionLog(tracingId: String, - newestVersion: Option[Long] = None, - oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - for { - updateLog <- tracingService.updateActionLog(tracingId, newestVersion, oldestVersion) - } yield Ok(updateLog) - } - } - } - def requestAdHocMesh(tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { 
implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala index a836eb43a7c..e5e5d0ad777 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala @@ -22,16 +22,12 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val skeletons = new FossilDBClient("skeletons", config, slackNotificationService) - lazy val skeletonUpdates = new FossilDBClient("skeletonUpdates", config, slackNotificationService) - lazy val volumes = new FossilDBClient("volumes", config, slackNotificationService) lazy val volumeData = new FossilDBClient("volumeData", config, slackNotificationService) lazy val volumeSegmentIndex = new FossilDBClient("volumeSegmentIndex", config, slackNotificationService) - lazy val volumeUpdates = new FossilDBClient("volumeUpdates", config, slackNotificationService) - lazy val editableMappingsInfo = new FossilDBClient("editableMappingsInfo", config, slackNotificationService) lazy val editableMappingsAgglomerateToGraph = @@ -40,8 +36,6 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, lazy val editableMappingsSegmentToAgglomerate = new FossilDBClient("editableMappingsSegmentToAgglomerate", config, slackNotificationService) - lazy val editableMappingUpdates = new FossilDBClient("editableMappingUpdates", config, slackNotificationService) - lazy val annotations = new FossilDBClient("annotations", config, slackNotificationService) lazy val annotationUpdates = new FossilDBClient("annotationUpdates", config, slackNotificationService) @@ -49,14 +43,12 @@ class TracingDataStore @Inject()(config: 
TracingStoreConfig, private def shutdown(): Unit = { healthClient.shutdown() skeletons.shutdown() - skeletonUpdates.shutdown() + annotationUpdates.shutdown() volumes.shutdown() volumeData.shutdown() - volumeUpdates.shutdown() editableMappingsInfo.shutdown() editableMappingsAgglomerateToGraph.shutdown() editableMappingsSegmentToAgglomerate.shutdown() - editableMappingUpdates.shutdown() volumeSegmentIndex.shutdown() () } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingSelector.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingSelector.scala index 14598c9d5aa..0329c57e34a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingSelector.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingSelector.scala @@ -2,6 +2,6 @@ package com.scalableminds.webknossos.tracingstore.tracings import play.api.libs.json.{Json, OFormat} -case class TracingSelector(tracingId: String, version: Option[Long] = None) // TODO must pass annotation id +case class TracingSelector(tracingId: String, version: Option[Long] = None) object TracingSelector { implicit val jsonFormat: OFormat[TracingSelector] = Json.format[TracingSelector] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index b8ea620e1b3..e5299e4b3e1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -22,7 +22,6 @@ import com.scalableminds.webknossos.datastore.services.{ AdHocMeshServiceHolder, BinaryDataService } -import 
com.scalableminds.webknossos.tracingstore.annotation.{UpdateAction, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, @@ -155,7 +154,6 @@ class EditableMappingService @Inject()( } } yield () - */ private def duplicateSegmentToAgglomerate(sourceTracingId: String, newId: String, newVersion: Long): Fox[Unit] = { val iterator = @@ -190,14 +188,11 @@ class EditableMappingService @Inject()( }.toList) } yield () } + */ def assertTracingHasEditableMapping(tracing: VolumeTracing)(implicit ec: ExecutionContext): Fox[Unit] = bool2Fox(tracing.getHasEditableMapping) ?~> "annotation.volume.noEditableMapping" - def getBaseMappingName(tracingId: String): Fox[Option[String]] = - // TODO - Fox.successful(None) - def findSegmentIdAtPositionIfNeeded(remoteFallbackLayer: RemoteFallbackLayer, positionOpt: Option[Vec3Int], segmentIdOpt: Option[Long], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 31b40df0961..b56e5c8787e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -613,24 +613,6 @@ class VolumeTracingService @Inject()( additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) ) - def updateActionLog(tracingId: String, - newestVersion: Option[Long] = None, - oldestVersion: Option[Long] = None): Fox[JsValue] = { - def versionedTupleToJson(tuple: (Long, List[CompactVolumeUpdateAction])): JsObject = - Json.obj( - "version" -> tuple._1, - "value" -> Json.toJson(tuple._2) - ) - - for { - volumeTracings <- 
tracingDataStore.volumeUpdates.getMultipleVersionsAsVersionValueTuple( - tracingId, - newestVersion, - oldestVersion)(fromJsonBytes[List[CompactVolumeUpdateAction]]) - updateActionGroupsJs = volumeTracings.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) - } - def updateResolutionList(tracingId: String, tracing: VolumeTracing, resolutions: Set[Vec3Int], diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 6a4af8dd09d..7d9cd93f228 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -39,8 +39,7 @@ GET /mapping/:tracingId/segmentsForAgglomerate POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String) POST /mapping/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(tracingId: String) POST /mapping/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(tracingId: String) -# TODO rename -GET /volume/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) +GET /mapping/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) # Zarr endpoints for volume annotations # Zarr version 2 From a7ee147a60b650422ab3fe99b8c57fd7ce7dd6e9 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 26 Sep 2024 14:32:48 +0200 Subject: [PATCH 085/361] batching for update action log --- 
.../annotation/TSAnnotationService.scala | 27 ++++++++++++++----- .../controllers/TSAnnotationController.scala | 3 ++- .../EditableMappingService.scala | 9 +------ 3 files changed, 23 insertions(+), 16 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 10f8d15a8d3..072712bb0d9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -124,20 +124,25 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss Fox.failure("not implemented") // TODO create tracing object (ask wk for needed parameters e.g. fallback layer info?) - def updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]): Fox[JsValue] = { + def updateActionLog(annotationId: String, newestVersion: Long, oldestVersion: Long)( + implicit ec: ExecutionContext): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = Json.obj( "version" -> tuple._1, "value" -> Json.toJson(tuple._2) ) + val batchRanges = batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) for { - updateActionGroups <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( - annotationId, - newestVersion, - oldestVersion)(fromJsonBytes[List[UpdateAction]]) - updateActionGroupsJs = updateActionGroups.map(versionedTupleToJson) - } yield Json.toJson(updateActionGroupsJs) + updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => + val batchFrom = batchRange._1 + val batchTo = batchRange._2 + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(batchTo), + 
Some(batchFrom))(fromJsonBytes[List[UpdateAction]]) + } + } yield Json.toJson(updateActionBatches.flatten.map(versionedTupleToJson)) } def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, @@ -417,4 +422,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss editableMappingInfo <- getEditableMappingInfo(annotationId, tracingId) } yield Some(editableMappingInfo.baseMappingName) else Fox.successful(tracing.mappingName) + + private def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = + (0L to ((to - from) / batchSize)).map { batchIndex => + val batchFrom = batchIndex * batchSize + from + val batchTo = Math.min(to, (batchIndex + 1) * batchSize + from - 1) + (batchFrom, batchTo) + } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index f4fe393cc77..755a31aee67 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -58,7 +58,8 @@ class TSAnnotationController @Inject()( log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { - updateLog <- annotationService.updateActionLog(annotationId, newestVersion, oldestVersion) + newestMaterializableVersion <- annotationService.currentMaterializableVersion(annotationId) + updateLog <- annotationService.updateActionLog(annotationId, newestVersion.getOrElse(newestMaterializableVersion), oldestVersion.getOrElse(0)) } yield Ok(updateLog) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index e5299e4b3e1..79ad28415ad 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -573,12 +573,5 @@ class EditableMappingService @Inject()( } } yield () - */ - - private def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = - (0L to ((to - from) / batchSize)).map { batchIndex => - val batchFrom = batchIndex * batchSize + from - val batchTo = Math.min(to, (batchIndex + 1) * batchSize + from - 1) - (batchFrom, batchTo) - } + */ } From 65056ec631f29bbfbc6e33ab477b230fe6b17340 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 26 Sep 2024 14:39:13 +0200 Subject: [PATCH 086/361] remove unused stuff from TracingService --- .../tracings/TracingService.scala | 65 +------------------ .../volume/VolumeTracingService.scala | 2 - 2 files changed, 2 insertions(+), 65 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 01e87fa3a37..58cb7da7d16 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -1,17 +1,14 @@ package com.scalableminds.webknossos.tracingstore.tracings import com.scalableminds.util.accesscontext.TokenContext -import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} -import com.scalableminds.webknossos.datastore.services.RemoteWebknossosClient +import com.scalableminds.util.tools.{Fox, FoxImplicits} import 
com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} -import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.typesafe.scalalogging.LazyLogging -import play.api.http.Status.CONFLICT import net.liftweb.common.Box import play.api.i18n.MessagesProvider -import play.api.libs.json._ import scalapb.{GeneratedMessage, GeneratedMessageCompanion} import java.util.UUID @@ -47,10 +44,6 @@ trait TracingService[T <: GeneratedMessage] def dummyTracing: T - val handledGroupIdStore: TracingStoreRedisStore - - val uncommittedUpdatesStore: TracingStoreRedisStore - implicit def tracingCompanion: GeneratedMessageCompanion[T] // this should be longer than maxCacheTime in webknossos/AnnotationStore @@ -61,46 +54,9 @@ trait TracingService[T <: GeneratedMessage] // to provide useful error messages to the user if the temporary tracing is no longer present private val temporaryIdStoreTimeout = 10 days - private val handledGroupCacheExpiry: FiniteDuration = 24 hours - - private def transactionGroupKey(tracingId: String, transactionId: String, transactionGroupIndex: Int, version: Long) = - s"transactionGroup___${tracingId}___${transactionId}___${transactionGroupIndex}___$version" - protected def temporaryIdKey(tracingId: String) = s"temporaryTracingId___$tracingId" - private def patternFor(tracingId: String, transactionId: String) = - s"transactionGroup___${tracingId}___${transactionId}___*" - - def saveUncommitted(tracingId: String, - transactionId: String, - transactionGroupIndex: Int, - version: Long, - updateGroup: UpdateActionGroup, - expiry: FiniteDuration): Fox[Unit] = - for { - _ <- Fox.runIf(transactionGroupIndex > 0)( - Fox.assertTrue( - 
uncommittedUpdatesStore.contains(transactionGroupKey( - tracingId, - transactionId, - transactionGroupIndex - 1, - version))) ?~> s"Incorrect transaction index. Got: $transactionGroupIndex but ${transactionGroupIndex - 1} does not exist" ~> CONFLICT) - _ <- uncommittedUpdatesStore.insert(transactionGroupKey(tracingId, transactionId, transactionGroupIndex, version), - Json.toJson(updateGroup).toString(), - Some(expiry)) - } yield () - - def getAllUncommittedFor(tracingId: String, transactionId: String): Fox[List[UpdateActionGroup]] = - for { - raw: Seq[String] <- uncommittedUpdatesStore.findAllConditional(patternFor(tracingId, transactionId)) - parsed: Seq[UpdateActionGroup] = raw.flatMap(itemAsString => - JsonHelper.jsResultToOpt(Json.parse(itemAsString).validate[UpdateActionGroup])) - } yield parsed.toList.sortBy(_.transactionGroupIndex) - - def removeAllUncommittedFor(tracingId: String, transactionId: String): Fox[Unit] = - uncommittedUpdatesStore.removeAllConditional(patternFor(tracingId, transactionId)) - /* // TODO ? add this to migration? 
private def migrateTracing(tracingFox: Fox[T], tracingId: String): Fox[T] = tracingMigrationService.migrateTracing(tracingFox).flatMap { @@ -144,23 +100,6 @@ trait TracingService[T <: GeneratedMessage] } } - private def handledGroupKey(tracingId: String, transactionId: String, version: Long, transactionGroupIndex: Int) = - s"handledGroup___${tracingId}___${transactionId}___${version}___$transactionGroupIndex" - - def saveToHandledGroupIdStore(tracingId: String, - transactionId: String, - version: Long, - transactionGroupIndex: Int): Fox[Unit] = { - val key = handledGroupKey(tracingId, transactionId, version, transactionGroupIndex) - handledGroupIdStore.insert(key, "()", Some(handledGroupCacheExpiry)) - } - - def handledGroupIdStoreContains(tracingId: String, - transactionId: String, - version: Long, - transactionGroupIndex: Int): Fox[Boolean] = - handledGroupIdStore.contains(handledGroupKey(tracingId, transactionId, version, transactionGroupIndex)) - def merge(tracings: Seq[T], mergedVolumeStats: MergedVolumeStats, newEditableMappingIdOpt: Option[String]): Box[T] def remapTooLargeTreeIds(tracing: T): T = tracing diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index b56e5c8787e..96cfea1f6dc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -37,8 +37,6 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files import play.api.libs.Files.TemporaryFileCreator -import play.api.libs.json.{JsObject, JsValue, Json} - import java.io._ import java.nio.file.Paths import java.util.Base64 From 
1d9bb89795e5829f9d6eda6b176438d951c26276 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 26 Sep 2024 14:47:40 +0200 Subject: [PATCH 087/361] handle dummy annotation id + dummy tracing id --- app/controllers/UserTokenController.scala | 33 +++++++++++-------- .../WKRemoteTracingStoreController.scala | 11 +++++-- .../TSRemoteWebknossosClient.scala | 2 +- 3 files changed, 28 insertions(+), 18 deletions(-) diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 3f95d0ff51f..29580465eeb 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -39,7 +39,6 @@ object RpcTokenHolder { class UserTokenController @Inject()(datasetDAO: DatasetDAO, datasetService: DatasetService, - annotationDAO: AnnotationDAO, annotationPrivateLinkDAO: AnnotationPrivateLinkDAO, userService: UserService, organizationDAO: OrganizationDAO, @@ -184,18 +183,24 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, case _ => Fox.successful(false) } - // TODO is a dummy annotation id needed? 
- for { - annotation <- annotationInformationProvider.provideAnnotation(annotationId, userBox)(GlobalAccessContext) ?~> "annotation.notFound" - annotationAccessByToken <- token.map(annotationPrivateLinkDAO.findOneByAccessToken).getOrElse(Fox.empty).futureBox - allowedByToken = annotationAccessByToken.exists(annotation._id == _._annotation) - restrictions <- annotationInformationProvider.restrictionsFor( - AnnotationIdentifier(annotation.typ, annotation._id))(GlobalAccessContext) ?~> "restrictions.notFound" - allowedByUser <- checkRestrictions(restrictions) ?~> "restrictions.failedToCheck" - allowed = allowedByToken || allowedByUser - } yield { - if (allowed) UserAccessAnswer(granted = true) - else UserAccessAnswer(granted = false, Some(s"No ${mode.toString} access to tracing")) + if (annotationId == ObjectId.dummyId.toString) { + Fox.successful(UserAccessAnswer(granted = true)) + } else { + for { + annotation <- annotationInformationProvider.provideAnnotation(annotationId, userBox)(GlobalAccessContext) ?~> "annotation.notFound" + annotationAccessByToken <- token + .map(annotationPrivateLinkDAO.findOneByAccessToken) + .getOrElse(Fox.empty) + .futureBox + allowedByToken = annotationAccessByToken.exists(annotation._id == _._annotation) + restrictions <- annotationInformationProvider.restrictionsFor( + AnnotationIdentifier(annotation.typ, annotation._id))(GlobalAccessContext) ?~> "restrictions.notFound" + allowedByUser <- checkRestrictions(restrictions) ?~> "restrictions.failedToCheck" + allowed = allowedByToken || allowedByUser + } yield { + if (allowed) UserAccessAnswer(granted = true) + else UserAccessAnswer(granted = false, Some(s"No ${mode.toString} access to tracing")) + } } } @@ -208,7 +213,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, jobBox <- jobDAO.findOne(jobIdValidated)(DBAccessContext(userBox)).futureBox answer = jobBox match { case Full(_) => UserAccessAnswer(granted = true) - case _ => UserAccessAnswer(granted = false, Some(s"No 
${mode} access to job export")) + case _ => UserAccessAnswer(granted = false, Some(s"No $mode access to job export")) } } yield answer } diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 1476dba7c28..f3b488b8060 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -5,6 +5,7 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.tracingstore.TracingUpdatesReport +import com.scalableminds.webknossos.tracingstore.tracings.TracingIds import javax.inject.Inject import models.analytics.{AnalyticsService, UpdateAnnotationEvent, UpdateAnnotationViewOnlyEvent} @@ -118,9 +119,13 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore Action.async { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext - for { - annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" - } yield Ok(Json.toJson(annotation._id)) + if (tracingId == TracingIds.dummyTracingId) { + Fox.successful(Ok(Json.toJson(ObjectId.dummyId))) + } else { + for { + annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" + } yield Ok(Json.toJson(annotation._id)) + } } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 8024af01d16..7835962dd20 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -22,7 +22,7 @@ import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt case class TracingUpdatesReport(annotationId: String, - // TODO stats per tracing id? + // TODO stats per tracing id? coordinate with frontend timestamps: List[Instant], statistics: Option[JsObject], significantChangesCount: Int, From d092ece22ad1247a78ef47ca7aaa3642471c4388 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 26 Sep 2024 15:11:28 +0200 Subject: [PATCH 088/361] distinguish between updateUserBoundingBoxVisibility in skeleton + volume tracing --- .../model/sagas/skeletontracing_saga.ts | 4 +- .../oxalis/model/sagas/update_actions.ts | 24 +++++-- .../oxalis/model/sagas/volumetracing_saga.tsx | 4 +- .../javascripts/oxalis/view/version_entry.tsx | 6 +- .../annotation/UpdateActions.scala | 67 ++++++++++--------- 5 files changed, 63 insertions(+), 42 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index 96b98f14640..4c56c6f837c 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -27,7 +27,7 @@ import { updateTreeEdgesVisibility, updateNode, updateSkeletonTracing, - updateUserBoundingBoxes, + updateUserBoundingBoxesInSkeletonTracing, updateTree, updateTreeGroups, } from "oxalis/model/sagas/update_actions"; @@ -636,7 +636,7 @@ export function* diffSkeletonTracing( } if (!_.isEqual(prevSkeletonTracing.userBoundingBoxes, skeletonTracing.userBoundingBoxes)) { - yield updateUserBoundingBoxes(skeletonTracing.userBoundingBoxes); + yield updateUserBoundingBoxesInSkeletonTracing(skeletonTracing.userBoundingBoxes); } } export default [ diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts 
b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 2d6e15ec4fc..b7ab2a80b87 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -34,7 +34,12 @@ export type CreateSegmentUpdateAction = ReturnType; export type DeleteSegmentUpdateAction = ReturnType; export type DeleteSegmentDataUpdateAction = ReturnType; -type UpdateUserBoundingBoxesUpdateAction = ReturnType; +type UpdateUserBoundingBoxesInSkeletonTracingUpdateAction = ReturnType< + typeof updateUserBoundingBoxesInSkeletonTracing +>; +type UpdateUserBoundingBoxesInVolumeTracingUpdateAction = ReturnType< + typeof updateUserBoundingBoxesInVolumeTracing +>; export type UpdateBucketUpdateAction = ReturnType; type UpdateSegmentGroupsUpdateAction = ReturnType; @@ -61,7 +66,8 @@ export type UpdateAction = | DeleteEdgeUpdateAction | UpdateSkeletonTracingUpdateAction | UpdateVolumeTracingUpdateAction - | UpdateUserBoundingBoxesUpdateAction + | UpdateUserBoundingBoxesInSkeletonTracingUpdateAction + | UpdateUserBoundingBoxesInVolumeTracingUpdateAction | CreateSegmentUpdateAction | UpdateSegmentUpdateAction | DeleteSegmentUpdateAction @@ -314,9 +320,19 @@ export function updateVolumeTracing( }, } as const; } -export function updateUserBoundingBoxes(userBoundingBoxes: Array) { +export function updateUserBoundingBoxesInSkeletonTracing( + userBoundingBoxes: Array, +) { + return { + name: "updateUserBoundingBoxesInSkeletonTracing", + value: { + boundingBoxes: convertUserBoundingBoxesFromFrontendToServer(userBoundingBoxes), + }, + } as const; +} +export function updateUserBoundingBoxesInVolumeTracing(userBoundingBoxes: Array) { return { - name: "updateUserBoundingBoxes", + name: "updateUserBoundingBoxesInVolumeTracing", value: { boundingBoxes: convertUserBoundingBoxesFromFrontendToServer(userBoundingBoxes), }, diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx 
b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index 42ecceb0d37..938c04d76b7 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -95,7 +95,7 @@ import { deleteSegmentVolumeAction, removeFallbackLayer, updateSegmentVolumeAction, - updateUserBoundingBoxes, + updateUserBoundingBoxesInVolumeTracing, updateVolumeTracing, updateMappingName, } from "oxalis/model/sagas/update_actions"; @@ -700,7 +700,7 @@ export function* diffVolumeTracing( } if (!_.isEqual(prevVolumeTracing.userBoundingBoxes, volumeTracing.userBoundingBoxes)) { - yield updateUserBoundingBoxes(volumeTracing.userBoundingBoxes); + yield updateUserBoundingBoxesInVolumeTracing(volumeTracing.userBoundingBoxes); } if (prevVolumeTracing !== volumeTracing) { diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index 4302438ded2..f9133b26a2f 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -65,7 +65,11 @@ const descriptionFns: Record Descr description: "Created the annotation.", icon: , }), - updateUserBoundingBoxes: (): Description => ({ + updateUserBoundingBoxesInSkeletonTracing: (): Description => ({ + description: "Updated a bounding box.", + icon: , + }), + updateUserBoundingBoxesInVolumeTracing: (): Description => ({ description: "Updated a bounding box.", icon: , }), diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index 8c7d0e34cb8..b54db784dc3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -66,41 +66,42 @@ object 
UpdateAction { val jsonValue = (json \ "value").as[JsObject] (json \ "name").as[String] match { // Skeleton - case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) - case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) - case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) - case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) - case "moveTreeComponent" => deserialize[MoveTreeComponentSkeletonAction](jsonValue) - case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) - case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) - case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) - case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) - case "updateSkeletonTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) - case "updateTreeVisibility" => deserialize[UpdateTreeVisibilitySkeletonAction](jsonValue) - case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibilitySkeletonAction](jsonValue) - case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibilitySkeletonAction](jsonValue) - case "updateUserBoundingBoxes" => deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) - case "updateUserBoundingBoxVisibility" => + case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) + case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) + case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) + case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) + case "moveTreeComponent" => deserialize[MoveTreeComponentSkeletonAction](jsonValue) + case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case 
"deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) + case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) + case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) + case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) + case "updateSkeletonTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) + case "updateTreeVisibility" => deserialize[UpdateTreeVisibilitySkeletonAction](jsonValue) + case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibilitySkeletonAction](jsonValue) + case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibilitySkeletonAction](jsonValue) + case "updateUserBoundingBoxesInSkeletonTracing" => deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) + case "updateUserBoundingBoxVisibilityInSkeletonTracing" => deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) // Volume case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) - case "updateUserBoundingBoxes" => + case "updateUserBoundingBoxesInVolumeTracing" => deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) // TODO: rename key (must be different from skeleton action) - case "updateUserBoundingBoxVisibility" => deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) - case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) - case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) - case "updateTdCameraSkeleton" => deserialize[UpdateTdCameraSkeletonAction](jsonValue) // TODO deduplicate? 
- case "updateTdCameraVolume" => deserialize[UpdateTdCameraVolumeAction](jsonValue) - case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) - case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) - case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) - case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) - case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) - case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) + case "updateUserBoundingBoxVisibilityInVolumeTracing" => + deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) + case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) + case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) + case "updateTdCameraSkeleton" => deserialize[UpdateTdCameraSkeletonAction](jsonValue) // TODO deduplicate? + case "updateTdCameraVolume" => deserialize[UpdateTdCameraVolumeAction](jsonValue) + case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) + case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) + case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) + case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) + case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) + case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) // Editable Mapping case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) @@ -164,10 +165,10 @@ object UpdateAction { Json.obj("name" -> "updateTreeEdgesVisibility", "value" -> Json.toJson(s)(UpdateTreeEdgesVisibilitySkeletonAction.jsonFormat)) case s: UpdateUserBoundingBoxesSkeletonAction => - Json.obj("name" -> "updateUserBoundingBoxes", + Json.obj("name" -> "updateUserBoundingBoxesInSkeletonTracing", "value" -> 
Json.toJson(s)(UpdateUserBoundingBoxesSkeletonAction.jsonFormat)) case s: UpdateUserBoundingBoxVisibilitySkeletonAction => - Json.obj("name" -> "updateUserBoundingBoxVisibility", + Json.obj("name" -> "updateUserBoundingBoxVisibilityInSkeletonTracing", "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilitySkeletonAction.jsonFormat)) case s: UpdateTdCameraSkeletonAction => Json.obj("name" -> "updateTdCameraSkeleton", "value" -> Json.toJson(s)(UpdateTdCameraSkeletonAction.jsonFormat)) @@ -178,10 +179,10 @@ object UpdateAction { case s: UpdateTracingVolumeAction => Json.obj("name" -> "updateVolumeTracing", "value" -> Json.toJson(s)(UpdateTracingVolumeAction.jsonFormat)) case s: UpdateUserBoundingBoxesVolumeAction => - Json.obj("name" -> "updateUserBoundingBoxes", + Json.obj("name" -> "updateUserBoundingBoxesInVolumeTracing", "value" -> Json.toJson(s)(UpdateUserBoundingBoxesVolumeAction.jsonFormat)) case s: UpdateUserBoundingBoxVisibilityVolumeAction => - Json.obj("name" -> "updateUserBoundingBoxVisibility", + Json.obj("name" -> "updateUserBoundingBoxVisibilityInVolumeTracing", "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilityVolumeAction.jsonFormat)) case s: RemoveFallbackLayerVolumeAction => Json.obj("name" -> "removeFallbackLayer", "value" -> Json.toJson(s)(RemoveFallbackLayerVolumeAction.jsonFormat)) From 2bec9e3c13f819fd1e31e2b8dcc7b9e7ace5f518 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 26 Sep 2024 16:09:26 +0200 Subject: [PATCH 089/361] wip: prepare revertToVersion workflow --- .../annotation/TSAnnotationService.scala | 22 +++++++++++++++---- .../EditableMappingController.scala | 3 +-- .../EditableMappingService.scala | 13 +++++------ .../EditableMappingUpdater.scala | 2 +- .../skeleton/SkeletonTracingService.scala | 1 - 5 files changed, 25 insertions(+), 16 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 072712bb0d9..755bd9f62a8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -78,11 +78,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss if (desiredVersion == existingVersion) Fox.successful(List()) else { for { - updateActionGroups <- tracingDataStore.annotationUpdates.getMultipleVersions( + updateActionGroupsWithVersions <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( annotationId, Some(desiredVersion), Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) - } yield updateActionGroups.reverse.flatten + updateActionGroupsWithVersionsIroned = ironOutReversionFolds(updateActionGroupsWithVersions) + } yield updateActionGroupsWithVersionsIroned } // TODO option to dry apply? @@ -113,12 +114,22 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationWithTracings.applyVolumeAction(a) case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) + case a: RevertToVersionUpdateAction => + revertToVersion(annotationId, annotationWithTracings, a) case _: BucketMutatingVolumeUpdateAction => Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. 
case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") } } yield updated + private def revertToVersion( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + // Note: works only after “ironing out” the update action groups + // TODO: read old annotationProto, tracing, buckets, segment indeces + Fox.successful(annotationWithTracings) + def createTracing(a: AddLayerAnnotationUpdateAction)( implicit ec: ExecutionContext): Fox[Either[SkeletonTracing, VolumeTracing]] = Fox.failure("not implemented") @@ -315,8 +326,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case Full(annotationWithTracings) => remainingUpdates match { case List() => Fox.successful(annotationWithTracings) - case RevertToVersionUpdateAction(sourceVersion, _, _, _) :: tail => - ??? case update :: tail => updateIter(applyUpdate(annotationId, annotationWithTracings, update, targetVersion), tail) } @@ -335,6 +344,11 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } + private def ironOutReversionFolds( + updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[UpdateAction] = + // TODO: if the source version is in the current update list, it needs to be ironed out. in case of overlaps, iron out from the back. + updateActionGroupsWithVersions.flatMap(_._2) + private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings)(implicit ec: ExecutionContext) = // TODO skip some flushes to save disk space (e.g. skeletons only nth version, or only if requested?) 
for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index cc781fe939d..96430c6e33f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -128,9 +128,8 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- volumeTracingService.find(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService - .getAgglomerateGraphForId(tracingId, tracing.version, agglomerateId, remoteFallbackLayer) + .getAgglomerateGraphForId(tracingId, tracing.version, agglomerateId) .futureBox segmentIds <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 79ad28415ad..dcc8c012cc6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -107,8 +107,8 @@ class EditableMappingService @Inject()( adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 
seconds, 1) private val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService - // TODO - private lazy val materializedInfoCache: AlfuCache[(String, Long), EditableMappingInfo] = AlfuCache(maxCapacity = 100) + // TODO cache materialized stuff again, for e.g. faster bucket loading + // private lazy val materializedInfoCache: AlfuCache[(String, Long), EditableMappingInfo] = AlfuCache(maxCapacity = 100) private lazy val segmentToAgglomerateChunkCache: AlfuCache[(String, Long, Long), Seq[(Long, Long)]] = AlfuCache() @@ -298,7 +298,7 @@ class EditableMappingService @Inject()( remoteFallbackLayer: RemoteFallbackLayer, agglomerateId: Long)(implicit tc: TokenContext): Fox[Array[Byte]] = for { - agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId, remoteFallbackLayer).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId).futureBox skeletonBytes <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraphToSkeleton(tracingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) @@ -405,10 +405,7 @@ class EditableMappingService @Inject()( adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } - def getAgglomerateGraphForId(tracingId: String, - version: Long, - agglomerateId: Long, - remoteFallbackLayer: RemoteFallbackLayer): Fox[AgglomerateGraph] = + def getAgglomerateGraphForId(tracingId: String, version: Long, agglomerateId: Long): Fox[AgglomerateGraph] = for { agglomerateGraph <- agglomerateToGraphCache.getOrLoad( (tracingId, agglomerateId, version), @@ -429,7 +426,7 @@ class EditableMappingService @Inject()( agglomerateId: Long, remoteFallbackLayer: RemoteFallbackLayer)(implicit tc: TokenContext): Fox[AgglomerateGraph] = for { - agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId, remoteFallbackLayer).futureBox + agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, 
agglomerateId).futureBox agglomerateGraph <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph) case Empty => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 5dc1e3017ba..715a3055984 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -455,7 +455,7 @@ class EditableMappingUpdater( for { agglomerateId <- agglomerateIdFromAgglomerateGraphKey(graphKey) _ <- editableMappingService - .getAgglomerateGraphForId(tracingId, revertAction.sourceVersion, agglomerateId, remoteFallbackLayer) + .getAgglomerateGraphForId(tracingId, revertAction.sourceVersion, agglomerateId) .futureBox .map { case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 66c8ac9c9d9..1ee7bb354c1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -8,7 +8,6 @@ import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import 
com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import com.scalableminds.webknossos.datastore.services.RemoteWebknossosClient import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings._ From 538395f53d986cd8ae6511e9a54dad4652e44e78 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 7 Oct 2024 11:46:34 +0200 Subject: [PATCH 090/361] wip revert to version --- .../annotation/AnnotationReversion.scala | 15 ++++ .../annotation/TSAnnotationService.scala | 89 ++++++++++++------- .../skeleton/SkeletonTracingService.scala | 6 +- .../volume/VolumeTracingService.scala | 6 +- 4 files changed, 81 insertions(+), 35 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala new file mode 100644 index 00000000000..4d4bba4b08b --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -0,0 +1,15 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import com.scalableminds.util.tools.Fox + +import scala.concurrent.ExecutionContext + +trait AnnotationReversion { + + def revertDistributedElements(annotationId: String, + annotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionUpdateAction)(implicit ec: ExecutionContext): Fox[Unit] = + // TODO segment index, volume buckets, proofreading data + Fox.successful(()) + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 755bd9f62a8..b58965d35b8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} -import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerTypeProto, AnnotationProto} import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -52,6 +52,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss extends KeyValueStoreImplicits with FallbackDataHelper with ProtoGeometryImplicits + with AnnotationReversion with LazyLogging { def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit tc: TokenContext): Fox[Unit] = @@ -122,13 +123,21 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } yield updated - private def revertToVersion( - annotationId: String, - annotationWithTracings: AnnotationWithTracings, - revertAction: RevertToVersionUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + private def revertToVersion(annotationId: String, + annotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionUpdateAction)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = // Note: works only after “ironing out” the update action groups // TODO: read old 
annotationProto, tracing, buckets, segment indeces - Fox.successful(annotationWithTracings) + for { + sourceAnnotation <- getWithTracings(annotationId, + Some(revertAction.sourceVersion), + List.empty, + List.empty, + requestAll = true) // TODO do we need to request the others? + _ <- revertDistributedElements(annotationId, sourceAnnotation, revertAction) + } yield sourceAnnotation def createTracing(a: AddLayerAnnotationUpdateAction)( implicit ec: ExecutionContext): Fox[Either[SkeletonTracing, VolumeTracing]] = @@ -159,14 +168,15 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { - withTracings <- getWithTracings(annotationId, version, List.empty, List.empty) + withTracings <- getWithTracings(annotationId, version, List.empty, List.empty, requestAll = false) } yield withTracings.annotation - def getWithTracings(annotationId: String, - version: Option[Long], - requestedSkeletonTracingIds: List[String], - requestedVolumeTracingIds: List[String])(implicit ec: ExecutionContext, - tc: TokenContext): Fox[AnnotationWithTracings] = + def getWithTracings( + annotationId: String, + version: Option[Long], + requestedSkeletonTracingIds: List[String], + requestedVolumeTracingIds: List[String], + requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { annotationWithVersion <- tracingDataStore.annotations.get(annotationId, version)(fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" annotation = annotationWithVersion.value @@ -174,14 +184,15 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId, version, requestedSkeletonTracingIds, - requestedVolumeTracingIds) ?~> "applyUpdates.failed" + requestedVolumeTracingIds, + requestAll) ?~> "applyUpdates.failed" } yield updated def getEditableMappingInfo(annotationId: String, 
tracingId: String, version: Option[Long] = None)( implicit ec: ExecutionContext, tc: TokenContext): Fox[EditableMappingInfo] = for { - annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId)) ?~> "getWithTracings.failed" + annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId), requestAll = false) ?~> "getWithTracings.failed" tracing <- annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" } yield tracing @@ -202,20 +213,21 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss targetVersion) } yield annotationWithTracings.addEditableMapping(action.actionTracingId, editableMappingInfo.value, updater) - private def applyPendingUpdates(annotation: AnnotationProto, - annotationId: String, - targetVersionOpt: Option[Long], - requestedSkeletonTracingIds: List[String], - requestedVolumeTracingIds: List[String])( - implicit ec: ExecutionContext, - tc: TokenContext): Fox[AnnotationWithTracings] = + private def applyPendingUpdates( + annotation: AnnotationProto, + annotationId: String, + targetVersionOpt: Option[Long], + requestedSkeletonTracingIds: List[String], + requestedVolumeTracingIds: List[String], + requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) ?~> "determineTargetVersion.failed" updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" annotationWithTracings <- findTracingsForUpdates(annotation, updates, requestedSkeletonTracingIds, - requestedVolumeTracingIds) ?~> "findTracingsForUpdates.failed" + requestedVolumeTracingIds, + requestAll) ?~> "findTracingsForUpdates.failed" annotationWithTracingsAndMappings <- findEditableMappingsForUpdates(annotationId, annotationWithTracings, updates, @@ -283,23 +295,34 @@ class TSAnnotationService @Inject()(val 
remoteWebknossosClient: TSRemoteWebknoss annotation: AnnotationProto, updates: List[UpdateAction], requestedSkeletonTracingIds: List[String], - requestedVolumeTracingIds: List[String])(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { - val skeletonTracingIds = (updates.flatMap { - case u: SkeletonUpdateAction => Some(u.actionTracingId) - case _ => None - } ++ requestedSkeletonTracingIds).distinct - val volumeTracingIds = (updates.flatMap { - case u: VolumeUpdateAction => Some(u.actionTracingId) - case _ => None - } ++ requestedVolumeTracingIds).distinct + requestedVolumeTracingIds: List[String], + requestAll: Boolean)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { + val skeletonTracingIds = + if (requestAll) + annotation.layers.filter(_.`type` == AnnotationLayerTypeProto.skeleton).map(_.tracingId) + else { + (updates.flatMap { + case u: SkeletonUpdateAction => Some(u.actionTracingId) + case _ => None + } ++ requestedSkeletonTracingIds).distinct + } + val volumeTracingIds = + if (requestAll) + annotation.layers.filter(_.`type` == AnnotationLayerTypeProto.volume).map(_.tracingId) + else { + (updates.flatMap { + case u: VolumeUpdateAction => Some(u.actionTracingId) + case _ => None + } ++ requestedVolumeTracingIds).distinct + } logger.info(s"fetching volumes $volumeTracingIds and skeletons $skeletonTracingIds") for { - skeletonTracings <- Fox.serialCombined(skeletonTracingIds)( + skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)( id => tracingDataStore.skeletons.get[SkeletonTracing](id, Some(annotation.version), mayBeEmpty = Some(true))( fromProtoBytes[SkeletonTracing])) - volumeTracings <- Fox.serialCombined(volumeTracingIds)( + volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)( id => tracingDataStore.volumes .get[VolumeTracing](id, Some(annotation.version), mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing])) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 1ee7bb354c1..0512226823f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -46,7 +46,11 @@ class SkeletonTracingService @Inject()( Fox.successful(dummyTracing) else { for { - annotation <- annotationService.getWithTracings(annotationId, version, List(tracingId), List.empty) // TODO is applyUpdates still needed? + annotation <- annotationService.getWithTracings(annotationId, + version, + List(tracingId), + List.empty, + requestAll = false) // TODO is applyUpdates still needed? tracing <- annotation.getSkeleton(tracingId) } yield tracing } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 96cfea1f6dc..bf8b16b1bb3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -185,7 +185,11 @@ class VolumeTracingService @Inject()( Fox.successful(dummyTracing) else { for { - annotation <- annotationService.getWithTracings(annotationId, version, List.empty, List(tracingId)) // TODO is applyUpdates still needed? + annotation <- annotationService.getWithTracings(annotationId, + version, + List.empty, + List(tracingId), + requestAll = false) // TODO is applyUpdates still needed? 
tracing <- annotation.getVolume(tracingId) } yield tracing } From 8b8b4c0d298c51d0e149dfec227403aa140cd09c Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 7 Oct 2024 13:27:39 +0200 Subject: [PATCH 091/361] revert to version format --- .../annotation/TSAnnotationService.scala | 2 ++ .../annotation/UpdateActions.scala | 6 +++--- .../updating/SkeletonUpdateActions.scala | 21 ------------------- 3 files changed, 5 insertions(+), 24 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index b58965d35b8..476aa3f5490 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -136,6 +136,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss List.empty, List.empty, requestAll = true) // TODO do we need to request the others? + _ = logger.info( + s"reverting to suorceVersion ${revertAction.sourceVersion}. 
got sourceAnnotation with version ${sourceAnnotation.version}") _ <- revertDistributedElements(annotationId, sourceAnnotation, revertAction) } yield sourceAnnotation diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index b54db784dc3..6b56c292440 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -13,7 +13,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ DeleteTreeSkeletonAction, MergeTreeSkeletonAction, MoveTreeComponentSkeletonAction, - RevertToVersionSkeletonAction, UpdateNodeSkeletonAction, UpdateTdCameraSkeletonAction, UpdateTracingSkeletonAction, @@ -112,6 +111,7 @@ object UpdateAction { case "deleteLayerFromAnnotation" => deserialize[DeleteLayerAnnotationUpdateAction](jsonValue) case "updateLayerMetadata" => deserialize[UpdateLayerMetadataAnnotationUpdateAction](jsonValue) case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationUpdateAction](jsonValue) + case "revertToVersion" => deserialize[RevertToVersionUpdateAction](jsonValue) case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") } @@ -153,8 +153,6 @@ object UpdateAction { Json.obj("name" -> "updateTreeGroups", "value" -> Json.toJson(s)(UpdateTreeGroupsSkeletonAction.jsonFormat)) case s: UpdateTracingSkeletonAction => Json.obj("name" -> "updateSkeletonTracing", "value" -> Json.toJson(s)(UpdateTracingSkeletonAction.jsonFormat)) - case s: RevertToVersionSkeletonAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionSkeletonAction.jsonFormat)) case s: UpdateTreeVisibilitySkeletonAction => Json.obj("name" -> "updateTreeVisibility", "value" -> 
Json.toJson(s)(UpdateTreeVisibilitySkeletonAction.jsonFormat)) @@ -220,6 +218,8 @@ object UpdateAction { case s: UpdateMetadataAnnotationUpdateAction => Json.obj("name" -> "updateMetadataOfAnnotation", "value" -> Json.toJson(s)(UpdateMetadataAnnotationUpdateAction.jsonFormat)) + case s: RevertToVersionUpdateAction => + Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionUpdateAction.jsonFormat)) } } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index aa82e9af23b..41f3988feb2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -404,24 +404,6 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], this.copy(actionAuthorId = authorId) } -case class RevertToVersionSkeletonAction(sourceVersion: Long, - actionTracingId: String, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends SkeletonUpdateAction { - override def applyOn(tracing: SkeletonTracing): SkeletonTracing = - throw new Exception("RevertToVersionAction applied on unversioned tracing") - - override def addTimestamp(timestamp: Long): UpdateAction = - this.copy(actionTimestamp = Some(timestamp)) - - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - - override def addAuthorId(authorId: Option[String]): UpdateAction = - this.copy(actionAuthorId = authorId) -} - case class UpdateTreeVisibilitySkeletonAction(treeId: Int, isVisible: Boolean, actionTracingId: String, @@ -607,9 +589,6 @@ object UpdateTreeGroupsSkeletonAction { object 
UpdateTracingSkeletonAction { implicit val jsonFormat: OFormat[UpdateTracingSkeletonAction] = Json.format[UpdateTracingSkeletonAction] } -object RevertToVersionSkeletonAction { - implicit val jsonFormat: OFormat[RevertToVersionSkeletonAction] = Json.format[RevertToVersionSkeletonAction] -} object UpdateTreeVisibilitySkeletonAction { implicit val jsonFormat: OFormat[UpdateTreeVisibilitySkeletonAction] = Json.format[UpdateTreeVisibilitySkeletonAction] } From 0abf0789d47f6a4977b8e8250cac6f7daa39b37d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 7 Oct 2024 14:21:19 +0200 Subject: [PATCH 092/361] fix update action order --- .../annotation/AnnotationWithTracings.scala | 7 ++++++- .../annotation/TSAnnotationService.scala | 18 +++++++++++------- 2 files changed, 17 insertions(+), 8 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 37b17dfb8f5..42c41169f3d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -13,6 +13,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ } import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction import com.scalableminds.webknossos.tracingstore.tracings.volume.ApplyableVolumeUpdateAction +import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Box, Failure, Full} import scala.concurrent.ExecutionContext @@ -20,7 +21,8 @@ import scala.concurrent.ExecutionContext case class AnnotationWithTracings( annotation: AnnotationProto, tracingsById: Map[String, Either[SkeletonTracing, VolumeTracing]], - editableMappingsByTracingId: Map[String, (EditableMappingInfo, 
EditableMappingUpdater)]) { + editableMappingsByTracingId: Map[String, (EditableMappingInfo, EditableMappingUpdater)]) + extends LazyLogging { def getSkeleton(tracingId: String): Box[SkeletonTracing] = for { @@ -142,4 +144,7 @@ case class AnnotationWithTracings( } yield () } + def skeletonStats: String = + f"skeleton with ${getSkeletons.map(_._2).map(_.trees.map(_.nodes.length).sum).mkString} nodes" + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 476aa3f5490..6a3207058c4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -131,13 +131,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss // Note: works only after “ironing out” the update action groups // TODO: read old annotationProto, tracing, buckets, segment indeces for { - sourceAnnotation <- getWithTracings(annotationId, - Some(revertAction.sourceVersion), - List.empty, - List.empty, - requestAll = true) // TODO do we need to request the others? + sourceAnnotation: AnnotationWithTracings <- getWithTracings( + annotationId, + Some(revertAction.sourceVersion), + List.empty, + List.empty, + requestAll = true) // TODO do we need to request the others? _ = logger.info( - s"reverting to suorceVersion ${revertAction.sourceVersion}. got sourceAnnotation with version ${sourceAnnotation.version}") + s"reverting to suorceVersion ${revertAction.sourceVersion}. 
got sourceAnnotation with version ${sourceAnnotation.version} with ${sourceAnnotation.skeletonStats}") _ <- revertDistributedElements(annotationId, sourceAnnotation, revertAction) } yield sourceAnnotation @@ -352,6 +353,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss remainingUpdates match { case List() => Fox.successful(annotationWithTracings) case update :: tail => + logger.info( + f"${remainingUpdates.length} remainingUpdates, current skeleton ${annotationWithTracings.skeletonStats})") updateIter(applyUpdate(annotationId, annotationWithTracings, update, targetVersion), tail) } case _ => annotationWithTracingsFox @@ -362,6 +365,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { updated <- updateIter(Some(annotation), updates) updatedWithNewVerson = updated.withVersion(targetVersion) + _ = logger.info(s"flushing, with ${updated.skeletonStats}") _ <- updatedWithNewVerson.flushBufferedUpdates() _ <- flushUpdatedTracings(updatedWithNewVerson) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) @@ -372,7 +376,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def ironOutReversionFolds( updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[UpdateAction] = // TODO: if the source version is in the current update list, it needs to be ironed out. in case of overlaps, iron out from the back. - updateActionGroupsWithVersions.flatMap(_._2) + updateActionGroupsWithVersions.reverse.flatMap(_._2) private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings)(implicit ec: ExecutionContext) = // TODO skip some flushes to save disk space (e.g. skeletons only nth version, or only if requested?) 
From 1127a9217779abaf72cc978dae810f9441eaac1e Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 8 Oct 2024 10:15:45 +0200 Subject: [PATCH 093/361] resolve circular dependency between the services --- .../TSRemoteWebknossosClient.scala | 3 +- .../annotation/AnnotationReversion.scala | 17 +- .../AnnotationTransactionService.scala | 25 ++- .../annotation/TSAnnotationService.scala | 69 ++++++- .../EditableMappingController.scala | 16 +- .../SkeletonTracingController.scala | 128 +++++++++++-- .../controllers/TracingController.scala | 131 -------------- .../controllers/VolumeTracingController.scala | 170 ++++++++++++++---- ...VolumeTracingZarrStreamingController.scala | 20 +-- .../tracings/TracingService.scala | 22 --- .../skeleton/SkeletonTracingService.scala | 20 --- .../tracings/volume/TSFullMeshService.scala | 4 +- .../VolumeSegmentStatisticsService.scala | 8 +- .../volume/VolumeTracingDownsampling.scala | 33 ++-- .../volume/VolumeTracingService.scala | 74 +++----- 15 files changed, 410 insertions(+), 330 deletions(-) delete mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 7835962dd20..70852b533a6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -54,10 +54,11 @@ class TSRemoteWebknossosClient @Inject()( .silent .post(Json.toJson(tracingUpdatesReport)) - def getDataSourceForTracing(tracingId: String): Fox[DataSourceLike] = + def getDataSourceForTracing(tracingId: String)(implicit tc: TokenContext): Fox[DataSourceLike] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSource") 
.addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) + .withTokenFromContext .getWithJsonResponse[DataSourceLike] def getDataStoreUriForDataSource(organizationId: String, datasetName: String): Fox[String] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala index 4d4bba4b08b..c26f5bf7bfc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -1,15 +1,26 @@ package com.scalableminds.webknossos.tracingstore.annotation +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeSegmentIndexBuffer import scala.concurrent.ExecutionContext trait AnnotationReversion { def revertDistributedElements(annotationId: String, - annotationWithTracings: AnnotationWithTracings, - revertAction: RevertToVersionUpdateAction)(implicit ec: ExecutionContext): Fox[Unit] = + currentAnnotationWithTracings: AnnotationWithTracings, + sourceAnnotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionUpdateAction, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = // TODO segment index, volume buckets, proofreading data - Fox.successful(()) + for { + _ <- Fox.serialCombined(sourceAnnotationWithTracings.getVolumes) { + // Only volume data for volume layers present in the *source annotation* needs to be reverted. 
+ case (tracingId, sourceTracing) => Fox.successful(()) + //revertVolumeData(annotationId, tracingId, sourceTracing, revertAction.sourceVersion, newVersion) + } + } yield () } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 0bedbdab47a..e96ffd31641 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -41,12 +41,12 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe private def patternFor(annotationId: String, transactionId: String) = s"transactionGroup___${annotationId}___${transactionId}___*" - def saveUncommitted(annotationId: String, - transactionId: String, - transactionGroupIndex: Int, - version: Long, - updateGroup: UpdateActionGroup, - expiry: FiniteDuration)(implicit ec: ExecutionContext): Fox[Unit] = + private def saveUncommitted(annotationId: String, + transactionId: String, + transactionGroupIndex: Int, + version: Long, + updateGroup: UpdateActionGroup, + expiry: FiniteDuration)(implicit ec: ExecutionContext): Fox[Unit] = for { _ <- Fox.runIf(transactionGroupIndex > 0)( Fox.assertTrue( @@ -188,8 +188,17 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe updateActionsJson <- Fox.successful(Json.toJson(preprocessActionsForStorage(updateActionGroup))) _ <- tracingDataStore.annotationUpdates.put(annotationId, updateActionGroup.version, updateActionsJson) bucketMutatingActions = findBucketMutatingActions(updateActionGroup) - _ <- Fox.runIf(bucketMutatingActions.nonEmpty)( - volumeTracingService.applyBucketMutatingActions(annotationId, bucketMutatingActions, updateActionGroup.version)) + 
actionsGrouped: Map[String, List[BucketMutatingVolumeUpdateAction]] = bucketMutatingActions.groupBy( + _.actionTracingId) + _ <- Fox.serialCombined(actionsGrouped.keys.toList) { volumeTracingId => + for { + tracing <- annotationService.findVolume(annotationId, volumeTracingId) + _ <- volumeTracingService.applyBucketMutatingActions(volumeTracingId, + tracing, + bucketMutatingActions, + updateActionGroup.version) + } yield () + } } yield () private def findBucketMutatingActions(updateActionGroup: UpdateActionGroup): List[BucketMutatingVolumeUpdateAction] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 6a3207058c4..567e9d47c0a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -15,6 +15,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingUpdateAction, EditableMappingUpdater } +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ CreateNodeSkeletonAction, DeleteNodeSkeletonAction, @@ -25,12 +26,15 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ApplyableVolumeUpdateAction, BucketMutatingVolumeUpdateAction, UpdateMappingNameVolumeAction, + VolumeTracingService, VolumeUpdateAction } import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, KeyValueStoreImplicits, TracingDataStore, + TracingIds, + TracingSelector, VersionedKeyValuePair } import com.scalableminds.webknossos.tracingstore.{ @@ -47,6 +51,8 @@ import scala.concurrent.ExecutionContext class TSAnnotationService @Inject()(val 
remoteWebknossosClient: TSRemoteWebknossosClient, editableMappingService: EditableMappingService, + volumeTracingService: VolumeTracingService, + skeletonTracingService: SkeletonTracingService, val remoteDatastoreClient: TSRemoteDatastoreClient, tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits @@ -139,7 +145,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestAll = true) // TODO do we need to request the others? _ = logger.info( s"reverting to suorceVersion ${revertAction.sourceVersion}. got sourceAnnotation with version ${sourceAnnotation.version} with ${sourceAnnotation.skeletonStats}") - _ <- revertDistributedElements(annotationId, sourceAnnotation, revertAction) + // _ <- revertDistributedElements(annotationId, annotationWithTracings, sourceAnnotation, revertAction) } yield sourceAnnotation def createTracing(a: AddLayerAnnotationUpdateAction)( @@ -473,4 +479,65 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss (batchFrom, batchTo) } + def findVolume(annotationId: String, + tracingId: String, + version: Option[Long] = None, + useCache: Boolean = true, + applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = + if (tracingId == TracingIds.dummyTracingId) + Fox.successful(volumeTracingService.dummyTracing) + else { + for { + annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId), requestAll = false) // TODO is applyUpdates still needed? 
+ tracing <- annotation.getVolume(tracingId) + } yield tracing + } + + def findSkeleton( + annotationId: String, + tracingId: String, + version: Option[Long] = None, + useCache: Boolean = true, + applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] = + if (tracingId == TracingIds.dummyTracingId) + Fox.successful(skeletonTracingService.dummyTracing) + else { + for { + annotation <- getWithTracings(annotationId, version, List(tracingId), List.empty, requestAll = false) // TODO is applyUpdates still needed? + tracing <- annotation.getSkeleton(tracingId) + } yield tracing + } + + def findMultipleVolumes(selectors: List[Option[TracingSelector]], + useCache: Boolean = true, + applyUpdates: Boolean = false)(implicit tc: TokenContext, + ec: ExecutionContext): Fox[List[Option[VolumeTracing]]] = + Fox.combined { + selectors.map { + case Some(selector) => + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) + tracing <- findVolume(annotationId, selector.tracingId, selector.version, useCache, applyUpdates) + .map(Some(_)) + } yield tracing + case None => Fox.successful(None) + } + } + + def findMultipleSkeletons(selectors: List[Option[TracingSelector]], + useCache: Boolean = true, + applyUpdates: Boolean = false)(implicit tc: TokenContext, + ec: ExecutionContext): Fox[List[Option[SkeletonTracing]]] = + Fox.combined { + selectors.map { + case Some(selector) => + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) + tracing <- findSkeleton(annotationId, selector.tracingId, selector.version, useCache, applyUpdates) + .map(Some(_)) + } yield tracing + case None => Fox.successful(None) + } + } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 
96430c6e33f..8e5cc234a5e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -41,7 +41,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- volumeTracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" _ <- assertMappingIsNotLocked(tracing) _ <- bool2Fox(volumeTracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" @@ -82,7 +82,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId)) { for { - tracing <- tracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) mappingName <- tracing.mappingName.toFox _ <- editableMappingService.assertTracingHasEditableMapping(tracing) currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) @@ -111,7 +111,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- volumeTracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) editableMappingInfo <- 
annotationService.getEditableMappingInfo(annotationId, tracingId, version) infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) @@ -126,7 +126,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- volumeTracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) agglomerateGraphBox: Box[AgglomerateGraph] <- editableMappingService .getAgglomerateGraphForId(tracingId, tracing.version, agglomerateId) @@ -148,7 +148,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- volumeTracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId, version = None) @@ -170,7 +170,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- volumeTracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- 
volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) @@ -190,7 +190,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- volumeTracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) @@ -209,7 +209,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- volumeTracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index f74c54dc999..fe1e105568e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -6,23 +6,28 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import com.scalableminds.webknossos.tracingstore.tracings.TracingSelector import com.scalableminds.webknossos.tracingstore.tracings.skeleton._ import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} -import net.liftweb.common.Empty +import net.liftweb.common.{Empty, Failure, Full} import play.api.i18n.Messages import play.api.libs.json.Json +import com.scalableminds.webknossos.datastore.controllers.Controller import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import scala.concurrent.ExecutionContext -class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingService, - val remoteWebknossosClient: TSRemoteWebknossosClient, - val accessTokenService: TracingStoreAccessTokenService, - val slackNotificationService: TSSlackNotificationService)( +class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracingService, + remoteWebknossosClient: TSRemoteWebknossosClient, + annotationService: TSAnnotationService, + accessTokenService: TracingStoreAccessTokenService, + slackNotificationService: TSSlackNotificationService)( implicit val ec: ExecutionContext, val bodyParsers: PlayBodyParsers) - extends TracingController[SkeletonTracing, SkeletonTracings] { + extends Controller { implicit val 
tracingsCompanion: SkeletonTracings.type = SkeletonTracings @@ -35,15 +40,104 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer implicit def unpackMultiple(tracings: SkeletonTracings): List[Option[SkeletonTracing]] = tracings.tracings.toList.map(_.tracing) + def save(): Action[SkeletonTracing] = Action.async(validateProto[SkeletonTracing]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracing = request.body + skeletonTracingService.save(tracing, None, 0).map { newId => + Ok(Json.toJson(newId)) + } + } + } + } + } + + def saveMultiple(): Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[SkeletonTracing] => + tracingOpt match { + case Some(tracing) => skeletonTracingService.save(tracing, None, 0).map(Some(_)) + case _ => Fox.successful(None) + } + }) + savedIds.map(id => Ok(Json.toJson(id))) + } + } + } + } + + def get(tracingId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findSkeleton(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( + "tracing.notFound") + } yield Ok(tracing.toByteArray).as(protobufMimeType) + } + } + } + + def getMultiple: Action[List[Option[TracingSelector]]] = + Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + 
tracings <- annotationService.findMultipleSkeletons(request.body, applyUpdates = true) + } yield { + Ok(tracings.toByteArray).as(protobufMimeType) + } + } + } + } + + def mergedFromIds(persist: Boolean): Action[List[Option[TracingSelector]]] = + Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + tracingOpts <- annotationService.findMultipleSkeletons(request.body, applyUpdates = true) ?~> Messages( + "tracing.notFound") + tracingsWithIds = tracingOpts.zip(request.body).flatMap { + case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) + case _ => None + } + newTracingId = skeletonTracingService.generateTracingId + mergedVolumeStats <- skeletonTracingService.mergeVolumeData(request.body.flatten, + tracingsWithIds.map(_._1), + newTracingId, + newVersion = 0L, + toCache = !persist) + mergeEditableMappingsResultBox <- skeletonTracingService + .mergeEditableMappings(newTracingId, tracingsWithIds) + .futureBox + newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { + case Full(()) => Fox.successful(Some(newTracingId)) + case Empty => Fox.successful(None) + case f: Failure => f.toFox + } + mergedTracing <- Fox.box2Fox( + skeletonTracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) + _ <- skeletonTracingService.save(mergedTracing, Some(newTracingId), version = 0, toCache = !persist) + } yield Ok(Json.toJson(newTracingId)) + } + } + } + def mergedFromContents(persist: Boolean): Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val tracings: List[Option[SkeletonTracing]] = request.body for { - mergedTracing <- Fox.box2Fox(tracingService.merge(tracings.flatten, MergedVolumeStats.empty(), Empty)) - processedTracing = 
tracingService.remapTooLargeTreeIds(mergedTracing) - newId <- tracingService.save(processedTracing, None, processedTracing.version, toCache = !persist) + mergedTracing <- Fox.box2Fox( + skeletonTracingService.merge(tracings.flatten, MergedVolumeStats.empty(), Empty)) + processedTracing = skeletonTracingService.remapTooLargeTreeIds(mergedTracing) + newId <- skeletonTracingService.save(processedTracing, None, processedTracing.version, toCache = !persist) } yield Ok(Json.toJson(newId)) } } @@ -60,19 +154,17 @@ class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingSer accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( + tracing <- annotationService.findSkeleton(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( "tracing.notFound") editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - newId <- tracingService.duplicate(tracing, - fromTask.getOrElse(false), - editPositionParsed, - editRotationParsed, - boundingBoxParsed) - } yield { - Ok(Json.toJson(newId)) - } + newId <- skeletonTracingService.duplicate(tracing, + fromTask.getOrElse(false), + editPositionParsed, + editRotationParsed, + boundingBoxParsed) + } yield Ok(Json.toJson(newId)) } } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala deleted file mode 100644 index a7ecc6fb09c..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ /dev/null 
@@ -1,131 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.controllers - -import com.scalableminds.util.tools.Fox -import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} -import com.scalableminds.webknossos.datastore.controllers.Controller -import com.scalableminds.webknossos.datastore.services.UserAccessRequest -import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.{TracingSelector, TracingService} -import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} -import net.liftweb.common.{Empty, Failure, Full} -import play.api.i18n.Messages -import play.api.libs.json.Json -import play.api.mvc.{Action, AnyContent, PlayBodyParsers} -import scalapb.{GeneratedMessage, GeneratedMessageCompanion} - -import scala.concurrent.ExecutionContext - -trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends Controller { - - def tracingService: TracingService[T] - - def remoteWebknossosClient: TSRemoteWebknossosClient - - def accessTokenService: TracingStoreAccessTokenService - - def slackNotificationService: TSSlackNotificationService - - implicit val tracingCompanion: GeneratedMessageCompanion[T] = tracingService.tracingCompanion - - implicit val tracingsCompanion: GeneratedMessageCompanion[Ts] - - implicit def unpackMultiple(tracings: Ts): List[Option[T]] - - implicit def packMultiple(tracings: List[T]): Ts - - implicit def packMultipleOpt(tracings: List[Option[T]]): Ts - - implicit val ec: ExecutionContext - - implicit val bodyParsers: PlayBodyParsers - - override def allowRemoteOrigin: Boolean = true - - def save(): Action[T] = Action.async(validateProto[T]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - val tracing = request.body - 
tracingService.save(tracing, None, 0).map { newId => - Ok(Json.toJson(newId)) - } - } - } - } - } - - def saveMultiple(): Action[Ts] = Action.async(validateProto[Ts]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[T] => - tracingOpt match { - case Some(tracing) => tracingService.save(tracing, None, 0).map(Some(_)) - case _ => Fox.successful(None) - } - }) - savedIds.map(id => Ok(Json.toJson(id))) - } - } - } - } - - def get(tracingId: String, version: Option[Long]): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( - "tracing.notFound") - } yield Ok(tracing.toByteArray).as(protobufMimeType) - } - } - } - - def getMultiple: Action[List[Option[TracingSelector]]] = - Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - for { - tracings <- tracingService.findMultiple(request.body, applyUpdates = true) - } yield { - Ok(tracings.toByteArray).as(protobufMimeType) - } - } - } - } - - def mergedFromIds(persist: Boolean): Action[List[Option[TracingSelector]]] = - Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - for { - tracingOpts <- tracingService.findMultiple(request.body, applyUpdates = true) ?~> Messages( - "tracing.notFound") - tracingsWithIds = tracingOpts.zip(request.body).flatMap { - case (Some(tracing), Some(selector)) => 
Some((tracing, selector.tracingId)) - case _ => None - } - newTracingId = tracingService.generateTracingId - mergedVolumeStats <- tracingService.mergeVolumeData(request.body.flatten, - tracingsWithIds.map(_._1), - newTracingId, - newVersion = 0L, - toCache = !persist) - mergeEditableMappingsResultBox <- tracingService - .mergeEditableMappings(newTracingId, tracingsWithIds) - .futureBox - newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { - case Full(()) => Fox.successful(Some(newTracingId)) - case Empty => Fox.successful(None) - case f: Failure => f.toFox - } - mergedTracing <- Fox.box2Fox( - tracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) - _ <- tracingService.save(mergedTracing, Some(newTracingId), version = 0, toCache = !persist) - } yield Ok(Json.toJson(newTracingId)) - } - } - } -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 243761dea06..f84c7ef7d53 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -6,7 +6,9 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.ExtendedTypes.ExtendedString import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} +import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto +import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} import com.scalableminds.webknossos.datastore.helpers.{ GetSegmentIndexParameters, ProtoGeometryImplicits, @@ -33,14 +35,14 @@ import 
com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeSegmentStatisticsService, VolumeTracingService } -import com.scalableminds.webknossos.tracingstore.tracings.KeyValueStoreImplicits +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingSelector} import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig } -import net.liftweb.common.Empty +import net.liftweb.common.{Empty, Failure, Full} import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile import play.api.libs.json.Json @@ -51,7 +53,7 @@ import java.nio.{ByteBuffer, ByteOrder} import scala.concurrent.ExecutionContext class VolumeTracingController @Inject()( - val tracingService: VolumeTracingService, + val volumeTracingService: VolumeTracingService, val config: TracingStoreConfig, val remoteDataStoreClient: TSRemoteDatastoreClient, val accessTokenService: TracingStoreAccessTokenService, @@ -63,7 +65,7 @@ class VolumeTracingController @Inject()( volumeSegmentIndexService: VolumeSegmentIndexService, fullMeshService: TSFullMeshService, val rpc: RPC)(implicit val ec: ExecutionContext, val bodyParsers: PlayBodyParsers) - extends TracingController[VolumeTracing, VolumeTracings] + extends Controller with ProtoGeometryImplicits with KeyValueStoreImplicits { @@ -78,6 +80,94 @@ class VolumeTracingController @Inject()( implicit def unpackMultiple(tracings: VolumeTracings): List[Option[VolumeTracing]] = tracings.tracings.toList.map(_.tracing) + def save(): Action[VolumeTracing] = Action.async(validateProto[VolumeTracing]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracing = request.body + volumeTracingService.save(tracing, None, 0).map { newId => + Ok(Json.toJson(newId)) + } + } + } + } + } + + def saveMultiple(): 
Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[VolumeTracing] => + tracingOpt match { + case Some(tracing) => volumeTracingService.save(tracing, None, 0).map(Some(_)) + case _ => Fox.successful(None) + } + }) + savedIds.map(id => Ok(Json.toJson(id))) + } + } + } + } + + def get(tracingId: String, version: Option[Long]): Action[AnyContent] = + Action.async { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( + "tracing.notFound") + } yield Ok(tracing.toByteArray).as(protobufMimeType) + } + } + } + + def getMultiple: Action[List[Option[TracingSelector]]] = + Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + tracings <- annotationService.findMultipleVolumes(request.body, applyUpdates = true) + } yield { + Ok(tracings.toByteArray).as(protobufMimeType) + } + } + } + } + + def mergedFromIds(persist: Boolean): Action[List[Option[TracingSelector]]] = + Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + tracingOpts <- annotationService.findMultipleVolumes(request.body, applyUpdates = true) ?~> Messages( + "tracing.notFound") + tracingsWithIds = tracingOpts.zip(request.body).flatMap { + case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) + case _ => None + } + 
newTracingId = volumeTracingService.generateTracingId + mergedVolumeStats <- volumeTracingService.mergeVolumeData(request.body.flatten, + tracingsWithIds.map(_._1), + newTracingId, + newVersion = 0L, + toCache = !persist) + mergeEditableMappingsResultBox <- volumeTracingService + .mergeEditableMappings(newTracingId, tracingsWithIds) + .futureBox + newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { + case Full(()) => Fox.successful(Some(newTracingId)) + case Empty => Fox.successful(None) + case f: Failure => f.toFox + } + mergedTracing <- Fox.box2Fox( + volumeTracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) + _ <- volumeTracingService.save(mergedTracing, Some(newTracingId), version = 0, toCache = !persist) + } yield Ok(Json.toJson(newTracingId)) + } + } + } + def initialData(tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]): Action[AnyContent] = Action.async { implicit request => log() { @@ -86,12 +176,12 @@ class VolumeTracingController @Inject()( for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) - resolutions <- tracingService + resolutions <- volumeTracingService .initializeWithData(annotationId, tracingId, tracing, initialData, resolutionRestrictions) .toFox - _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) + _ <- volumeTracingService.updateResolutionList(tracingId, tracing, resolutions) } yield Ok(Json.toJson(tracingId)) } } @@ -106,12 +196,14 @@ class VolumeTracingController @Inject()( _ <- Fox.successful(()) tracings = request.body shouldCreateSegmentIndex = 
volumeSegmentIndexService.shouldCreateSegmentIndexForMerged(tracings.flatten) - mt <- tracingService.merge(tracings.flatten, MergedVolumeStats.empty(shouldCreateSegmentIndex), Empty).toFox + mt <- volumeTracingService + .merge(tracings.flatten, MergedVolumeStats.empty(shouldCreateSegmentIndex), Empty) + .toFox // segment lists for multi-volume uploads are not supported yet, compare https://github.com/scalableminds/webknossos/issues/6887 mergedTracing = mt.copy(segments = List.empty) - newId <- tracingService.save(mergedTracing, None, mergedTracing.version, toCache = !persist) + newId <- volumeTracingService.save(mergedTracing, None, mergedTracing.version, toCache = !persist) } yield Ok(Json.toJson(newId)) } } @@ -125,11 +217,11 @@ class VolumeTracingController @Inject()( for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") - resolutions <- tracingService + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") + resolutions <- volumeTracingService .initializeWithDataMultiple(annotationId, tracingId, tracing, initialData) .toFox - _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) + _ <- volumeTracingService.updateResolutionList(tracingId, tracing, resolutions) } yield Ok(Json.toJson(tracingId)) } } @@ -146,13 +238,13 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId, version) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") volumeDataZipFormatParsed <- 
VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) voxelSize = voxelSizeFactorParsedOpt.map(voxelSizeParsed => VoxelSize.fromFactorAndUnitWithDefault(voxelSizeParsed, voxelSizeUnitParsedOpt)) - data <- tracingService.allDataZip( + data <- volumeTracingService.allDataZip( tracingId, tracing, volumeDataZipFormatParsed, @@ -169,11 +261,11 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") (data, indices) <- if (tracing.getHasEditableMapping) { val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) editableMappingService.volumeData(mappingLayer, request.body) - } else tracingService.data(tracingId, tracing, request.body) + } else volumeTracingService.data(tracingId, tracing, request.body) } yield Ok(data).withHeaders(getMissingBucketsHeaders(indices): _*) } } @@ -198,7 +290,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") _ = logger.info(s"Duplicating volume tracing $tracingId...") datasetBoundingBox = request.body.asJson.flatMap(_.validateOpt[BoundingBox].asOpt.flatten) resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) @@ -206,12 
+298,12 @@ class VolumeTracingController @Inject()( editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( - tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - newTracingId = tracingService.generateTracingId + volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) + newTracingId = volumeTracingService.generateTracingId // TODO /*_ <- Fox.runIf(tracing.getHasEditableMapping)( editableMappingService.duplicate(tracingId, newTracingId, version = None, remoteFallbackLayerOpt))*/ - (newId, newTracing) <- tracingService.duplicate( + (newId, newTracing) <- volumeTracingService.duplicate( annotationId, tracingId, newTracingId, @@ -224,7 +316,8 @@ class VolumeTracingController @Inject()( boundingBoxParsed, mappingName = None ) - _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(annotationId, newId, tracingId, newTracing)) + _ <- Fox.runIfOptionTrue(downsample)( + volumeTracingService.downsample(annotationId, newId, tracingId, newTracing)) } yield Ok(Json.toJson(newId)) } } @@ -237,14 +330,14 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox zipFile <- request.body.files.headOption.map(f => new File(f.ref.path.toString)).toFox - largestSegmentId <- tracingService.importVolumeData(annotationId, - tracingId, - tracing, - zipFile, - currentVersion) + largestSegmentId <- 
volumeTracingService.importVolumeData(annotationId, + tracingId, + tracing, + zipFile, + currentVersion) } yield Ok(Json.toJson(largestSegmentId)) } } @@ -256,11 +349,11 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- annotationService.currentMaterializableVersion(tracingId) before = Instant.now - canAddSegmentIndex <- tracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing) - processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)(tracingService + canAddSegmentIndex <- volumeTracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing) + processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)(volumeTracingService .addSegmentIndex(annotationId, tracingId, tracing, currentVersion, dryRun)) ?~> "addSegmentIndex.failed" currentVersionNew <- annotationService.currentMaterializableVersion(tracingId) _ <- Fox.runIf(!dryRun)(bool2Fox( @@ -283,11 +376,11 @@ class VolumeTracingController @Inject()( // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. 
annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") (vertices: Array[Float], neighbors: List[Int]) <- if (tracing.getHasEditableMapping) { val editableMappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) editableMappingService.createAdHocMesh(editableMappingLayer, request.body) - } else tracingService.createAdHocMesh(tracingId, tracing, request.body) + } else volumeTracingService.createAdHocMesh(tracingId, tracing, request.body) } yield { // We need four bytes for each float val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) @@ -317,7 +410,8 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - positionOpt <- tracingService.findData(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + positionOpt <- volumeTracingService.findData(tracingId, tracing) } yield { Ok(Json.obj("position" -> positionOpt, "resolution" -> positionOpt.map(_ => Vec3Int.ones))) } @@ -329,7 +423,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentVolumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentVolume(annotationId, @@ -348,7 +442,7 @@ class VolumeTracingController 
@Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) segmentBoundingBoxes: List[BoundingBox] <- Fox.serialCombined(request.body.segmentIds) { segmentId => volumeSegmentStatisticsService.getSegmentBoundingBox(annotationId, @@ -367,8 +461,8 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - fallbackLayer <- tracingService.getFallbackLayer(annotationId, tracingId) - tracing <- tracingService.find(annotationId, tracingId) + tracing <- annotationService.findVolume(annotationId, tracingId) + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) _ <- bool2Fox(DataLayer.bucketSize <= request.body.cubeSize) ?~> "cubeSize must be at least one bucket (32³)" bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService @@ -380,7 +474,7 @@ class VolumeTracingController @Inject()( additionalCoordinates = request.body.additionalCoordinates, additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes), mappingName = mappingName, - editableMappingTracingId = tracingService.editableMappingTracingId(tracing, tracingId) + editableMappingTracingId = volumeTracingService.editableMappingTracingId(tracing, tracingId) ) bucketPositionsForCubeSize = bucketPositionsRaw.values .map(vec3IntFromProto) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 7108a8104f6..2e7012c355b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -64,7 +64,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -84,7 +84,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto(_).toMagLiteral(allowScalar = true)) additionalFiles = if (zarrVersion == 2) List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -98,7 +98,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- 
tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -118,7 +118,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -132,7 +132,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(existingMags.contains(magParsed)) ?~> Messages("tracing.wrongMag", tracingId, mag) ~> NOT_FOUND @@ -167,7 
+167,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND @@ -226,7 +226,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(tracingId, @@ -242,7 +242,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadataV0_5.fromNameVoxelSizeAndMags(tracingId, @@ -259,7 +259,7 
@@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND zarrLayer = ZarrSegmentationLayer( name = tracingName.getOrElse(tracingId), largestSegmentId = tracing.largestSegmentId, @@ -280,7 +280,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- tracingService.find(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 58cb7da7d16..afe6fd032a4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -3,7 +3,6 @@ package com.scalableminds.webknossos.tracingstore.tracings import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} -import 
com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.typesafe.scalalogging.LazyLogging @@ -40,8 +39,6 @@ trait TracingService[T <: GeneratedMessage] def tracingMigrationService: TracingMigrationService[T] - def annotationService: TSAnnotationService - def dummyTracing: T implicit def tracingCompanion: GeneratedMessageCompanion[T] @@ -68,25 +65,6 @@ trait TracingService[T <: GeneratedMessage] } */ - def find(annotationId: String, - tracingId: String, - version: Option[Long] = None, - useCache: Boolean = true, - applyUpdates: Boolean = false)(implicit tc: TokenContext): Fox[T] - - def findMultiple(selectors: List[Option[TracingSelector]], useCache: Boolean = true, applyUpdates: Boolean = false)( - implicit tc: TokenContext): Fox[List[Option[T]]] = - Fox.combined { - selectors.map { - case Some(selector) => - for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) - tracing <- find(annotationId, selector.tracingId, selector.version, useCache, applyUpdates).map(Some(_)) - } yield tracing - case None => Fox.successful(None) - } - } - def generateTracingId: String = UUID.randomUUID.toString def save(tracing: T, tracingId: Option[String], version: Long, toCache: Boolean = false): Fox[String] = { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 0512226823f..dcbe1eb86c0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -9,7 +9,6 @@ import 
com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} -import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import net.liftweb.common.{Box, Full} @@ -24,7 +23,6 @@ class SkeletonTracingService @Inject()( val temporaryTracingIdStore: TracingStoreRedisStore, val remoteWebknossosClient: TSRemoteWebknossosClient, val uncommittedUpdatesStore: TracingStoreRedisStore, - val annotationService: TSAnnotationService, val tracingMigrationService: SkeletonTracingMigrationService)(implicit val ec: ExecutionContext) extends TracingService[SkeletonTracing] with KeyValueStoreImplicits @@ -37,24 +35,6 @@ class SkeletonTracingService @Inject()( implicit val tracingCompanion: SkeletonTracing.type = SkeletonTracing - def find(annotationId: String, - tracingId: String, - version: Option[Long] = None, - useCache: Boolean = true, - applyUpdates: Boolean = false)(implicit tc: TokenContext): Fox[SkeletonTracing] = - if (tracingId == TracingIds.dummyTracingId) - Fox.successful(dummyTracing) - else { - for { - annotation <- annotationService.getWithTracings(annotationId, - version, - List(tracingId), - List.empty, - requestAll = false) // TODO is applyUpdates still needed? 
- tracing <- annotation.getSkeleton(tracingId) - } yield tracing - } - def duplicate(tracing: SkeletonTracing, fromTask: Boolean, editPosition: Option[Vec3Int], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index 541b0ed519a..630b587c9b4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -40,7 +40,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] = for { - tracing <- volumeTracingService.find(annotationId, tracingId) ?~> "tracing.notFound" + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" data <- if (fullMeshRequest.meshFileName.isDefined) loadFullMeshFromMeshfile(annotationId, tracingId, tracing, fullMeshRequest) else loadFullMeshFromAdHoc(annotationId, tracingId, tracing, fullMeshRequest) @@ -98,7 +98,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, voxelSize: VoxelSize, fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Array[Float]]] = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId) + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) mappingName <- annotationService.baseMappingName(annotationId, tracingId, tracing) bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index 82109a461a5..cede715f841 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -58,9 +58,9 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci private def getTypedDataForBucketPosition(annotationId: String, tracingId: String)( bucketPosition: Vec3Int, mag: Vec3Int, - additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext) = + additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit tc: TokenContext, ec: ExecutionContext) = for { - tracing <- volumeTracingService.find(annotationId, tracingId) ?~> "tracing.notFound" + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" bucketData <- getVolumeDataForPositions(annotationId, tracingId, tracing, @@ -79,8 +79,8 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci segmentId: Long, mag: Vec3Int)(implicit ec: ExecutionContext, tc: TokenContext) = for { - fallbackLayer <- volumeTracingService.getFallbackLayer(annotationId, tracingId) - tracing <- volumeTracingService.find(annotationId, tracingId) ?~> "tracing.notFound" + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> "tracing.notFound" + fallbackLayer <- volumeTracingService.getFallbackLayer(tracingId, tracing) additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) allBucketPositions: ListOfVec3IntProto <- volumeSegmentIndexService .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 5a85c53a8f8..1b25a1c7815 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -82,16 +82,16 @@ trait VolumeTracingDownsampling annotationId: String, tracingId: String, oldTracingId: String, - tracing: VolumeTracing, + newTracing: VolumeTracing, dataLayer: VolumeTracingLayer, tracingService: VolumeTracingService)(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Vec3Int]] = { val bucketVolume = 32 * 32 * 32 for { - _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." - _ <- bool2Fox(tracing.resolutions.nonEmpty) ?~> "Cannot downsample tracing with no resolution list" - sourceMag = getSourceMag(tracing) - magsToCreate <- getMagsToCreate(tracing, oldTracingId) - elementClass = elementClassFromProto(tracing.elementClass) + _ <- bool2Fox(newTracing.version == 0L) ?~> "Tracing has already been edited." 
+ _ <- bool2Fox(newTracing.resolutions.nonEmpty) ?~> "Cannot downsample tracing with no resolution list" + sourceMag = getSourceMag(newTracing) + magsToCreate <- getMagsToCreate(newTracing, oldTracingId) + elementClass = elementClassFromProto(newTracing.elementClass) bucketDataMapMutable = new mutable.HashMap[BucketPosition, Array[Byte]]().withDefault(_ => revertedValue) _ = fillMapWithSourceBucketsInplace(bucketDataMapMutable, tracingId, dataLayer, sourceMag) originalBucketPositions = bucketDataMapMutable.keys.toList @@ -107,28 +107,27 @@ trait VolumeTracingDownsampling dataLayer) requiredMag } - fallbackLayer <- tracingService.getFallbackLayer(annotationId, oldTracingId) // remote wk does not know the new id yet - tracing <- tracingService.find(annotationId, tracingId) ?~> "tracing.notFound" + fallbackLayer <- tracingService.getFallbackLayer(oldTracingId, newTracing) // remote wk does not know the new id yet segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, - tracing.version, + newTracing.version, tracingService.remoteDatastoreClient, fallbackLayer, dataLayer.additionalAxes, tc) _ <- Fox.serialCombined(updatedBucketsMutable.toList) { bucketPosition: BucketPosition => for { - _ <- saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), tracing.version) - mappingName <- selectMappingName(tracing) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + _ <- saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), newTracing.version) + mappingName <- selectMappingName(newTracing) + _ <- Fox.runIfOptionTrue(newTracing.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, bucketDataMapMutable(bucketPosition), Empty, - tracing.elementClass, + newTracing.elementClass, mappingName, - editableMappingTracingId(tracing, tracingId) + editableMappingTracingId(newTracing, tracingId) )) } yield () } @@ -259,14 +258,16 @@ trait VolumeTracingDownsampling private def getSourceMag(tracing: 
VolumeTracing): Vec3Int = tracing.resolutions.minBy(_.maxDim) - private def getMagsToCreate(tracing: VolumeTracing, oldTracingId: String): Fox[List[Vec3Int]] = + private def getMagsToCreate(tracing: VolumeTracing, oldTracingId: String)( + implicit tc: TokenContext): Fox[List[Vec3Int]] = for { requiredMags <- getRequiredMags(tracing, oldTracingId) sourceMag = getSourceMag(tracing) magsToCreate = requiredMags.filter(_.maxDim > sourceMag.maxDim) } yield magsToCreate - private def getRequiredMags(tracing: VolumeTracing, oldTracingId: String): Fox[List[Vec3Int]] = + private def getRequiredMags(tracing: VolumeTracing, oldTracingId: String)( + implicit tc: TokenContext): Fox[List[Vec3Int]] = for { dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSourceForTracing(oldTracingId) magsForTracing = VolumeTracingDownsampling.magsForVolumeTracingByLayerName(dataSource, tracing.fallbackLayer) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index bf8b16b1bb3..1315db091ff 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -23,7 +23,7 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.services._ -import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ import 
com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat @@ -56,7 +56,6 @@ class VolumeTracingService @Inject()( val uncommittedUpdatesStore: TracingStoreRedisStore, val temporaryTracingIdStore: TracingStoreRedisStore, val remoteDatastoreClient: TSRemoteDatastoreClient, - val annotationService: TSAnnotationService, val remoteWebknossosClient: TSRemoteWebknossosClient, val temporaryFileCreator: TemporaryFileCreator, val tracingMigrationService: VolumeTracingMigrationService, @@ -89,8 +88,8 @@ class VolumeTracingService @Inject()( adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService - private val fallbackLayerCache: AlfuCache[(String, String, Option[String]), Option[RemoteFallbackLayer]] = AlfuCache( - maxCapacity = 100) + private val fallbackLayerCache: AlfuCache[(String, Option[String], Option[String]), Option[RemoteFallbackLayer]] = + AlfuCache(maxCapacity = 100) override protected def updateSegmentIndex( segmentIndexBuffer: VolumeSegmentIndexBuffer, @@ -108,15 +107,14 @@ class VolumeTracingService @Inject()( mappingName, editableMappingTracingId) ?~> "volumeSegmentIndex.update.failed" - def applyBucketMutatingActions(annotationId: String, + def applyBucketMutatingActions(tracingId: String, + tracing: VolumeTracing, updateActions: List[BucketMutatingVolumeUpdateAction], newVersion: Long)(implicit tc: TokenContext): Fox[Unit] = for { // warning, may be called multiple times with the same version number (due to transaction management). 
// frontend ensures that each bucket is only updated once per transaction - tracingId <- updateActions.headOption.map(_.actionTracingId).toFox - fallbackLayerOpt <- getFallbackLayer(annotationId, tracingId) - tracing <- find(annotationId, tracingId) ?~> "tracing.notFound" + fallbackLayerOpt <- getFallbackLayer(tracingId, tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -137,7 +135,7 @@ class VolumeTracingService @Inject()( if (!tracing.getHasSegmentIndex) { Fox.failure("Cannot delete segment data for annotations without segment index.") } else - deleteSegmentData(annotationId, tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to delete segment data." + deleteSegmentData(tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to delete segment data." case _ => Fox.failure("Unknown bucket-mutating action.") } _ <- segmentIndexBuffer.flush() @@ -176,24 +174,6 @@ class VolumeTracingService @Inject()( } } yield volumeTracing - def find(annotationId: String, - tracingId: String, - version: Option[Long] = None, - useCache: Boolean = true, - applyUpdates: Boolean = false)(implicit tc: TokenContext): Fox[VolumeTracing] = - if (tracingId == TracingIds.dummyTracingId) - Fox.successful(dummyTracing) - else { - for { - annotation <- annotationService.getWithTracings(annotationId, - version, - List.empty, - List(tracingId), - requestAll = false) // TODO is applyUpdates still needed? 
- tracing <- annotation.getVolume(tracingId) - } yield tracing - } - override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = if (tracing.getHasEditableMapping) Some(tracingId) else None @@ -202,8 +182,7 @@ class VolumeTracingService @Inject()( Fox.failure("mappingName called on volumeTracing with editableMapping!") else Fox.successful(tracing.mappingName) - private def deleteSegmentData(annotationId: String, - tracingId: String, + private def deleteSegmentData(tracingId: String, volumeTracing: VolumeTracing, a: DeleteSegmentDataVolumeAction, segmentIndexBuffer: VolumeSegmentIndexBuffer, @@ -211,6 +190,7 @@ class VolumeTracingService @Inject()( for { _ <- Fox.successful(()) dataLayer = volumeTracingLayer(tracingId, volumeTracing) + fallbackLayer <- getFallbackLayer(tracingId, volumeTracing) possibleAdditionalCoordinates = AdditionalAxis.coordinateSpace(dataLayer.additionalAxes).map(Some(_)) additionalCoordinateList = if (possibleAdditionalCoordinates.isEmpty) { List(None) @@ -222,7 +202,6 @@ class VolumeTracingService @Inject()( Fox.serialCombined(additionalCoordinateList)(additionalCoordinates => { val mag = vec3IntFromProto(resolution) for { - fallbackLayer <- getFallbackLayer(annotationId, tracingId) bucketPositionsRaw <- volumeSegmentIndexService.getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer( fallbackLayer, tracingId, @@ -374,7 +353,7 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) destinationDataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(annotationId, tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -415,7 +394,7 @@ class VolumeTracingService @Inject()( val dataLayer = volumeTracingLayer(tracingId, tracing) val savedResolutions = new mutable.HashSet[Vec3Int]() for { - fallbackLayer <- 
getFallbackLayer(annotationId, tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracing) mappingName <- selectMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, @@ -522,7 +501,7 @@ class VolumeTracingService @Inject()( val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, fromTask, datasetBoundingBox) val tracingWithResolutionRestrictions = restrictMagList(tracingWithBB, resolutionRestrictions) for { - fallbackLayer <- getFallbackLayer(annotationId, tracingId) + fallbackLayer <- getFallbackLayer(tracingId, sourceTracing) hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer) newTracing = tracingWithResolutionRestrictions.copy( createdTimestamp = System.currentTimeMillis(), @@ -568,7 +547,7 @@ class VolumeTracingService @Inject()( sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) - fallbackLayer <- getFallbackLayer(annotationId, sourceId) + fallbackLayer <- getFallbackLayer(sourceId, sourceTracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( destinationId, volumeSegmentIndexClient, @@ -628,16 +607,16 @@ class VolumeTracingService @Inject()( toCache) } yield id - def downsample(annotationId: String, tracingId: String, oldTracingId: String, tracing: VolumeTracing)( + def downsample(annotationId: String, tracingId: String, oldTracingId: String, newTracing: VolumeTracing)( implicit tc: TokenContext): Fox[Unit] = for { resultingResolutions <- downsampleWithLayer(annotationId, tracingId, oldTracingId, - tracing, - volumeTracingLayer(tracingId, tracing), + newTracing, + volumeTracingLayer(tracingId, newTracing), this) - _ <- updateResolutionList(tracingId, tracing, resultingResolutions.toSet) + _ <- updateResolutionList(tracingId, newTracing, 
resultingResolutions.toSet) } yield () def volumeBucketsAreEmpty(tracingId: String): Boolean = @@ -660,9 +639,9 @@ class VolumeTracingService @Inject()( adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } - def findData(annotationId: String, tracingId: String)(implicit tc: TokenContext): Fox[Option[Vec3Int]] = + def findData(tracingId: String, tracing: VolumeTracing)(implicit tc: TokenContext): Fox[Option[Vec3Int]] = for { - tracing <- find(annotationId: String, tracingId) ?~> "tracing.notFound" + _ <- Fox.successful(()) volumeLayer = volumeTracingLayer(tracingId, tracing) bucketStream = volumeLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketPosOpt = if (bucketStream.hasNext) { @@ -807,7 +786,7 @@ class VolumeTracingService @Inject()( elementClass) mergedAdditionalAxes <- Fox.box2Fox(AdditionalAxis.mergeAndAssertSameAdditionalAxes(tracings.map(t => AdditionalAxis.fromProtosAsOpt(t.additionalAxes)))) - fallbackLayer <- getFallbackLayer("dummyAnnotationId", tracingSelectors.head.tracingId) // TODO annotation id from selectors + fallbackLayer <- getFallbackLayer(tracingSelectors.head.tracingId, tracings.head) // TODO can we get rid of the head? 
segmentIndexBuffer = new VolumeSegmentIndexBuffer(newId, volumeSegmentIndexClient, newVersion, @@ -843,7 +822,7 @@ class VolumeTracingService @Inject()( isTemporaryTracing <- isTemporaryTracing(tracingId) sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - fallbackLayer <- getFallbackLayer(annotationId, tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracing) mappingName <- selectMappingName(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, @@ -919,7 +898,7 @@ class VolumeTracingService @Inject()( mergedVolume.largestSegmentId.toLong, tracing.elementClass) dataLayer = volumeTracingLayer(tracingId, tracing) - fallbackLayer <- getFallbackLayer(annotationId, tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracing) mappingName <- selectMappingName(tracing) segmentIndexBuffer <- Fox.successful( new VolumeSegmentIndexBuffer(tracingId, @@ -983,18 +962,17 @@ class VolumeTracingService @Inject()( Fox.failure("Cannot merge tracings with and without editable mappings") } - def getFallbackLayer(annotationId: String, tracingId: String)( + def getFallbackLayer(tracingId: String, tracing: VolumeTracing)( implicit tc: TokenContext): Fox[Option[RemoteFallbackLayer]] = - fallbackLayerCache.getOrLoad((annotationId, tracingId, tc.userTokenOpt), + fallbackLayerCache.getOrLoad((tracingId, tracing.fallbackLayer, tc.userTokenOpt), t => getFallbackLayerFromWebknossos(t._1, t._2)) - private def getFallbackLayerFromWebknossos(annotationId: String, tracingId: String)(implicit tc: TokenContext) = + private def getFallbackLayerFromWebknossos(tracingId: String, fallbackLayerName: Option[String])( + implicit tc: TokenContext) = Fox[Option[RemoteFallbackLayer]] { for { - tracing <- find(annotationId, tracingId) dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) dataSourceId = dataSource.id 
- fallbackLayerName = tracing.fallbackLayer fallbackLayer = dataSource.dataLayers .find(_.name == fallbackLayerName.getOrElse("")) .map(RemoteFallbackLayer.fromDataLayerAndDataSource(_, dataSourceId)) From 163354c761107199a75c5acb6ed2f02e7056914e Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 8 Oct 2024 15:07:29 +0200 Subject: [PATCH 094/361] revert volume buckets. TODO: lz4 exception? --- .../annotation/AnnotationReversion.scala | 17 ++++++++++++---- .../annotation/TSAnnotationService.scala | 16 +++++++-------- .../volume/VolumeTracingService.scala | 20 ++++++++----------- 3 files changed, 29 insertions(+), 24 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala index c26f5bf7bfc..bb5bb384602 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -2,13 +2,15 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeSegmentIndexBuffer +import com.scalableminds.util.tools.Fox.box2Fox +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import scala.concurrent.ExecutionContext trait AnnotationReversion { + def volumeTracingService: VolumeTracingService + def revertDistributedElements(annotationId: String, currentAnnotationWithTracings: AnnotationWithTracings, sourceAnnotationWithTracings: AnnotationWithTracings, @@ -18,8 +20,15 @@ trait AnnotationReversion { for { _ <- 
Fox.serialCombined(sourceAnnotationWithTracings.getVolumes) { // Only volume data for volume layers present in the *source annotation* needs to be reverted. - case (tracingId, sourceTracing) => Fox.successful(()) - //revertVolumeData(annotationId, tracingId, sourceTracing, revertAction.sourceVersion, newVersion) + case (tracingId, sourceTracing) => + for { + tracingBeforeRevert <- currentAnnotationWithTracings.getVolume(tracingId).toFox + _ <- volumeTracingService.revertVolumeData(tracingId, + revertAction.sourceVersion, + sourceTracing, + newVersion: Long, + tracingBeforeRevert) + } yield () } } yield () diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 567e9d47c0a..64b05098ecc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -51,7 +51,7 @@ import scala.concurrent.ExecutionContext class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknossosClient, editableMappingService: EditableMappingService, - volumeTracingService: VolumeTracingService, + val volumeTracingService: VolumeTracingService, skeletonTracingService: SkeletonTracingService, val remoteDatastoreClient: TSRemoteDatastoreClient, tracingDataStore: TracingDataStore) @@ -122,18 +122,18 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) case a: RevertToVersionUpdateAction => - revertToVersion(annotationId, annotationWithTracings, a) + revertToVersion(annotationId, annotationWithTracings, a, targetVersion) // TODO if the revert action is not isolated, we need not the target version of all but the 
target version of this update case _: BucketMutatingVolumeUpdateAction => Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") } } yield updated - private def revertToVersion(annotationId: String, - annotationWithTracings: AnnotationWithTracings, - revertAction: RevertToVersionUpdateAction)( - implicit ec: ExecutionContext, - tc: TokenContext): Fox[AnnotationWithTracings] = + private def revertToVersion( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionUpdateAction, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = // Note: works only after “ironing out” the update action groups // TODO: read old annotationProto, tracing, buckets, segment indeces for { @@ -145,7 +145,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestAll = true) // TODO do we need to request the others? _ = logger.info( s"reverting to suorceVersion ${revertAction.sourceVersion}. 
got sourceAnnotation with version ${sourceAnnotation.version} with ${sourceAnnotation.skeletonStats}") - // _ <- revertDistributedElements(annotationId, annotationWithTracings, sourceAnnotation, revertAction) + _ <- revertDistributedElements(annotationId, annotationWithTracings, sourceAnnotation, revertAction, newVersion) } yield sourceAnnotation def createTracing(a: AddLayerAnnotationUpdateAction)( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 1315db091ff..30b2df48631 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -250,19 +250,17 @@ class VolumeTracingService @Inject()( bool2Fox(mag.isIsotropic) } - /* - // TODO - private def revertToVolumeVersion(annotationId: String, - tracingId: String, - sourceVersion: Long, - newVersion: Long, - tracing: VolumeTracing)(implicit tc: TokenContext): Fox[VolumeTracing] = { + def revertVolumeData(tracingId: String, + sourceVersion: Long, + sourceTracing: VolumeTracing, + newVersion: Long, + tracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { val dataLayer = volumeTracingLayer(tracingId, tracing) val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion() for { - fallbackLayer <- getFallbackLayer(annotationId, tracingId) + fallbackLayer <- getFallbackLayer(tracingId, tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, newVersion, @@ -270,8 +268,7 @@ class VolumeTracingService @Inject()( fallbackLayer, dataLayer.additionalAxes, tc) - sourceTracing <- find(annotationId, tracingId, Some(sourceVersion)) - mappingName <- baseMappingName(sourceTracing) + mappingName <- 
selectMappingName(sourceTracing) _ <- Fox.serialCombined(bucketStream) { case (bucketPosition, dataBeforeRevert, version) => if (version > sourceVersion) { @@ -310,9 +307,8 @@ class VolumeTracingService @Inject()( } else Fox.successful(()) } _ <- segmentIndexBuffer.flush() - } yield sourceTracing + } yield () } - */ def initializeWithDataMultiple(annotationId: String, tracingId: String, tracing: VolumeTracing, initialData: File)( implicit mp: MessagesProvider, From fe6257c601bb70b5b5c08bd97c42d5fcf2d0e1b1 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 9 Oct 2024 11:31:33 +0200 Subject: [PATCH 095/361] cache materialized with version --- .../tracingstore/TracingStoreModule.scala | 3 +- .../annotation/TSAnnotationService.scala | 50 +++++++++++++++---- 2 files changed, 41 insertions(+), 12 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index c0d36e2265c..49ac749ec1d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -4,7 +4,7 @@ import org.apache.pekko.actor.ActorSystem import com.google.inject.AbstractModule import com.google.inject.name.Names import com.scalableminds.webknossos.datastore.services.AdHocMeshServiceHolder -import com.scalableminds.webknossos.tracingstore.annotation.AnnotationTransactionService +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.TracingDataStore import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService @@ -27,6 +27,7 @@ class TracingStoreModule extends 
AbstractModule { bind(classOf[TSSlackNotificationService]).asEagerSingleton() bind(classOf[AdHocMeshServiceHolder]).asEagerSingleton() bind(classOf[AnnotationTransactionService]).asEagerSingleton() + bind(classOf[TSAnnotationService]).asEagerSingleton() } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 64b05098ecc..bb43d1c6069 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} @@ -180,14 +181,43 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss withTracings <- getWithTracings(annotationId, version, List.empty, List.empty, requestAll = false) } yield withTracings.annotation - def getWithTracings( + private lazy val materializedAnnotationWithTracingCache = + // annotation id, version, requestedSkeletons, requestedVolumes, requestAll + // TODO instead of requested, use list of tracings determined from requests + updates? 
+ AlfuCache[(String, Long, List[String], List[String], Boolean), AnnotationWithTracings]() + + private def getWithTracings( annotationId: String, version: Option[Long], requestedSkeletonTracingIds: List[String], requestedVolumeTracingIds: List[String], requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - annotationWithVersion <- tracingDataStore.annotations.get(annotationId, version)(fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + targetVersion <- determineTargetVersion(annotationId, version) ?~> "determineTargetVersion.failed" + updatedAnnotation <- materializedAnnotationWithTracingCache.getOrLoad( + (annotationId, targetVersion, requestedSkeletonTracingIds, requestedVolumeTracingIds, requestAll), + _ => + getWithTracingsVersioned( + annotationId, + targetVersion, + requestedSkeletonTracingIds, + requestedVolumeTracingIds, + requestAll = true) // TODO can we request fewer to save perf? still need to avoid duplicate apply + ) + } yield updatedAnnotation + + private def getWithTracingsVersioned( + annotationId: String, + version: Long, + requestedSkeletonTracingIds: List[String], + requestedVolumeTracingIds: List[String], + requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + for { + annotationWithVersion <- tracingDataStore.annotations.get(annotationId, Some(version))( + fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + _ = logger.info( + s"cache miss for ${annotationId} v$version, requested ${requestedSkeletonTracingIds.mkString(",")} + ${requestedVolumeTracingIds + .mkString(",")} (requestAll=$requestAll). 
Applying updates from ${annotationWithVersion.version} to $version...") annotation = annotationWithVersion.value updated <- applyPendingUpdates(annotation, annotationId, @@ -225,12 +255,11 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def applyPendingUpdates( annotation: AnnotationProto, annotationId: String, - targetVersionOpt: Option[Long], + targetVersion: Long, requestedSkeletonTracingIds: List[String], requestedVolumeTracingIds: List[String], requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - targetVersion <- determineTargetVersion(annotation, annotationId, targetVersionOpt) ?~> "determineTargetVersion.failed" updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" annotationWithTracings <- findTracingsForUpdates(annotation, updates, @@ -371,7 +400,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { updated <- updateIter(Some(annotation), updates) updatedWithNewVerson = updated.withVersion(targetVersion) - _ = logger.info(s"flushing, with ${updated.skeletonStats}") + _ = logger.info(s"flushing v${targetVersion}, with ${updated.skeletonStats}") _ <- updatedWithNewVerson.flushBufferedUpdates() _ <- flushUpdatedTracings(updatedWithNewVerson) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) @@ -406,18 +435,17 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def flushAnnotationInfo(annotationId: String, annotationWithTracings: AnnotationWithTracings) = tracingDataStore.annotations.put(annotationId, annotationWithTracings.version, annotationWithTracings.annotation) - private def determineTargetVersion(annotation: AnnotationProto, - annotationId: String, - targetVersionOpt: Option[Long]): Fox[Long] = + private def determineTargetVersion(annotationId: String, targetVersionOpt: Option[Long]): Fox[Long] = /* * Determines 
the newest saved version from the updates column. * if there are no updates at all, assume annotation is brand new (possibly created from NML, * hence the emptyFallbck annotation.version) */ for { - newestUpdateVersion <- tracingDataStore.annotationUpdates.getVersion(annotationId, - mayBeEmpty = Some(true), - emptyFallback = Some(annotation.version)) + newestUpdateVersion <- tracingDataStore.annotationUpdates.getVersion( + annotationId, + mayBeEmpty = Some(true), + emptyFallback = Some(0L)) // TODO in case of empty, look in annotation table, take version from there } yield { targetVersionOpt match { case None => newestUpdateVersion From 2d376402e27b2a04cd8e0ada9a4b1507bb07546d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 14 Oct 2024 09:34:07 +0200 Subject: [PATCH 096/361] revert proofreading distributed elements --- .../tracingstore/annotation/AnnotationReversion.scala | 11 ++++++++++- .../annotation/AnnotationWithTracings.scala | 4 ++-- .../editablemapping/EditableMappingUpdater.scala | 10 ++-------- 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala index bb5bb384602..5e5deb0f035 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -2,7 +2,7 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.Fox -import com.scalableminds.util.tools.Fox.box2Fox +import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import scala.concurrent.ExecutionContext @@ -28,8 +28,17 @@ 
trait AnnotationReversion { sourceTracing, newVersion: Long, tracingBeforeRevert) + _ <- Fox.runIf(sourceTracing.getHasEditableMapping)( + revertEditableMappingFields(currentAnnotationWithTracings, revertAction, tracingId)) } yield () } } yield () + private def revertEditableMappingFields(currentAnnotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionUpdateAction, + tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = + for { + updater <- currentAnnotationWithTracings.getEditableMappingUpdater(tracingId).toFox + _ <- updater.revertToVersion(revertAction) + } yield () } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 42c41169f3d..a2094bfa70d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -71,7 +71,7 @@ case class AnnotationWithTracings( (info, _) <- editableMappingsByTracingId.get(tracingId) } yield info - private def getEditableMappingUpdater(tracingId: String): Option[EditableMappingUpdater] = + def getEditableMappingUpdater(tracingId: String): Option[EditableMappingUpdater] = for { (_, updater) <- editableMappingsByTracingId.get(tracingId) } yield updater @@ -130,7 +130,7 @@ case class AnnotationWithTracings( def applyEditableMappingAction(a: EditableMappingUpdateAction)( implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { - updater: EditableMappingUpdater <- getEditableMappingUpdater(a.actionTracingId).toFox // TODO editable mapping update actions need tracing id + updater: EditableMappingUpdater <- getEditableMappingUpdater(a.actionTracingId).toFox info <- getEditableMappingInfo(a.actionTracingId).toFox updated <- 
updater.applyOneUpdate(info, a) } yield diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 715a3055984..09b2e04f031 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -128,8 +128,6 @@ class EditableMappingUpdater( applySplitAction(mapping, splitAction) ?~> "Failed to apply split action" case mergeAction: MergeAgglomerateUpdateAction => applyMergeAction(mapping, mergeAction) ?~> "Failed to apply merge action" - case revertAction: RevertToVersionUpdateAction => - revertToVersion(revertAction) ?~> "Failed to apply revert action" case _ => Fox.failure("this is not an editable mapping update action!") } @@ -420,13 +418,9 @@ class EditableMappingUpdater( ) } - private def revertToVersion(revertAction: RevertToVersionUpdateAction)( - implicit ec: ExecutionContext): Fox[EditableMappingInfo] = + def revertToVersion(revertAction: RevertToVersionUpdateAction)(implicit ec: ExecutionContext): Fox[Unit] = for { _ <- bool2Fox(revertAction.sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" - oldInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId, Some(revertAction.sourceVersion))( - ec, - tokenContext) _ = segmentToAgglomerateBuffer.clear() _ = agglomerateToGraphBuffer.clear() segmentToAgglomerateChunkNewestStream = new VersionedSegmentToAgglomerateChunkIterator( @@ -466,6 +460,6 @@ class EditableMappingUpdater( } yield () } else Fox.successful(()) } - } yield oldInfo + } yield () } From 269e3500ea6182a67fe1ba3729cc491e7710d989 Mon Sep 17 00:00:00 2001 From: Florian M 
Date: Mon, 14 Oct 2024 09:45:52 +0200 Subject: [PATCH 097/361] fix volume revert logic --- .../annotation/AnnotationReversion.scala | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala index 5e5deb0f035..b85e053366d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -23,11 +23,12 @@ trait AnnotationReversion { case (tracingId, sourceTracing) => for { tracingBeforeRevert <- currentAnnotationWithTracings.getVolume(tracingId).toFox - _ <- volumeTracingService.revertVolumeData(tracingId, - revertAction.sourceVersion, - sourceTracing, - newVersion: Long, - tracingBeforeRevert) + _ <- Fox.runIf(!sourceTracing.getHasEditableMapping)( + volumeTracingService.revertVolumeData(tracingId, + revertAction.sourceVersion, + sourceTracing, + newVersion: Long, + tracingBeforeRevert)) _ <- Fox.runIf(sourceTracing.getHasEditableMapping)( revertEditableMappingFields(currentAnnotationWithTracings, revertAction, tracingId)) } yield () @@ -40,5 +41,6 @@ trait AnnotationReversion { for { updater <- currentAnnotationWithTracings.getEditableMappingUpdater(tracingId).toFox _ <- updater.revertToVersion(revertAction) + _ <- updater.flushBuffersToFossil() } yield () } From 50d9dc4a0e9fabb1cf06ce0c0e7690e4edfb87d2 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 14 Oct 2024 15:38:21 +0200 Subject: [PATCH 098/361] wip fix parallel revert --- .../annotation/TSAnnotationService.scala | 12 +++++------- .../volume/VolumeSegmentIndexService.scala | 4 +++- .../volume/VolumeTracingBucketHelper.scala | 2 +- .../tracings/volume/VolumeTracingService.scala | 17 ++++++++++------- 4 files 
changed, 19 insertions(+), 16 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index bb43d1c6069..614f3b4a2cf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -62,6 +62,11 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss with AnnotationReversion with LazyLogging { + private lazy val materializedAnnotationWithTracingCache = + // annotation id, version, requestedSkeletons, requestedVolumes, requestAll + // TODO instead of requested, use list of tracings determined from requests + updates? + AlfuCache[(String, Long, List[String], List[String], Boolean), AnnotationWithTracings](maxCapacity = 1) + def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit tc: TokenContext): Fox[Unit] = for { _ <- remoteWebknossosClient.reportTracingUpdates( @@ -181,11 +186,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss withTracings <- getWithTracings(annotationId, version, List.empty, List.empty, requestAll = false) } yield withTracings.annotation - private lazy val materializedAnnotationWithTracingCache = - // annotation id, version, requestedSkeletons, requestedVolumes, requestAll - // TODO instead of requested, use list of tracings determined from requests + updates? 
- AlfuCache[(String, Long, List[String], List[String], Boolean), AnnotationWithTracings]() - private def getWithTracings( annotationId: String, version: Option[Long], @@ -388,8 +388,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss remainingUpdates match { case List() => Fox.successful(annotationWithTracings) case update :: tail => - logger.info( - f"${remainingUpdates.length} remainingUpdates, current skeleton ${annotationWithTracings.skeletonStats})") updateIter(applyUpdate(annotationId, annotationWithTracings, update, targetVersion), tail) } case _ => annotationWithTracingsFox diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala index a88c863f7ff..b04ce318747 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala @@ -61,7 +61,9 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore editableMappingTracingId: Option[String])(implicit ec: ExecutionContext): Fox[Unit] = for { bucketBytesDecompressed <- tryo( - decompressIfNeeded(bucketBytes, expectedUncompressedBucketSizeFor(elementClass), "")).toFox + decompressIfNeeded(bucketBytes, + expectedUncompressedBucketSizeFor(elementClass), + "updating segment index, new bucket data")).toFox // previous bytes: include fallback layer bytes if available, otherwise use empty bytes previousBucketBytesWithEmptyFallback <- bytesWithEmptyFallback(previousBucketBytesBox, elementClass) ?~> "volumeSegmentIndex.update.getPreviousBucket.failed" segmentIds: Set[Long] <- collectSegmentIds(bucketBytesDecompressed, elementClass) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 0f3e8d06d5c..bec916c2bb5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -50,7 +50,7 @@ trait VolumeBucketCompression extends LazyLogging { } catch { case e: Exception => logger.error( - s"Failed to LZ4-decompress volume bucket ($debugInfo, expected uncompressed size $expectedUncompressedBucketSize): $e") + s"Failed to LZ4-decompress volume bucket ($debugInfo, compressed size: ${data.length}, expected uncompressed size $expectedUncompressedBucketSize): $e") throw e } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 30b2df48631..9c0ae6d7174 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -254,13 +254,16 @@ class VolumeTracingService @Inject()( sourceVersion: Long, sourceTracing: VolumeTracing, newVersion: Long, - tracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { + tracingBeforeRevert: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { - val dataLayer = volumeTracingLayer(tracingId, tracing) - val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion() + val dataLayer = volumeTracingLayer(tracingId, tracingBeforeRevert) + val bucketStreamBeforeRevert = + 
dataLayer.volumeBucketProvider.bucketStreamWithVersion(version = Some(tracingBeforeRevert.version)) + + logger.info(s"reverting volume data from v${tracingBeforeRevert.version} to v$sourceVersion, creating v$newVersion") for { - fallbackLayer <- getFallbackLayer(tracingId, tracing) + fallbackLayer <- getFallbackLayer(tracingId, tracingBeforeRevert) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, newVersion, @@ -269,14 +272,14 @@ class VolumeTracingService @Inject()( dataLayer.additionalAxes, tc) mappingName <- selectMappingName(sourceTracing) - _ <- Fox.serialCombined(bucketStream) { + _ <- Fox.serialCombined(bucketStreamBeforeRevert) { case (bucketPosition, dataBeforeRevert, version) => if (version > sourceVersion) { loadBucket(dataLayer, bucketPosition, Some(sourceVersion)).futureBox.map { case Full(dataAfterRevert) => for { _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + _ <- Fox.runIfOptionTrue(tracingBeforeRevert.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, @@ -291,7 +294,7 @@ class VolumeTracingService @Inject()( for { dataAfterRevert <- Fox.successful(revertedValue) _ <- saveBucket(dataLayer, bucketPosition, dataAfterRevert, newVersion) - _ <- Fox.runIfOptionTrue(tracing.hasSegmentIndex)( + _ <- Fox.runIfOptionTrue(tracingBeforeRevert.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, From e0e4dc4dd6abdaea306caf9f0b08291b6bdd993b Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 14 Oct 2024 15:47:52 +0200 Subject: [PATCH 099/361] in bucket data reversion, handle revertedValue --- .../annotation/TSAnnotationService.scala | 2 +- .../volume/VolumeSegmentIndexService.scala | 18 +++++++++++++----- 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 614f3b4a2cf..9c55671815f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -65,7 +65,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private lazy val materializedAnnotationWithTracingCache = // annotation id, version, requestedSkeletons, requestedVolumes, requestAll // TODO instead of requested, use list of tracings determined from requests + updates? - AlfuCache[(String, Long, List[String], List[String], Boolean), AnnotationWithTracings](maxCapacity = 1) + AlfuCache[(String, Long, List[String], List[String], Boolean), AnnotationWithTracings](maxCapacity = 1000) def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit tc: TokenContext): Fox[Unit] = for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala index b04ce318747..4416f678d9a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentIndexService.scala @@ -44,6 +44,7 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore with ProtoGeometryImplicits with VolumeBucketCompression with SegmentIndexKeyHelper + with ReversionHelper with LazyLogging { private val volumeSegmentIndexClient: FossilDBClient = tracingDataStore.volumeSegmentIndex @@ -60,10 +61,14 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore mappingName: 
Option[String], editableMappingTracingId: Option[String])(implicit ec: ExecutionContext): Fox[Unit] = for { - bucketBytesDecompressed <- tryo( - decompressIfNeeded(bucketBytes, - expectedUncompressedBucketSizeFor(elementClass), - "updating segment index, new bucket data")).toFox + bucketBytesDecompressed <- if (isRevertedElement(bucketBytes)) { + Fox.successful(emptyArrayForElementClass(elementClass)) + } else { + tryo( + decompressIfNeeded(bucketBytes, + expectedUncompressedBucketSizeFor(elementClass), + "updating segment index, new bucket data")).toFox + } // previous bytes: include fallback layer bytes if available, otherwise use empty bytes previousBucketBytesWithEmptyFallback <- bytesWithEmptyFallback(previousBucketBytesBox, elementClass) ?~> "volumeSegmentIndex.update.getPreviousBucket.failed" segmentIds: Set[Long] <- collectSegmentIds(bucketBytesDecompressed, elementClass) @@ -90,11 +95,14 @@ class VolumeSegmentIndexService @Inject()(val tracingDataStore: TracingDataStore private def bytesWithEmptyFallback(bytesBox: Box[Array[Byte]], elementClass: ElementClassProto)( implicit ec: ExecutionContext): Fox[Array[Byte]] = bytesBox match { - case Empty => Fox.successful(Array.fill[Byte](ElementClass.bytesPerElement(elementClass))(0)) + case Empty => Fox.successful(emptyArrayForElementClass(elementClass)) case Full(bytes) => Fox.successful(bytes) case f: Failure => f.toFox } + private def emptyArrayForElementClass(elementClass: ElementClassProto): Array[Byte] = + Array.fill[Byte](ElementClass.bytesPerElement(elementClass))(0) + private def removeBucketFromSegmentIndex( segmentIndexBuffer: VolumeSegmentIndexBuffer, segmentId: Long, From 8ea36da96799ca60047bc756ecf26ce4e119beef Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 15 Oct 2024 09:45:39 +0200 Subject: [PATCH 100/361] split and isolate function --- .../scala/collections/SequenceUtils.scala | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git 
a/util/src/main/scala/collections/SequenceUtils.scala b/util/src/main/scala/collections/SequenceUtils.scala index 9d839141c7d..d047e352c3f 100644 --- a/util/src/main/scala/collections/SequenceUtils.scala +++ b/util/src/main/scala/collections/SequenceUtils.scala @@ -1,9 +1,40 @@ package collections +import scala.annotation.tailrec + object SequenceUtils { def findUniqueElement[T](list: Seq[T]): Option[T] = { val uniqueElements = list.distinct if (uniqueElements.length == 1) uniqueElements.headOption else None } + + /* + Split a list into n parts, isolating the elements that satisfy the given predicate. + Those elements will be in single-item lists + Example: + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i == 4) + → List(List(1, 2, 3), List(4), List(5, 6, 7)) + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i % 3 == 0) + → List(List(1, 2), List(3), List(4, 5), List(6), List(7)) + splitAndIsolate(List(1,2,3,4,5,6,7))(i => i > 1000) # no matches → no splitting + → List(List(1, 2, 3, 4, 5, 6, 7)) + splitAndIsolate(List())(i => true) # empty list stays empty + → List() + */ + def splitAndIsolate[T](list: List[T])(predicate: T => Boolean): List[List[T]] = + list + .foldLeft(List[List[T]]()) { (acc, item) => + if (predicate(item)) { + List.empty :: List(item) :: acc + } else { + acc match { + case head :: tail => (item :: head) :: tail + case Nil => List(List(item)) + } + } + } + .reverse // we prepended on the outer list (for perf reasons) + .map(_.reverse) // we prepended on the inner lists (for perf reasons) + } From 5bdce21a98d54d6208b80624a74898a4ffe887d0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 15 Oct 2024 10:12:39 +0200 Subject: [PATCH 101/361] WIP: Unified Annotation Versioning: regroup update groups by revert actions --- .../scala/collections/SequenceUtils.scala | 2 - .../annotation/TSAnnotationService.scala | 54 +++++++++++++------ 2 files changed, 39 insertions(+), 17 deletions(-) diff --git a/util/src/main/scala/collections/SequenceUtils.scala 
b/util/src/main/scala/collections/SequenceUtils.scala index d047e352c3f..a584ae2923d 100644 --- a/util/src/main/scala/collections/SequenceUtils.scala +++ b/util/src/main/scala/collections/SequenceUtils.scala @@ -1,7 +1,5 @@ package collections -import scala.annotation.tailrec - object SequenceUtils { def findUniqueElement[T](list: Seq[T]): Option[T] = { val uniqueElements = list.distinct diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 9c55671815f..5c47e2324c4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.annotation +import collections.SequenceUtils import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant @@ -87,7 +88,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( - implicit ec: ExecutionContext): Fox[List[UpdateAction]] = + implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = if (desiredVersion == existingVersion) Fox.successful(List()) else { for { @@ -95,8 +96,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId, Some(desiredVersion), Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) - updateActionGroupsWithVersionsIroned = ironOutReversionFolds(updateActionGroupsWithVersions) - } yield updateActionGroupsWithVersionsIroned + 
updateActionGroupsWithVersionsRegrouped = regroupByRevertActions(updateActionGroupsWithVersions) + } yield updateActionGroupsWithVersionsRegrouped } // TODO option to dry apply? @@ -260,18 +261,20 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestedVolumeTracingIds: List[String], requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - updates <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" + updatesRegrouped <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" + updatesFlat = updatesRegrouped.flatMap(_._2) annotationWithTracings <- findTracingsForUpdates(annotation, - updates, + updatesFlat, requestedSkeletonTracingIds, requestedVolumeTracingIds, requestAll) ?~> "findTracingsForUpdates.failed" - annotationWithTracingsAndMappings <- findEditableMappingsForUpdates(annotationId, - annotationWithTracings, - updates, - annotation.version, - targetVersion) - updated <- applyUpdates(annotationWithTracingsAndMappings, annotationId, updates, targetVersion) ?~> "applyUpdates.inner.failed" + annotationWithTracingsAndMappings <- findEditableMappingsForUpdates( + annotationId, + annotationWithTracings, + updatesFlat, + annotation.version, + targetVersion) // TODO: targetVersion should be set per update group + updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesRegrouped) ?~> "applyUpdates.inner.failed" } yield updated private def findEditableMappingsForUpdates( // TODO integrate with findTracings? 
@@ -374,6 +377,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, Map.empty) } + private def applyUpdatesGrouped( + annotation: AnnotationWithTracings, + annotationId: String, + updates: List[(Long, List[UpdateAction])] + ): Fox[AnnotationWithTracings] = ??? + private def applyUpdates( annotation: AnnotationWithTracings, annotationId: String, @@ -406,10 +415,25 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } - private def ironOutReversionFolds( - updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[UpdateAction] = - // TODO: if the source version is in the current update list, it needs to be ironed out. in case of overlaps, iron out from the back. - updateActionGroupsWithVersions.reverse.flatMap(_._2) + private def regroupByRevertActions( + updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[(Long, List[UpdateAction])] = { + val splitGroupLists: List[List[(Long, List[UpdateAction])]] = + SequenceUtils.splitAndIsolate(updateActionGroupsWithVersions.reverse)(actionGroup => + actionGroup._2.contains(updateAction => isRevertAction(updateAction))) + // TODO assert that the groups that contain revert actions contain nothing else + // TODO test this + + splitGroupLists.flatMap { groupsToConcatenate: List[(Long, List[UpdateAction])] => + val updates = groupsToConcatenate.flatMap(_._2) + val targetVersionOpt: Option[Long] = groupsToConcatenate.map(_._1).headOption + targetVersionOpt.map(targetVersion => (targetVersion, updates)) + } + } + + private def isRevertAction(a: UpdateAction): Boolean = a match { + case _: RevertToVersionUpdateAction => true + case _ => false + } private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings)(implicit ec: ExecutionContext) = // TODO skip some flushes to save disk space (e.g. 
skeletons only nth version, or only if requested?) From 3f48feae28cd635482364b2d883a9892b2c6ead6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 15 Oct 2024 10:22:34 +0200 Subject: [PATCH 102/361] wip apply regrouped updates --- .../annotation/AnnotationWithTracings.scala | 7 ++++++ .../annotation/TSAnnotationService.scala | 22 ++++++++++++++++--- .../EditableMappingUpdater.scala | 1 + 3 files changed, 27 insertions(+), 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index a2094bfa70d..5b02bb451b8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -106,6 +106,13 @@ case class AnnotationWithTracings( this.copy(annotation = annotation.copy(version = newVersion), tracingsById = tracingsUpdated.toMap) } + def withTargetVersion(targetVersion: Long): AnnotationWithTracings = { + val editableMappingsUpdated = editableMappingsByTracingId.view.mapValues { + case (mapping, updater) => (mapping, updater.withTargetVersion(targetVersion)) + } + this.copy(editableMappingsByTracingId = editableMappingsUpdated.toMap) + } + def addEditableMapping(volumeTracingId: String, editableMappingInfo: EditableMappingInfo, updater: EditableMappingUpdater): AnnotationWithTracings = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 5c47e2324c4..4f7f32dc975 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -380,8 +380,24 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def applyUpdatesGrouped( annotation: AnnotationWithTracings, annotationId: String, - updates: List[(Long, List[UpdateAction])] - ): Fox[AnnotationWithTracings] = ??? + updateGroups: List[(Long, List[UpdateAction])] + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { + def updateGroupedIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], + remainingUpdateGroups: List[(Long, List[UpdateAction])]): Fox[AnnotationWithTracings] = + annotationWithTracingsFox.futureBox.flatMap { + case Empty => Fox.empty + case Full(annotationWithTracings) => + remainingUpdateGroups match { + case List() => Fox.successful(annotationWithTracings) + case updateGroup :: tail => + updateGroupedIter(applyUpdates(annotationWithTracings, annotationId, updateGroup._2, updateGroup._1), + tail) + } + case _ => annotationWithTracingsFox + } + + updateGroupedIter(Some(annotation), updateGroups) + } private def applyUpdates( annotation: AnnotationWithTracings, @@ -405,7 +421,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss if (updates.isEmpty) Full(annotation) else { for { - updated <- updateIter(Some(annotation), updates) + updated <- updateIter(Some(annotation.withTargetVersion(targetVersion)), updates) updatedWithNewVerson = updated.withVersion(targetVersion) _ = logger.info(s"flushing v${targetVersion}, with ${updated.skeletonStats}") _ <- updatedWithNewVerson.flushBufferedUpdates() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 09b2e04f031..4acaa219889 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -462,4 +462,5 @@ class EditableMappingUpdater( } } yield () + def withTargetVersion(targetVersion: Long): EditableMappingUpdater = ??? // TODO build new or copy? } From e98510d911048dbb8fd619fc5c0148ee945e26d3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 15 Oct 2024 10:28:04 +0200 Subject: [PATCH 103/361] build new updater --- .../annotation/AnnotationWithTracings.scala | 4 ++-- .../annotation/TSAnnotationService.scala | 2 +- .../editablemapping/EditableMappingUpdater.scala | 16 +++++++++++++++- 3 files changed, 18 insertions(+), 4 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 5b02bb451b8..658cac9da17 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -106,9 +106,9 @@ case class AnnotationWithTracings( this.copy(annotation = annotation.copy(version = newVersion), tracingsById = tracingsUpdated.toMap) } - def withTargetVersion(targetVersion: Long): AnnotationWithTracings = { + def withNewUpdaters(materializedVersion: Long, targetVersion: Long): AnnotationWithTracings = { val editableMappingsUpdated = editableMappingsByTracingId.view.mapValues { - case (mapping, updater) => (mapping, updater.withTargetVersion(targetVersion)) + case (mapping, updater) => (mapping, updater.newWithTargetVersion(materializedVersion, targetVersion)) } this.copy(editableMappingsByTracingId = editableMappingsUpdated.toMap) } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 4f7f32dc975..31d604f8fc3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -421,7 +421,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss if (updates.isEmpty) Full(annotation) else { for { - updated <- updateIter(Some(annotation.withTargetVersion(targetVersion)), updates) + updated <- updateIter(Some(annotation.withNewUpdaters(annotation.version, targetVersion)), updates) updatedWithNewVerson = updated.withVersion(targetVersion) _ = logger.info(s"flushing v${targetVersion}, with ${updated.skeletonStats}") _ <- updatedWithNewVerson.flushBufferedUpdates() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 4acaa219889..15f5fda72cc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -462,5 +462,19 @@ class EditableMappingUpdater( } } yield () - def withTargetVersion(targetVersion: Long): EditableMappingUpdater = ??? // TODO build new or copy? 
+ def newWithTargetVersion(currentMaterializedVersion: Long, targetVersion: Long): EditableMappingUpdater = + new EditableMappingUpdater( + annotationId, + tracingId, + baseMappingName, + currentMaterializedVersion, + targetVersion, + remoteFallbackLayer, + tokenContext, + remoteDatastoreClient, + editableMappingService, + annotationService, + tracingDataStore, + relyOnAgglomerateIds = relyOnAgglomerateIds + ) } From e098113e5e3b91bca1e2fff2c987ab5822b93274 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 15 Oct 2024 12:04:04 +0200 Subject: [PATCH 104/361] fix regroup --- .../UpdateGroupHandlingUnitTestSuite.scala | 39 +++++++++++++++++ .../AnnotationTransactionService.scala | 2 +- .../annotation/TSAnnotationService.scala | 43 ++++++------------- .../annotation/UpdateGroupHandling.scala | 31 +++++++++++++ .../tracings/TracingDataStore.scala | 1 + 5 files changed, 84 insertions(+), 32 deletions(-) create mode 100644 test/backend/UpdateGroupHandlingUnitTestSuite.scala create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala diff --git a/test/backend/UpdateGroupHandlingUnitTestSuite.scala b/test/backend/UpdateGroupHandlingUnitTestSuite.scala new file mode 100644 index 00000000000..bdc06c79f36 --- /dev/null +++ b/test/backend/UpdateGroupHandlingUnitTestSuite.scala @@ -0,0 +1,39 @@ +package backend + +import com.scalableminds.webknossos.tracingstore.annotation.{RevertToVersionUpdateAction, UpdateGroupHandling} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{MergeTreeSkeletonAction} +import org.scalatestplus.play.PlaySpec + +class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling { + + "regroup" should { + "work" in { + val updateGroupsBefore = List( + (5L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + 
)), + (6L, + List( + RevertToVersionUpdateAction(sourceVersion = 1), + )), + (7L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )), + (8L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )), + ) + val res = regroupByRevertActions(updateGroupsBefore) + assert(res.length == 3) + assert(res(1)._2.length == 1) + assert(res(1)._1 == 6L) + } + } + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index e96ffd31641..33e81fcfe85 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -165,7 +165,7 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe currentCommittedVersion: Fox[Long] = annotationService.currentMaterializableVersion(annotationId) _ = logger.info(s"trying to commit ${updateGroups .map(_.actions.length) - .sum} actions in ${updateGroups.length} groups (versions ${updateGroups.map(_.version).mkString(",")}") + .sum} actions in ${updateGroups.length} groups (versions ${updateGroups.map(_.version).mkString(",")})") newVersion <- updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => previousVersion.flatMap { prevVersion: Long => if (prevVersion + 1 == updateGroup.version) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 31d604f8fc3..fff6b7890bf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -1,6 +1,5 @@ package com.scalableminds.webknossos.tracingstore.annotation -import collections.SequenceUtils import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant @@ -61,6 +60,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss with FallbackDataHelper with ProtoGeometryImplicits with AnnotationReversion + with UpdateGroupHandling with LazyLogging { private lazy val materializedAnnotationWithTracingCache = @@ -91,13 +91,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = if (desiredVersion == existingVersion) Fox.successful(List()) else { - for { - updateActionGroupsWithVersions <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( - annotationId, - Some(desiredVersion), - Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) - updateActionGroupsWithVersionsRegrouped = regroupByRevertActions(updateActionGroupsWithVersions) - } yield updateActionGroupsWithVersionsRegrouped + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(desiredVersion), + Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) } // TODO option to dry apply? 
@@ -261,8 +258,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestedVolumeTracingIds: List[String], requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - updatesRegrouped <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" - updatesFlat = updatesRegrouped.flatMap(_._2) + updateGroupsAsSaved <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" + updatesGroupsRegrouped = regroupByRevertActions(updateGroupsAsSaved) + updatesFlat = updatesGroupsRegrouped.flatMap(_._2) annotationWithTracings <- findTracingsForUpdates(annotation, updatesFlat, requestedSkeletonTracingIds, @@ -274,7 +272,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss updatesFlat, annotation.version, targetVersion) // TODO: targetVersion should be set per update group - updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesRegrouped) ?~> "applyUpdates.inner.failed" + updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesGroupsRegrouped) ?~> "applyUpdates.inner.failed" } yield updated private def findEditableMappingsForUpdates( // TODO integrate with findTracings? @@ -405,6 +403,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss updates: List[UpdateAction], targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { + logger.info(s"applying ${updates.length} to go from v${annotation.version} to v$targetVersion") + + // TODO can we make this tail recursive? 
def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = annotationWithTracingsFox.futureBox.flatMap { @@ -431,26 +432,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } - private def regroupByRevertActions( - updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[(Long, List[UpdateAction])] = { - val splitGroupLists: List[List[(Long, List[UpdateAction])]] = - SequenceUtils.splitAndIsolate(updateActionGroupsWithVersions.reverse)(actionGroup => - actionGroup._2.contains(updateAction => isRevertAction(updateAction))) - // TODO assert that the groups that contain revert actions contain nothing else - // TODO test this - - splitGroupLists.flatMap { groupsToConcatenate: List[(Long, List[UpdateAction])] => - val updates = groupsToConcatenate.flatMap(_._2) - val targetVersionOpt: Option[Long] = groupsToConcatenate.map(_._1).headOption - targetVersionOpt.map(targetVersion => (targetVersion, updates)) - } - } - - private def isRevertAction(a: UpdateAction): Boolean = a match { - case _: RevertToVersionUpdateAction => true - case _ => false - } - private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings)(implicit ec: ExecutionContext) = // TODO skip some flushes to save disk space (e.g. skeletons only nth version, or only if requested?) 
for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala new file mode 100644 index 00000000000..6b8f4cf0e04 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala @@ -0,0 +1,31 @@ +package com.scalableminds.webknossos.tracingstore.annotation + +import collections.SequenceUtils + +trait UpdateGroupHandling { + + def regroupByRevertActions( + updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[(Long, List[UpdateAction])] = { + val splitGroupLists: List[List[(Long, List[UpdateAction])]] = + SequenceUtils.splitAndIsolate(updateActionGroupsWithVersions.reverse)(actionGroup => + actionGroup._2.exists(updateAction => isRevertAction(updateAction))) + // TODO assert that the groups that contain revert actions contain nothing else + // TODO test this + + splitGroupLists.flatMap { groupsToConcatenate: List[(Long, List[UpdateAction])] => + concatenateUpdateActionGroups(groupsToConcatenate) + } + } + + private def concatenateUpdateActionGroups( + groups: List[(Long, List[UpdateAction])]): Option[(Long, List[UpdateAction])] = { + val updates = groups.flatMap(_._2) + val targetVersionOpt: Option[Long] = groups.map(_._1).lastOption + targetVersionOpt.map(targetVersion => (targetVersion, updates)) + } + + private def isRevertAction(a: UpdateAction): Boolean = a match { + case _: RevertToVersionUpdateAction => true + case _ => false + } +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala index e5e5d0ad777..76be451e007 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingDataStore.scala @@ -44,6 +44,7 @@ class TracingDataStore @Inject()(config: TracingStoreConfig, healthClient.shutdown() skeletons.shutdown() annotationUpdates.shutdown() + annotations.shutdown() volumes.shutdown() volumeData.shutdown() editableMappingsInfo.shutdown() From d5f1a62da96ef23019b0246c79cec782875b42a8 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 16 Oct 2024 13:36:37 +0200 Subject: [PATCH 105/361] delete annotation layer via update action --- .../WKRemoteTracingStoreController.scala | 23 +++++++++- app/models/analytics/AnalyticsService.scala | 1 + app/models/annotation/Annotation.scala | 9 +++- app/models/annotation/AnnotationService.scala | 4 +- conf/webknossos.latest.routes | 1 + .../models/annotation/AnnotationLayer.scala | 42 ++++++++++--------- .../annotation/AnnotationLayerType.scala | 6 +++ .../webknossos/datastore/rpc/RPCRequest.scala | 4 +- .../slacknotification/SlackClient.scala | 1 + .../TSRemoteWebknossosClient.scala | 7 ++++ .../AnnotationTransactionService.scala | 13 ++++++ .../annotation/AnnotationUpdateActions.scala | 4 +- .../annotation/TSAnnotationService.scala | 5 +++ .../annotation/UpdateActions.scala | 2 + 14 files changed, 95 insertions(+), 27 deletions(-) diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index f3b488b8060..14eb8e2146b 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -3,6 +3,7 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import 
com.scalableminds.webknossos.tracingstore.TracingUpdatesReport import com.scalableminds.webknossos.tracingstore.tracings.TracingIds @@ -51,6 +52,26 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore val bearerTokenService: WebknossosBearerTokenAuthenticatorService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService + def updateAnnotationLayers(name: String, key: String, annotationId: String): Action[List[AnnotationLayer]] = + Action.async(validateJson[List[AnnotationLayer]]) { implicit request => + for { + annotationIdValidated <- ObjectId.fromString(annotationId) + existingLayers <- annotationLayerDAO.findAnnotationLayersFor(annotationIdValidated) + existingLayerIds = existingLayers.map(_.tracingId).toSet + newLayerIds = request.body.map(_.tracingId).toSet + layerIdsToDelete = existingLayerIds.diff(newLayerIds) + layerIdsToUpdate = existingLayerIds.intersect(newLayerIds) + layerIdsToInsert = newLayerIds.diff(existingLayerIds) + _ <- Fox.serialCombined(layerIdsToDelete.toList)( + annotationLayerDAO.deleteOneByTracingId(annotationIdValidated, _)) + _ <- Fox.serialCombined(request.body.filter(l => layerIdsToInsert.contains(l.tracingId)))( + annotationLayerDAO.insertOne(annotationIdValidated, _)) + _ <- Fox.serialCombined(request.body.filter(l => layerIdsToUpdate.contains(l.tracingId)))(l => + annotationLayerDAO.updateName(annotationIdValidated, l.tracingId, l.name)) + // Layer stats are ignored here, they are sent eagerly when saving updates + } yield Ok + } + def handleTracingUpdateReport(name: String, key: String): Action[TracingUpdatesReport] = Action.async(validateJson[TracingUpdatesReport]) { implicit request => implicit val ctx: DBAccessContext = GlobalAccessContext @@ -63,7 +84,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore _ <- annotationDAO.updateModified(annotation._id, Instant.now) /*_ <- Fox.runOptional(report.statistics) { statistics => 
annotationLayerDAO.updateStatistics(annotation._id, annotationId, statistics) - }*/ // TODO stats per tracing id + }*/ // TODO stats per tracing id. note: they might arrive before the layer is created. skip them then. userBox <- bearerTokenService.userForTokenOpt(report.userToken).futureBox trackTime = report.significantChangesCount > 0 || !wkConf.WebKnossos.User.timeTrackingOnlyWithSignificantChanges _ <- Fox.runOptional(userBox)(user => diff --git a/app/models/analytics/AnalyticsService.scala b/app/models/analytics/AnalyticsService.scala index 86b2cec45c1..83aba641736 100644 --- a/app/models/analytics/AnalyticsService.scala +++ b/app/models/analytics/AnalyticsService.scala @@ -55,6 +55,7 @@ class AnalyticsService @Inject()(rpc: RPC, } val wrappedJson = Json.obj("api_key" -> conf.key, "events" -> List(analyticsEventJson)) rpc(conf.uri).silent.postJson(wrappedJson) + () } Fox.successful(()) } diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index 887bf7439c9..493bce4bdd4 100755 --- a/app/models/annotation/Annotation.scala +++ b/app/models/annotation/Annotation.scala @@ -140,13 +140,20 @@ class AnnotationLayerDAO @Inject()(SQLClient: SqlClient)(implicit ec: ExecutionC q"""INSERT INTO webknossos.annotation_layers(_annotation, tracingId, typ, name, statistics) VALUES($annotationId, ${a.tracingId}, ${a.typ}, ${a.name}, ${a.stats})""".asUpdate - def deleteOne(annotationId: ObjectId, layerName: String): Fox[Unit] = + def deleteOneByName(annotationId: ObjectId, layerName: String): Fox[Unit] = for { _ <- run(q"""DELETE FROM webknossos.annotation_layers WHERE _annotation = $annotationId AND name = $layerName""".asUpdate) } yield () + def deleteOneByTracingId(annotationId: ObjectId, tracingId: String): Fox[Unit] = + for { + _ <- run(q"""DELETE FROM webknossos.annotation_layers + WHERE _annotation = $annotationId + AND tracingId = $tracingId""".asUpdate) + } yield () + def findAnnotationIdByTracingId(tracingId: String): 
Fox[ObjectId] = for { rList <- run(q"SELECT _annotation FROM webknossos.annotation_layers WHERE tracingId = $tracingId".as[ObjectId]) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 26dffa21ad7..57327d2924e 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -215,7 +215,7 @@ class AnnotationService @Inject()( def deleteAnnotationLayer(annotation: Annotation, layerName: String): Fox[Unit] = for { - _ <- annotationLayersDAO.deleteOne(annotation._id, layerName) + _ <- annotationLayersDAO.deleteOneByName(annotation._id, layerName) } yield () private def createTracingsForExplorational(dataset: Dataset, @@ -314,7 +314,7 @@ class AnnotationService @Inject()( AnnotationLayer(tracingIdAndName._1, annotationLayerParameters.typ, tracingIdAndName._2, - AnnotationLayerStatistics.zeroedForTyp(annotationLayerParameters.typ)) + AnnotationLayerStatistics.zeroedForType(annotationLayerParameters.typ)) def fetchOldPrecedenceLayer: Fox[Option[FetchedAnnotationLayer]] = if (existingAnnotationLayers.isEmpty) Fox.successful(None) diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 5b9ad059207..64fdcd470a4 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -121,6 +121,7 @@ PUT /datastores/:name # Tracingstores GET /tracingstore controllers.TracingStoreController.listOne() POST /tracingstores/:name/handleTracingUpdateReport controllers.WKRemoteTracingStoreController.handleTracingUpdateReport(name: String, key: String) +POST /tracingstores/:name/updateAnnotationLayers controllers.WKRemoteTracingStoreController.updateAnnotationLayers(name: String, key: String, annotationId: String) POST /tracingstores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaTracingstore(name: String, key: String, token: Option[String]) PUT /tracingstores/:name controllers.TracingStoreController.update(name: 
String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForTracing(name: String, key: String, tracingId: String) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala index a6d0c65c8c5..1c8e827d31c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.models.annotation import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationLayerProto import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType @@ -17,28 +18,12 @@ case class AnnotationLayer( stats: JsObject, ) -object AnnotationLayerStatistics { - - def zeroedForTyp(typ: AnnotationLayerType): JsObject = typ match { - case AnnotationLayerType.Skeleton => - Json.obj( - "treeCount" -> 0, - "nodeCount" -> 0, - "edgeCount" -> 0, - "branchPointCount" -> 0 - ) - case AnnotationLayerType.Volume => - Json.obj( - "segmentCount" -> 0 - ) - } - - def unknown: JsObject = Json.obj() -} - object AnnotationLayer extends FoxImplicits { implicit val jsonFormat: OFormat[AnnotationLayer] = Json.format[AnnotationLayer] + def fromProto(p: AnnotationLayerProto): AnnotationLayer = + AnnotationLayer(p.tracingId, AnnotationLayerType.fromProto(p.`type`), p.name, AnnotationLayerStatistics.unknown) + val defaultSkeletonLayerName: String = "Skeleton" val defaultVolumeLayerName: String = "Volume" @@ 
-63,6 +48,25 @@ object AnnotationLayer extends FoxImplicits { } } +object AnnotationLayerStatistics { + + def zeroedForType(typ: AnnotationLayerType): JsObject = typ match { + case AnnotationLayerType.Skeleton => + Json.obj( + "treeCount" -> 0, + "nodeCount" -> 0, + "edgeCount" -> 0, + "branchPointCount" -> 0 + ) + case AnnotationLayerType.Volume => + Json.obj( + "segmentCount" -> 0 + ) + } + + def unknown: JsObject = Json.obj() +} + case class FetchedAnnotationLayer(tracingId: String, name: String, tracing: Either[SkeletonTracing, VolumeTracing], diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala index 756180cbbd8..2f34d15b159 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala @@ -12,4 +12,10 @@ object AnnotationLayerType extends ExtendedEnumeration { case Skeleton => AnnotationLayerTypeProto.skeleton case Volume => AnnotationLayerTypeProto.volume } + + def fromProto(p: AnnotationLayerTypeProto): AnnotationLayerType = + p match { + case AnnotationLayerTypeProto.skeleton => Skeleton + case AnnotationLayerTypeProto.volume => Volume + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index 4bd9e2872eb..ba8aee37c6d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -166,10 +166,10 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: parseProtoResponse(performRequest)(companion) } - def postJson[J: 
Writes](body: J = Json.obj()): Unit = { + def postJson[J: Writes](body: J = Json.obj()): Fox[Unit] = { request = request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") - performRequest + performRequest.map(_ => ()) } def postProto[T <: GeneratedMessage](body: T): Fox[Unit] = { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala index 99491288037..45f2d4e0766 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/slacknotification/SlackClient.scala @@ -45,6 +45,7 @@ class SlackClient(rpc: RPC, slackUri: String, name: String, verboseLoggingEnable rpc(slackUri).postJson( Json.obj("attachments" -> Json.arr(jsonMessage)) ) + () } else { logger.warn( s"Not sending slack notification as rate limit of $messagesSentSinceReset was reached. 
Message was: $jsonMessage") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 70852b533a6..66976f7fd24 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -5,6 +5,7 @@ import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{ @@ -89,6 +90,12 @@ class TSRemoteWebknossosClient @Inject()( .getWithJsonResponse[String] ) ?~> "annotation.idForTracing.failed" + def updateAnnotationLayers(annotationId: String, annotationLayers: List[AnnotationLayer]): Fox[Unit] = + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/updateAnnotationLayers") + .addQueryString("annotationId" -> annotationId) + .addQueryString("key" -> tracingStoreKey) + .postJson(annotationLayers) + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") .addQueryString("key" -> tracingStoreKey) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 33e81fcfe85..c69dfc1fb9d 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -179,8 +179,21 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe } else failUnlessAlreadyHandled(updateGroup, annotationId, prevVersion) } } + _ <- applyImmediatelyIfNeeded(annotationId, updateGroups.flatMap(_.actions), newVersion) } yield newVersion + private def applyImmediatelyIfNeeded(annotationId: String, updates: List[UpdateAction], newVersion: Long)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Unit] = + if (containsApplyImmediatelyUpdateActions(updates)) { + annotationService.get(annotationId, Some(newVersion)).map(_ => ()) + } else Fox.successful(()) + + private def containsApplyImmediatelyUpdateActions(updates: List[UpdateAction]) = updates.exists { + case _: ApplyImmediatelyUpdateAction => true + case _ => false + } + private def handleUpdateGroup(annotationId: String, updateActionGroup: UpdateActionGroup)( implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index 860cb4f81b9..bda3fc5eead 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -18,7 +18,7 @@ object AnnotationLayerParameters { Json.using[WithDefaultValues].format[AnnotationLayerParameters] } -trait AnnotationUpdateAction extends UpdateAction +trait AnnotationUpdateAction extends ApplyImmediatelyUpdateAction case class AddLayerAnnotationUpdateAction(layerParameters: 
AnnotationLayerParameters, tracingId: String, @@ -48,7 +48,7 @@ case class DeleteLayerAnnotationUpdateAction(tracingId: String, } case class UpdateLayerMetadataAnnotationUpdateAction(tracingId: String, - layerName: String, // Just stored for nicer-looking history + layerName: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index fff6b7890bf..e7abe01cb79 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -10,6 +10,7 @@ import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappin import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingLayer, EditableMappingService, @@ -428,6 +429,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- updatedWithNewVerson.flushBufferedUpdates() _ <- flushUpdatedTracings(updatedWithNewVerson) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) + _ <- remoteWebknossosClient.updateAnnotationLayers(annotationId, + updatedWithNewVerson.annotation.layers + .map(AnnotationLayer.fromProto) + .toList) // TODO perf: skip if no layer changes } yield updatedWithNewVerson } } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index 6b56c292440..b13a71c58db 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -54,6 +54,8 @@ trait UpdateAction { def isViewOnlyChange: Boolean = false } +trait ApplyImmediatelyUpdateAction extends UpdateAction + trait LayerUpdateAction extends UpdateAction { def actionTracingId: String } From 6bd5ceec8a1940b4c4d7a9ccdbccdcafbcaff90e Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 21 Oct 2024 09:39:35 +0200 Subject: [PATCH 106/361] wip add layer --- app/controllers/AnnotationController.scala | 4 ++-- app/controllers/UserTokenController.scala | 4 ++-- .../WKRemoteTracingStoreController.scala | 4 ++-- .../TSRemoteWebknossosClient.scala | 7 +++++++ .../AnnotationTransactionService.scala | 7 ++++--- .../annotation/AnnotationUpdateActions.scala | 2 +- .../annotation/AnnotationWithTracings.scala | 9 ++++++--- .../annotation/TSAnnotationService.scala | 19 +++++++++++++++---- .../SkeletonTracingController.scala | 4 ++-- .../controllers/VolumeTracingController.scala | 7 ++++--- .../tracingstore/tracings/TracingId.scala | 11 +++++++++++ .../tracings/TracingService.scala | 9 +-------- 12 files changed, 57 insertions(+), 30 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index e1b2dfa0128..87c2b734ca0 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -13,7 +13,7 @@ import com.scalableminds.webknossos.datastore.models.annotation.{ } import 
com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters -import com.scalableminds.webknossos.tracingstore.tracings.{TracingIds, TracingType} +import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingType} import mail.{MailchimpClient, MailchimpTag} import models.analytics.{AnalyticsService, CreateAnnotationEvent, OpenAnnotationEvent} import models.annotation.AnnotationState.Cancelled @@ -273,7 +273,7 @@ class AnnotationController @Inject()( ObjectId.dummyId, ObjectId.dummyId, List( - AnnotationLayer(TracingIds.dummyTracingId, + AnnotationLayer(TracingId.dummy, AnnotationLayerType.Skeleton, AnnotationLayer.defaultSkeletonLayerName, AnnotationLayerStatistics.unknown)) diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 29580465eeb..22127329f93 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.datastore.services.{ UserAccessAnswer, UserAccessRequest } -import com.scalableminds.webknossos.tracingstore.tracings.TracingIds +import com.scalableminds.webknossos.tracingstore.tracings.TracingId import javax.inject.Inject import models.annotation._ @@ -161,7 +161,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, mode: AccessMode, userBox: Box[User], token: Option[String]): Fox[UserAccessAnswer] = - if (tracingId == TracingIds.dummyTracingId) + if (tracingId == TracingId.dummy) Fox.successful(UserAccessAnswer(granted = true)) else for { diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 14eb8e2146b..b64e4ed87d3 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import 
com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.tracingstore.TracingUpdatesReport -import com.scalableminds.webknossos.tracingstore.tracings.TracingIds +import com.scalableminds.webknossos.tracingstore.tracings.TracingId import javax.inject.Inject import models.analytics.{AnalyticsService, UpdateAnnotationEvent, UpdateAnnotationViewOnlyEvent} @@ -140,7 +140,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore Action.async { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext - if (tracingId == TracingIds.dummyTracingId) { + if (tracingId == TracingId.dummy) { Fox.successful(Ok(Json.toJson(ObjectId.dummyId))) } else { for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 66976f7fd24..04dac6793a3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -5,6 +5,8 @@ import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC @@ -14,6 +16,7 @@ import 
com.scalableminds.webknossos.datastore.services.{ UserAccessAnswer, UserAccessRequest } +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle import play.api.libs.json.{JsObject, Json, OFormat} @@ -96,6 +99,10 @@ class TSRemoteWebknossosClient @Inject()( .addQueryString("key" -> tracingStoreKey) .postJson(annotationLayers) + def createTracingFor(annotationId: String, + layerParameters: AnnotationLayerParameters): Fox[Either[SkeletonTracing, VolumeTracing]] = + ??? // TODO + override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") .addQueryString("key" -> tracingStoreKey) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index c69dfc1fb9d..2399ed2549e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.util.tools.Fox.bool2Fox import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, TracingId} import com.scalableminds.webknossos.tracingstore.tracings.volume.{ BucketMutatingVolumeUpdateAction, UpdateBucketVolumeAction, @@ -228,8 +228,9 @@ class 
AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe case first :: rest => first.addInfo(updateActionGroup.info) :: rest } actionsWithInfo.map { - case a: UpdateBucketVolumeAction => a.withoutBase64Data - case a => a + case a: UpdateBucketVolumeAction => a.withoutBase64Data + case a: AddLayerAnnotationUpdateAction => a.copy(tracingId = Some(TracingId.generate)) + case a => a } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index bda3fc5eead..f6ab1f62fee 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -21,7 +21,7 @@ object AnnotationLayerParameters { trait AnnotationUpdateAction extends ApplyImmediatelyUpdateAction case class AddLayerAnnotationUpdateAction(layerParameters: AnnotationLayerParameters, - tracingId: String, + tracingId: Option[String] = None, // filled in by backend eagerly on save actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 658cac9da17..6996b398d6d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -78,14 +78,17 @@ case class AnnotationWithTracings( def version: Long = annotation.version - def addTracing(a: AddLayerAnnotationUpdateAction): AnnotationWithTracings = + 
def addLayer(a: AddLayerAnnotationUpdateAction, + tracingId: String, + tracing: Either[SkeletonTracing, VolumeTracing]): AnnotationWithTracings = this.copy( annotation = annotation.copy( layers = annotation.layers :+ AnnotationLayerProto( - a.tracingId, + tracingId, a.layerParameters.name.getOrElse(AnnotationLayer.defaultNameForType(a.layerParameters.typ)), `type` = AnnotationLayerType.toProto(a.layerParameters.typ) - )) + )), + tracingsById = tracingsById.updated(tracingId, tracing) ) def deleteTracing(a: DeleteLayerAnnotationUpdateAction): AnnotationWithTracings = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index e7abe01cb79..a8911ed7e69 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -35,7 +35,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, KeyValueStoreImplicits, TracingDataStore, - TracingIds, + TracingId, TracingSelector, VersionedKeyValuePair } @@ -108,7 +108,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { updated <- updateAction match { case a: AddLayerAnnotationUpdateAction => - Fox.successful(annotationWithTracings.addTracing(a)) + addLayer(annotationId, annotationWithTracings, a) case a: DeleteLayerAnnotationUpdateAction => Fox.successful(annotationWithTracings.deleteTracing(a)) case a: UpdateLayerMetadataAnnotationUpdateAction => @@ -134,6 +134,17 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } yield updated + private def addLayer( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + action: AddLayerAnnotationUpdateAction)(implicit ec: 
ExecutionContext): Fox[AnnotationWithTracings] = + for { + _ <- Fox.successful(()) + tracingId <- action.tracingId ?~> "add layer action has no tracingId" + tracing <- remoteWebknossosClient.createTracingFor(annotationId, action.layerParameters) + updated = annotationWithTracings.addLayer(action, tracingId, tracing) + } yield updated + private def revertToVersion( annotationId: String, annotationWithTracings: AnnotationWithTracings, @@ -536,7 +547,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss version: Option[Long] = None, useCache: Boolean = true, applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = - if (tracingId == TracingIds.dummyTracingId) + if (tracingId == TracingId.dummy) Fox.successful(volumeTracingService.dummyTracing) else { for { @@ -551,7 +562,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss version: Option[Long] = None, useCache: Boolean = true, applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] = - if (tracingId == TracingIds.dummyTracingId) + if (tracingId == TracingId.dummy) Fox.successful(skeletonTracingService.dummyTracing) else { for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index fe1e105568e..f41279b4928 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import 
com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.TracingSelector +import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingSelector} import com.scalableminds.webknossos.tracingstore.tracings.skeleton._ import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} @@ -106,7 +106,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) case _ => None } - newTracingId = skeletonTracingService.generateTracingId + newTracingId = TracingId.generate mergedVolumeStats <- skeletonTracingService.mergeVolumeData(request.body.flatten, tracingsWithIds.map(_._1), newTracingId, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index f84c7ef7d53..75b8b61151b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -35,7 +35,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeSegmentStatisticsService, VolumeTracingService } -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingSelector} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingId, TracingSelector} import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, TSRemoteWebknossosClient, @@ -146,7 +146,7 @@ class 
VolumeTracingController @Inject()( case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) case _ => None } - newTracingId = volumeTracingService.generateTracingId + newTracingId = TracingId.generate mergedVolumeStats <- volumeTracingService.mergeVolumeData(request.body.flatten, tracingsWithIds.map(_._1), newTracingId, @@ -299,10 +299,11 @@ class VolumeTracingController @Inject()( boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - newTracingId = volumeTracingService.generateTracingId + newTracingId = TracingId.generate // TODO /*_ <- Fox.runIf(tracing.getHasEditableMapping)( editableMappingService.duplicate(tracingId, newTracingId, version = None, remoteFallbackLayerOpt))*/ + // TODO actionTracingIds + addLayer tracing ids need to be remapped (as they need to be globally unique) (newId, newTracing) <- volumeTracingService.duplicate( annotationId, tracingId, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala new file mode 100644 index 00000000000..9c6a1af49eb --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingId.scala @@ -0,0 +1,11 @@ +package com.scalableminds.webknossos.tracingstore.tracings + +import java.util.UUID + +object TracingId { + + def generate: String = UUID.randomUUID.toString + + lazy val dummy: String = "dummyTracingId" + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index afe6fd032a4..c0f06df09cc 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -10,14 +10,9 @@ import net.liftweb.common.Box import play.api.i18n.MessagesProvider import scalapb.{GeneratedMessage, GeneratedMessageCompanion} -import java.util.UUID import scala.concurrent.ExecutionContext import scala.concurrent.duration._ -object TracingIds { - val dummyTracingId: String = "dummyTracingId" -} - trait TracingService[T <: GeneratedMessage] extends KeyValueStoreImplicits with FoxImplicits @@ -65,10 +60,8 @@ trait TracingService[T <: GeneratedMessage] } */ - def generateTracingId: String = UUID.randomUUID.toString - def save(tracing: T, tracingId: Option[String], version: Long, toCache: Boolean = false): Fox[String] = { - val id = tracingId.getOrElse(generateTracingId) + val id = tracingId.getOrElse(TracingId.generate) if (toCache) { temporaryTracingStore.insert(id, tracing, Some(temporaryStoreTimeout)) temporaryTracingIdStore.insert(temporaryIdKey(id), "", Some(temporaryIdStoreTimeout)) From 40b7e435bf1d62dc0bb7c77eb83c976920407d1d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 21 Oct 2024 10:59:51 +0200 Subject: [PATCH 107/361] create tracing proto on wk side --- app/controllers/AnnotationController.scala | 70 ----- .../WKRemoteTracingStoreController.scala | 22 ++ app/models/annotation/AnnotationService.scala | 255 ++++++++---------- conf/webknossos.latest.routes | 3 +- .../TSRemoteWebknossosClient.scala | 20 +- 5 files changed, 146 insertions(+), 224 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 87c2b734ca0..d5a1943f01f 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -183,54 +183,6 @@ class AnnotationController @Inject()( } yield JsonOk(json, Messages("annotation.isLockedByOwner.success")) } - def 
addAnnotationLayer(typ: String, id: String): Action[AnnotationLayerParameters] = - sil.SecuredAction.async(validateJson[AnnotationLayerParameters]) { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.addLayer.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - newLayerName = request.body.name.getOrElse(AnnotationLayer.defaultNameForType(request.body.typ)) - _ <- bool2Fox(!annotation.annotationLayers.exists(_.name == newLayerName)) ?~> "annotation.addLayer.nameInUse" - organization <- organizationDAO.findOne(request.identity._organization) - _ <- annotationService.addAnnotationLayer(annotation, organization._id, request.body) - updated <- provider.provideAnnotation(typ, id, request.identity) - json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def addAnnotationLayerWithoutType(id: String): Action[AnnotationLayerParameters] = - sil.SecuredAction.async(validateJson[AnnotationLayerParameters]) { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- addAnnotationLayer(annotation.typ.toString, id)(request) - } yield result - } - - def deleteAnnotationLayer(typ: String, id: String, layerName: String): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.deleteLayer.explorationalsOnly" - annotation <- provider.provideAnnotation(typ, id, request.identity) - _ <- bool2Fox(annotation._user == request.identity._id) ?~> "notAllowed" ~> FORBIDDEN - layer <- annotation.annotationLayers.find(annotationLayer => annotationLayer.name == layerName) ?~> Messages( - 
"annotation.layer.notFound", - layerName) - _ <- bool2Fox(annotation.annotationLayers.length != 1) ?~> "annotation.deleteLayer.onlyLayer" - _ = logger.info( - s"Deleting annotation layer $layerName (tracing id ${layer.tracingId}, typ ${layer.typ}) for annotation $id") - _ <- annotationService.deleteAnnotationLayer(annotation, layerName) - } yield Ok - } - - def deleteAnnotationLayerWithoutType(id: String, layerName: String): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- deleteAnnotationLayer(annotation.typ.toString, id, layerName)(request) - } yield result - } - def createExplorational(organizationId: String, datasetName: String): Action[List[AnnotationLayerParameters]] = sil.SecuredAction.async(validateJson[List[AnnotationLayerParameters]]) { implicit request => for { @@ -282,28 +234,6 @@ class AnnotationController @Inject()( } yield JsonOk(json) } - def makeHybrid(typ: String, id: String, fallbackLayerName: Option[String]): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.addLayer.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - organization <- organizationDAO.findOne(request.identity._organization) - _ <- annotationService.makeAnnotationHybrid(annotation, organization._id, fallbackLayerName) ?~> "annotation.makeHybrid.failed" - updated <- provider.provideAnnotation(typ, id, request.identity) - json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def makeHybridWithoutType(id: String, fallbackLayerName: Option[String]): Action[AnyContent] = - 
sil.SecuredAction.async { implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- makeHybrid(annotation.typ.toString, id, fallbackLayerName)(request) - } yield result - } - def downsample(typ: String, id: String, tracingId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index b64e4ed87d3..2f4c966b8a1 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -3,9 +3,12 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.tracingstore.TracingUpdatesReport +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.TracingId import javax.inject.Inject @@ -16,6 +19,7 @@ import models.annotation.{ AnnotationDAO, AnnotationInformationProvider, AnnotationLayerDAO, + AnnotationService, TracingDataSourceTemporaryStore, TracingStoreService } @@ -26,6 +30,7 @@ import models.user.time.TimeSpanService import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import scalapb.GeneratedMessage import security.{WebknossosBearerTokenAuthenticatorService, WkSilhouetteEnvironment} import utils.{ObjectId, WkConf} @@ -39,6 +44,7 @@ class WKRemoteTracingStoreController 
@Inject()(tracingStoreService: TracingStore userDAO: UserDAO, annotationInformationProvider: AnnotationInformationProvider, analyticsService: AnalyticsService, + annotationService: AnnotationService, datasetDAO: DatasetDAO, annotationDAO: AnnotationDAO, annotationLayerDAO: AnnotationLayerDAO, @@ -168,4 +174,20 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } yield Ok(Json.toJson(dataStore.url)) } } + + def createTracing(name: String, key: String, annotationId: String): Action[AnnotationLayerParameters] = + Action.async(validateJson[AnnotationLayerParameters]) { implicit request => + tracingStoreService.validateAccess(name, key) { _ => + implicit val ctx: DBAccessContext = GlobalAccessContext + for { + annotationIdValidated <- ObjectId.fromString(annotationId) + tracingEither <- annotationService.createTracingForExplorational(annotationIdValidated, request.body) + tracing: GeneratedMessage = tracingEither match { + case Left(s: SkeletonTracing) => s + case Right(v: VolumeTracing) => v + } + } yield Ok(tracing.toByteArray).as(protobufMimeType) + } + } + } diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 57327d2924e..a6e34a8ec71 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -34,7 +34,6 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ } import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters -import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ResolutionRestrictions, @@ -196,44 +195,34 @@ class AnnotationService @Inject()( VolumeTracingDefaults.largestSegmentId } - def addAnnotationLayer(annotation: Annotation, - 
organizationId: String, - annotationLayerParameters: AnnotationLayerParameters)(implicit ctx: DBAccessContext, - mp: MessagesProvider): Fox[Unit] = - for { - dataset <- datasetDAO.findOne(annotation._dataset) ?~> "dataset.notFoundForAnnotation" - dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) ?~> "dataSource.notFound" - newAnnotationLayers <- createTracingsForExplorational( - dataset, - dataSource, - annotation._id, - List(annotationLayerParameters), - organizationId, - annotation.annotationLayers) ?~> "annotation.createTracings.failed" - _ <- annotationLayersDAO.insertForAnnotation(annotation._id, newAnnotationLayers) - } yield () - - def deleteAnnotationLayer(annotation: Annotation, layerName: String): Fox[Unit] = - for { - _ <- annotationLayersDAO.deleteOneByName(annotation._id, layerName) - } yield () - - private def createTracingsForExplorational(dataset: Dataset, - dataSource: DataSource, - annotationId: ObjectId, - allAnnotationLayerParameters: List[AnnotationLayerParameters], - datasetOrganizationId: String, - existingAnnotationLayers: List[AnnotationLayer] = List())( + def createTracingForExplorational(annotationId: ObjectId, params: AnnotationLayerParameters)( implicit ctx: DBAccessContext, - mp: MessagesProvider): Fox[List[AnnotationLayer]] = { + mp: MessagesProvider): Fox[Either[SkeletonTracing, VolumeTracing]] = { + + def fetchOldPrecedenceLayer(existingAnnotationLayers: List[AnnotationLayer], + dataset: Dataset): Fox[Option[FetchedAnnotationLayer]] = + if (existingAnnotationLayers.isEmpty) Fox.successful(None) + else + for { + oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) + tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, None) + else + tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, + None, + skipVolumeData = true, + 
volumeDataZipFormat = VolumeDataZipFormat.wkw, + dataset.voxelSize) + } yield Some(oldPrecedenceLayerFetched) - def getAutoFallbackLayerName: Option[String] = + def getAutoFallbackLayerName(dataSource: DataSource): Option[String] = dataSource.dataLayers.find { case _: SegmentationLayer => true case _ => false }.map(_.name) - def getFallbackLayer(fallbackLayerName: String): Fox[SegmentationLayer] = + def getFallbackLayer(dataSource: DataSource, fallbackLayerName: String): Fox[SegmentationLayer] = for { fallbackLayer <- dataSource.dataLayers .filter(dl => dl.name == fallbackLayerName) @@ -251,87 +240,6 @@ class AnnotationService @Inject()( fallbackLayer.elementClass) } yield fallbackLayer - def createAndSaveAnnotationLayer(annotationLayerParameters: AnnotationLayerParameters, - oldPrecedenceLayerProperties: Option[RedundantTracingProperties], - dataStore: DataStore): Fox[AnnotationLayer] = - for { - client <- tracingStoreService.clientFor(dataset) - tracingIdAndName <- annotationLayerParameters.typ match { - case AnnotationLayerType.Skeleton => - val skeleton = SkeletonTracingDefaults.createInstance.copy( - datasetName = dataset.name, - editPosition = dataSource.center, - organizationId = Some(datasetOrganizationId), - additionalAxes = AdditionalAxis.toProto(dataSource.additionalAxesUnion) - ) - val skeletonAdapted = oldPrecedenceLayerProperties.map { p => - skeleton.copy( - editPosition = p.editPosition, - editRotation = p.editRotation, - zoomLevel = p.zoomLevel, - userBoundingBoxes = p.userBoundingBoxes, - editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates - ) - }.getOrElse(skeleton) - for { - tracingId <- client.saveSkeletonTracing(skeletonAdapted) - name = annotationLayerParameters.name.getOrElse( - AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) - } yield (tracingId, name) - case AnnotationLayerType.Volume => - val autoFallbackLayerName = - if (annotationLayerParameters.autoFallbackLayer) getAutoFallbackLayerName else None 
- val fallbackLayerName = annotationLayerParameters.fallbackLayerName.orElse(autoFallbackLayerName) - for { - fallbackLayer <- Fox.runOptional(fallbackLayerName)(getFallbackLayer) - volumeTracing <- createVolumeTracing( - dataSource, - datasetOrganizationId, - dataStore, - fallbackLayer, - resolutionRestrictions = - annotationLayerParameters.resolutionRestrictions.getOrElse(ResolutionRestrictions.empty), - mappingName = annotationLayerParameters.mappingName - ) - volumeTracingAdapted = oldPrecedenceLayerProperties.map { p => - volumeTracing.copy( - editPosition = p.editPosition, - editRotation = p.editRotation, - zoomLevel = p.zoomLevel, - userBoundingBoxes = p.userBoundingBoxes, - editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates - ) - }.getOrElse(volumeTracing) - volumeTracingId <- client.saveVolumeTracing(volumeTracingAdapted, dataSource = Some(dataSource)) - name = annotationLayerParameters.name - .orElse(autoFallbackLayerName) - .getOrElse(AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) - } yield (volumeTracingId, name) - case _ => - Fox.failure(s"Unknown AnnotationLayerType: ${annotationLayerParameters.typ}") - } - } yield - AnnotationLayer(tracingIdAndName._1, - annotationLayerParameters.typ, - tracingIdAndName._2, - AnnotationLayerStatistics.zeroedForType(annotationLayerParameters.typ)) - - def fetchOldPrecedenceLayer: Fox[Option[FetchedAnnotationLayer]] = - if (existingAnnotationLayers.isEmpty) Fox.successful(None) - else - for { - oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) - tracingStoreClient <- tracingStoreService.clientFor(dataset) - oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) - tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, None) - else - tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, - None, - skipVolumeData = true, - volumeDataZipFormat = VolumeDataZipFormat.wkw, - dataset.voxelSize) - } yield 
Some(oldPrecedenceLayerFetched) - def extractPrecedenceProperties(oldPrecedenceLayer: FetchedAnnotationLayer): RedundantTracingProperties = oldPrecedenceLayer.tracing match { case Left(s) => @@ -354,6 +262,84 @@ class AnnotationService @Inject()( ) } + for { + annotation <- annotationDAO.findOne(annotationId) + dataset <- datasetDAO.findOne(annotation._dataset) + dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "dataStore.notFoundForDataset" + inboxDataSource <- datasetService.dataSourceFor(dataset) + dataSource <- inboxDataSource.toUsable ?~> Messages("dataset.notImported", inboxDataSource.id.name) + oldPrecedenceLayer <- fetchOldPrecedenceLayer(annotation.annotationLayers, dataset) + oldPrecedenceLayerProperties: Option[RedundantTracingProperties] = oldPrecedenceLayer.map( + extractPrecedenceProperties) + tracing <- params.typ match { + case AnnotationLayerType.Skeleton => + val skeleton = SkeletonTracingDefaults.createInstance.copy( + datasetName = dataset.name, + editPosition = dataSource.center, + organizationId = Some(dataset._organization), + additionalAxes = AdditionalAxis.toProto(dataSource.additionalAxesUnion) + ) + val skeletonAdapted = oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => + skeleton.copy( + editPosition = p.editPosition, + editRotation = p.editRotation, + zoomLevel = p.zoomLevel, + userBoundingBoxes = p.userBoundingBoxes, + editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates + ) + }.getOrElse(skeleton) + Fox.successful(Left(skeletonAdapted)) + case AnnotationLayerType.Volume => + val autoFallbackLayerName = + if (params.autoFallbackLayer) getAutoFallbackLayerName(dataSource) else None + val fallbackLayerName = params.fallbackLayerName.orElse(autoFallbackLayerName) + for { + fallbackLayer <- Fox.runOptional(fallbackLayerName)(n => getFallbackLayer(dataSource, n)) + volumeTracing <- createVolumeTracing( + dataSource, + dataset._organization, + dataStore, + fallbackLayer, + 
resolutionRestrictions = params.resolutionRestrictions.getOrElse(ResolutionRestrictions.empty), + mappingName = params.mappingName + ) + volumeTracingAdapted = oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => + volumeTracing.copy( + editPosition = p.editPosition, + editRotation = p.editRotation, + zoomLevel = p.zoomLevel, + userBoundingBoxes = p.userBoundingBoxes, + editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates + ) + }.getOrElse(volumeTracing) + } yield Right(volumeTracingAdapted) + } + } yield tracing + } + + private def createLayersForExplorational(dataset: Dataset, + annotationId: ObjectId, + allAnnotationLayerParameters: List[AnnotationLayerParameters], + existingAnnotationLayers: List[AnnotationLayer])( + implicit ctx: DBAccessContext, + mp: MessagesProvider): Fox[List[AnnotationLayer]] = { + + def createAndSaveAnnotationLayer(annotationLayerParameters: AnnotationLayerParameters): Fox[AnnotationLayer] = + for { + client <- tracingStoreService.clientFor(dataset) + tracing <- createTracingForExplorational(annotationId, annotationLayerParameters) + layerName = annotationLayerParameters.name.getOrElse( + AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) + tracingId <- tracing match { + case Left(skeleton) => client.saveSkeletonTracing(skeleton) + case Right(volume) => client.saveVolumeTracing(volume) + } + } yield + AnnotationLayer(tracingId, + annotationLayerParameters.typ, + layerName, + AnnotationLayerStatistics.zeroedForType(annotationLayerParameters.typ)) + def createAndSaveAnnotationProto(annotationId: ObjectId, annotationLayers: List[AnnotationLayer]): Fox[Unit] = { val layersProto = annotationLayers.map { l => AnnotationLayerProto( @@ -362,7 +348,7 @@ class AnnotationService @Inject()( AnnotationLayerType.toProto(l.typ) ) } - // todo pass right name, description here + //TODO pass right name, description here val annotationProto = AnnotationProto(name = None, description = None, version = 0L, 
layers = layersProto) for { tracingStoreClient <- tracingStoreService.clientFor(dataset) @@ -380,11 +366,7 @@ class AnnotationService @Inject()( We do this for *every* new layer, since we only later get its ID which determines the actual precedence. All of this is skipped if existingAnnotationLayers is empty. */ - oldPrecedenceLayer <- fetchOldPrecedenceLayer - dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "dataStore.notFoundForDataset" - precedenceProperties = oldPrecedenceLayer.map(extractPrecedenceProperties) - newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters)(p => - createAndSaveAnnotationLayer(p, precedenceProperties, dataStore)) + newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters)(createAndSaveAnnotationLayer) _ <- createAndSaveAnnotationProto(annotationId, newAnnotationLayers) } yield newAnnotationLayers } @@ -410,42 +392,17 @@ class AnnotationService @Inject()( m: MessagesProvider): Fox[Annotation] = for { dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.noAccessById" - dataSource <- datasetService.dataSourceFor(dataset) - datasetOrganization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> "organization.notFound" - usableDataSource <- dataSource.toUsable ?~> Messages("dataset.notImported", dataSource.id.name) newAnnotationId = ObjectId.generate - annotationLayers <- createTracingsForExplorational(dataset, - usableDataSource, - newAnnotationId, - annotationLayerParameters, - datasetOrganization._id) ?~> "annotation.createTracings.failed" + annotationLayers <- createLayersForExplorational( + dataset, + newAnnotationId, + annotationLayerParameters, + existingAnnotationLayers = List.empty) ?~> "annotation.createTracings.failed" teamId <- selectSuitableTeam(user, dataset) ?~> "annotation.create.forbidden" annotation = Annotation(newAnnotationId, datasetId, None, teamId, user._id, annotationLayers) _ <- annotationDAO.insertOne(annotation) } yield annotation - def 
makeAnnotationHybrid(annotation: Annotation, organizationId: String, fallbackLayerName: Option[String])( - implicit ctx: DBAccessContext, - mp: MessagesProvider): Fox[Unit] = - for { - newAnnotationLayerType <- annotation.tracingType match { - case TracingType.skeleton => Fox.successful(AnnotationLayerType.Volume) - case TracingType.volume => Fox.successful(AnnotationLayerType.Skeleton) - case _ => Fox.failure("annotation.makeHybrid.alreadyHybrid") - } - usedFallbackLayerName = if (newAnnotationLayerType == AnnotationLayerType.Volume) fallbackLayerName else None - newAnnotationLayerParameters = AnnotationLayerParameters( - newAnnotationLayerType, - usedFallbackLayerName, - autoFallbackLayer = false, - None, - Some(ResolutionRestrictions.empty), - Some(AnnotationLayer.defaultNameForType(newAnnotationLayerType)), - None - ) - _ <- addAnnotationLayer(annotation, organizationId, newAnnotationLayerParameters) ?~> "makeHybrid.createTracings.failed" - } yield () - def downsampleAnnotation(annotation: Annotation, volumeAnnotationLayer: AnnotationLayer)( implicit ctx: DBAccessContext): Fox[Unit] = for { diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 64fdcd470a4..88bd96335ac 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -128,6 +128,7 @@ GET /tracingstores/:name/dataSource GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/annotationId controllers.WKRemoteTracingStoreController.annotationIdForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataStoreUri/:datasetName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetName: String) +POST /tracingstores/:name/createTracing controllers.WKRemoteTracingStoreController.createTracing(name: String, key: String, annotationId: String) # 
User access tokens for datastore authentication POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore() @@ -146,7 +147,6 @@ PATCH /annotations/:typ/:id/transfer PATCH /annotations/:typ/:id/editLockedState controllers.AnnotationController.editLockedState(typ: String, id: String, isLockedByOwner: Boolean) GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Option[Long]) -PATCH /annotations/:id/makeHybrid controllers.AnnotationController.makeHybridWithoutType(id: String, fallbackLayerName: Option[String]) PATCH /annotations/:id/downsample controllers.AnnotationController.downsampleWithoutType(id: String, tracingId: String) PATCH /annotations/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayerWithoutType(id: String) PATCH /annotations/:id/deleteAnnotationLayer controllers.AnnotationController.deleteAnnotationLayerWithoutType(id: String, layerName: String) @@ -157,7 +157,6 @@ POST /annotations/:id/acquireMutex PATCH /annotations/addSegmentIndicesToAll controllers.AnnotationController.addSegmentIndicesToAll(parallelBatchCount: Int, dryRun: Boolean, skipTracings: Option[String]) GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Option[Long]) -PATCH /annotations/:typ/:id/makeHybrid controllers.AnnotationController.makeHybrid(typ: String, id: String, fallbackLayerName: Option[String]) PATCH /annotations/:typ/:id/downsample controllers.AnnotationController.downsample(typ: String, id: String, tracingId: String) PATCH /annotations/:typ/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayer(typ: String, id: String) PATCH /annotations/:typ/:id/deleteAnnotationLayer controllers.AnnotationController.deleteAnnotationLayer(typ: String, id: String, layerName: String) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 04dac6793a3..43f73e1a87f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -7,7 +7,7 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{ @@ -17,6 +17,7 @@ import com.scalableminds.webknossos.datastore.services.{ UserAccessRequest } import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters +import com.scalableminds.webknossos.tracingstore.tracings.TracingType import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle import play.api.libs.json.{JsObject, Json, OFormat} @@ -100,8 +101,21 @@ class TSRemoteWebknossosClient @Inject()( .postJson(annotationLayers) def createTracingFor(annotationId: String, - layerParameters: AnnotationLayerParameters): Fox[Either[SkeletonTracing, VolumeTracing]] = - ??? 
// TODO + layerParameters: AnnotationLayerParameters): Fox[Either[SkeletonTracing, VolumeTracing]] = { + val req = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/createTracing") + .addQueryString("annotationId" -> annotationId) + .addQueryString("key" -> tracingStoreKey) + layerParameters.typ match { + case AnnotationLayerType.Volume => + req + .postJsonWithProtoResponse[AnnotationLayerParameters, VolumeTracing](layerParameters)(VolumeTracing) + .map(Right(_)) + case AnnotationLayerType.Skeleton => + req + .postJsonWithProtoResponse[AnnotationLayerParameters, SkeletonTracing](layerParameters)(SkeletonTracing) + .map(Left(_)) + } + } override def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") From 6b3dd0faf882a5531d0b8c7f2341b4d980594e1a Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 21 Oct 2024 11:24:09 +0200 Subject: [PATCH 108/361] fix creating annotation layers for fresh annotation --- app/controllers/AnnotationIOController.scala | 6 +- .../WKRemoteTracingStoreController.scala | 6 +- .../AnnotationLayerPrecedence.scala | 143 +++++++++++++ app/models/annotation/AnnotationService.scala | 191 ++++-------------- app/models/annotation/nml/NmlWriter.scala | 15 +- conf/webknossos.latest.routes | 4 - 6 files changed, 196 insertions(+), 169 deletions(-) create mode 100644 app/models/annotation/AnnotationLayerPrecedence.scala diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index a3e2bdf490c..c06dc1bb7a0 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -79,6 +79,7 @@ class AnnotationIOController @Inject()( extends Controller with FoxImplicits with ProtoGeometryImplicits + with AnnotationLayerPrecedence with LazyLogging { implicit val actorSystem: ActorSystem = ActorSystem() @@ -332,9 +333,8 @@ class 
AnnotationIOController @Inject()( boundingBox = bbox, elementClass = elementClass, fallbackLayer = fallbackLayerOpt.map(_.name), - largestSegmentId = - annotationService.combineLargestSegmentIdsByPrecedence(volumeTracing.largestSegmentId, - fallbackLayerOpt.map(_.largestSegmentId)), + largestSegmentId = combineLargestSegmentIdsByPrecedence(volumeTracing.largestSegmentId, + fallbackLayerOpt.map(_.largestSegmentId)), resolutions = VolumeTracingDownsampling.magsForVolumeTracing(dataSource, fallbackLayerOpt).map(vec3IntToProto), hasSegmentIndex = Some(tracingCanHaveSegmentIndex) ) diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 2f4c966b8a1..4ce52bdef84 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -181,7 +181,11 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore implicit val ctx: DBAccessContext = GlobalAccessContext for { annotationIdValidated <- ObjectId.fromString(annotationId) - tracingEither <- annotationService.createTracingForExplorational(annotationIdValidated, request.body) + annotation <- annotationDAO.findOne(annotationIdValidated) ?~> "annotation.notFound" + dataset <- datasetDAO.findOne(annotation._dataset) + tracingEither <- annotationService.createTracingForExplorational(dataset, + request.body, + annotation.annotationLayers) tracing: GeneratedMessage = tracingEither match { case Left(s: SkeletonTracing) => s case Right(v: VolumeTracing) => v diff --git a/app/models/annotation/AnnotationLayerPrecedence.scala b/app/models/annotation/AnnotationLayerPrecedence.scala new file mode 100644 index 00000000000..8d255cae38c --- /dev/null +++ b/app/models/annotation/AnnotationLayerPrecedence.scala @@ -0,0 +1,143 @@ +package models.annotation + +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import 
com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.geometry.{ + AdditionalCoordinateProto, + NamedBoundingBoxProto, + Vec3DoubleProto, + Vec3IntProto +} +import com.scalableminds.webknossos.datastore.models.annotation.{ + AnnotationLayer, + AnnotationLayerType, + FetchedAnnotationLayer +} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{VolumeDataZipFormat, VolumeTracingDefaults} +import models.dataset.Dataset + +import scala.concurrent.ExecutionContext + +// Used to pass duplicate properties when creating a new tracing to avoid masking them. +// Uses the proto-generated geometry classes, hence the full qualifiers. +case class RedundantTracingProperties( + editPosition: Vec3IntProto, + editRotation: Vec3DoubleProto, + zoomLevel: Double, + userBoundingBoxes: Seq[NamedBoundingBoxProto], + editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto], +) + +trait AnnotationLayerPrecedence { + + protected def combineLargestSegmentIdsByPrecedence(fromNml: Option[Long], + fromFallbackLayer: Option[Option[Long]]): Option[Long] = + if (fromNml.nonEmpty) + // This was called for an NML upload. The NML had an explicit largestSegmentId. Use that. + fromNml + else if (fromFallbackLayer.nonEmpty) + // There is a fallback layer. Use its largestSegmentId, even if it is None. + // Some tracing functionality will be disabled until a segment id is set by the user. + fromFallbackLayer.flatten + else { + // There is no fallback layer. 
Start at default segment id for fresh volume layers + VolumeTracingDefaults.largestSegmentId + } + + protected def adaptSkeletonTracing( + skeletonTracing: SkeletonTracing, + oldPrecedenceLayerProperties: Option[RedundantTracingProperties]): SkeletonTracing = + oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => + skeletonTracing.copy( + editPosition = p.editPosition, + editRotation = p.editRotation, + zoomLevel = p.zoomLevel, + userBoundingBoxes = p.userBoundingBoxes, + editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates + ) + }.getOrElse(skeletonTracing) + + protected def adaptVolumeTracing(volumeTracing: VolumeTracing, + oldPrecedenceLayerProperties: Option[RedundantTracingProperties]): VolumeTracing = + oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => + volumeTracing.copy( + editPosition = p.editPosition, + editRotation = p.editRotation, + zoomLevel = p.zoomLevel, + userBoundingBoxes = p.userBoundingBoxes, + editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates + ) + }.getOrElse(volumeTracing) + + protected def getOldPrecedenceLayerProperties(existingAnnotationLayers: List[AnnotationLayer], + dataset: Dataset, + tracingStoreClient: WKRemoteTracingStoreClient)( + implicit ec: ExecutionContext): Fox[Option[RedundantTracingProperties]] = + for { + oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationLayers, dataset, tracingStoreClient) + oldPrecedenceLayerProperties: Option[RedundantTracingProperties] = oldPrecedenceLayer.map( + extractPrecedenceProperties) + } yield oldPrecedenceLayerProperties + + // If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway) + protected def selectLayerWithPrecedenceFetched( + skeletonLayers: List[FetchedAnnotationLayer], + volumeLayers: List[FetchedAnnotationLayer])(implicit ec: ExecutionContext): Fox[FetchedAnnotationLayer] = + if (skeletonLayers.nonEmpty) { + 
Fox.successful(skeletonLayers.minBy(_.tracingId)) + } else if (volumeLayers.nonEmpty) { + Fox.successful(volumeLayers.minBy(_.tracingId)) + } else Fox.failure("annotation.download.noLayers") + + private def selectLayerWithPrecedence(annotationLayers: List[AnnotationLayer])( + implicit ec: ExecutionContext): Fox[AnnotationLayer] = { + val skeletonLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Skeleton) + val volumeLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Volume) + if (skeletonLayers.nonEmpty) { + Fox.successful(skeletonLayers.minBy(_.tracingId)) + } else if (volumeLayers.nonEmpty) { + Fox.successful(volumeLayers.minBy(_.tracingId)) + } else Fox.failure("Trying to select precedence layer from empty layer list.") + } + + private def fetchOldPrecedenceLayer(existingAnnotationLayers: List[AnnotationLayer], + dataset: Dataset, + tracingStoreClient: WKRemoteTracingStoreClient)( + implicit ec: ExecutionContext): Fox[Option[FetchedAnnotationLayer]] = + if (existingAnnotationLayers.isEmpty) Fox.successful(None) + else + for { + oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) + oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) + tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, None) + else + tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, + None, + skipVolumeData = true, + volumeDataZipFormat = VolumeDataZipFormat.wkw, + dataset.voxelSize) + } yield Some(oldPrecedenceLayerFetched) + + private def extractPrecedenceProperties(oldPrecedenceLayer: FetchedAnnotationLayer): RedundantTracingProperties = + oldPrecedenceLayer.tracing match { + case Left(s) => + RedundantTracingProperties( + s.editPosition, + s.editRotation, + s.zoomLevel, + s.userBoundingBoxes ++ s.userBoundingBox.map( + com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)), + s.editPositionAdditionalCoordinates + ) + case Right(v) => + 
RedundantTracingProperties( + v.editPosition, + v.editRotation, + v.zoomLevel, + v.userBoundingBoxes ++ v.userBoundingBox.map( + com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)), + v.editPositionAdditionalCoordinates + ) + } +} diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index a6e34a8ec71..7c22ca76ec0 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -10,13 +10,7 @@ import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationProto} import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} -import com.scalableminds.webknossos.datastore.geometry.{ - AdditionalCoordinateProto, - ColorProto, - NamedBoundingBoxProto, - Vec3DoubleProto, - Vec3IntProto -} +import com.scalableminds.webknossos.datastore.geometry.ColorProto import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.annotation.{ @@ -37,7 +31,6 @@ import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParam import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ResolutionRestrictions, - VolumeDataZipFormat, VolumeTracingDefaults, VolumeTracingDownsampling } @@ -75,16 +68,6 @@ case class DownloadAnnotation(skeletonTracingIdOpt: Option[String], organizationId: String, datasetName: String) -// Used to pass duplicate properties when creating a new tracing to avoid masking them. 
-// Uses the proto-generated geometry classes, hence the full qualifiers. -case class RedundantTracingProperties( - editPosition: Vec3IntProto, - editRotation: Vec3DoubleProto, - zoomLevel: Double, - userBoundingBoxes: Seq[NamedBoundingBoxProto], - editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto], -) - class AnnotationService @Inject()( annotationInformationProvider: AnnotationInformationProvider, savedTracingInformationHandler: SavedTracingInformationHandler, @@ -114,6 +97,7 @@ class AnnotationService @Inject()( extends BoxImplicits with FoxImplicits with ProtoGeometryImplicits + with AnnotationLayerPrecedence with LazyLogging { implicit val actorSystem: ActorSystem = ActorSystem() @@ -181,41 +165,12 @@ class AnnotationService @Inject()( ) } - def combineLargestSegmentIdsByPrecedence(fromNml: Option[Long], - fromFallbackLayer: Option[Option[Long]]): Option[Long] = - if (fromNml.nonEmpty) - // This was called for an NML upload. The NML had an explicit largestSegmentId. Use that. - fromNml - else if (fromFallbackLayer.nonEmpty) - // There is a fallback layer. Use its largestSegmentId, even if it is None. - // Some tracing functionality will be disabled until a segment id is set by the user. - fromFallbackLayer.flatten - else { - // There is no fallback layer. 
Start at default segment id for fresh volume layers - VolumeTracingDefaults.largestSegmentId - } - - def createTracingForExplorational(annotationId: ObjectId, params: AnnotationLayerParameters)( + def createTracingForExplorational(dataset: Dataset, + params: AnnotationLayerParameters, + existingAnnotationLayers: List[AnnotationLayer])( implicit ctx: DBAccessContext, mp: MessagesProvider): Fox[Either[SkeletonTracing, VolumeTracing]] = { - def fetchOldPrecedenceLayer(existingAnnotationLayers: List[AnnotationLayer], - dataset: Dataset): Fox[Option[FetchedAnnotationLayer]] = - if (existingAnnotationLayers.isEmpty) Fox.successful(None) - else - for { - oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) - tracingStoreClient <- tracingStoreService.clientFor(dataset) - oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) - tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, None) - else - tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, - None, - skipVolumeData = true, - volumeDataZipFormat = VolumeDataZipFormat.wkw, - dataset.voxelSize) - } yield Some(oldPrecedenceLayerFetched) - def getAutoFallbackLayerName(dataSource: DataSource): Option[String] = dataSource.dataLayers.find { case _: SegmentationLayer => true @@ -240,37 +195,24 @@ class AnnotationService @Inject()( fallbackLayer.elementClass) } yield fallbackLayer - def extractPrecedenceProperties(oldPrecedenceLayer: FetchedAnnotationLayer): RedundantTracingProperties = - oldPrecedenceLayer.tracing match { - case Left(s) => - RedundantTracingProperties( - s.editPosition, - s.editRotation, - s.zoomLevel, - s.userBoundingBoxes ++ s.userBoundingBox.map( - com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)), - s.editPositionAdditionalCoordinates - ) - case Right(v) => - RedundantTracingProperties( - v.editPosition, - v.editRotation, - v.zoomLevel, - v.userBoundingBoxes ++ v.userBoundingBox.map( - 
com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _)), - v.editPositionAdditionalCoordinates - ) - } - for { - annotation <- annotationDAO.findOne(annotationId) - dataset <- datasetDAO.findOne(annotation._dataset) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) ?~> "dataStore.notFoundForDataset" inboxDataSource <- datasetService.dataSourceFor(dataset) dataSource <- inboxDataSource.toUsable ?~> Messages("dataset.notImported", inboxDataSource.id.name) - oldPrecedenceLayer <- fetchOldPrecedenceLayer(annotation.annotationLayers, dataset) - oldPrecedenceLayerProperties: Option[RedundantTracingProperties] = oldPrecedenceLayer.map( - extractPrecedenceProperties) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + + /* + Note that the tracings have redundant properties, with a precedence logic selecting a layer + from which the values are used. Adding a layer may change this precedence, so the redundant + values need to be copied to the new layer from the layer that had precedence before. Otherwise, those + properties would be masked and lost. + Unfortunately, their history is still lost since the new layer gets only the latest snapshot. + We do this for *every* new layer, since we only later get its ID which determines the actual precedence. + All of this is skipped if existingAnnotationLayers is empty. 
+ */ + oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationLayers, + dataset, + tracingStoreClient) tracing <- params.typ match { case AnnotationLayerType.Skeleton => val skeleton = SkeletonTracingDefaults.createInstance.copy( @@ -279,15 +221,7 @@ class AnnotationService @Inject()( organizationId = Some(dataset._organization), additionalAxes = AdditionalAxis.toProto(dataSource.additionalAxesUnion) ) - val skeletonAdapted = oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => - skeleton.copy( - editPosition = p.editPosition, - editRotation = p.editRotation, - zoomLevel = p.zoomLevel, - userBoundingBoxes = p.userBoundingBoxes, - editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates - ) - }.getOrElse(skeleton) + val skeletonAdapted = adaptSkeletonTracing(skeleton, oldPrecedenceLayerProperties) Fox.successful(Left(skeletonAdapted)) case AnnotationLayerType.Volume => val autoFallbackLayerName = @@ -303,15 +237,7 @@ class AnnotationService @Inject()( resolutionRestrictions = params.resolutionRestrictions.getOrElse(ResolutionRestrictions.empty), mappingName = params.mappingName ) - volumeTracingAdapted = oldPrecedenceLayerProperties.map { p: RedundantTracingProperties => - volumeTracing.copy( - editPosition = p.editPosition, - editRotation = p.editRotation, - zoomLevel = p.zoomLevel, - userBoundingBoxes = p.userBoundingBoxes, - editPositionAdditionalCoordinates = p.editPositionAdditionalCoordinates - ) - }.getOrElse(volumeTracing) + volumeTracingAdapted = adaptVolumeTracing(volumeTracing, oldPrecedenceLayerProperties) } yield Right(volumeTracingAdapted) } } yield tracing @@ -322,26 +248,25 @@ class AnnotationService @Inject()( allAnnotationLayerParameters: List[AnnotationLayerParameters], existingAnnotationLayers: List[AnnotationLayer])( implicit ctx: DBAccessContext, - mp: MessagesProvider): Fox[List[AnnotationLayer]] = { - - def createAndSaveAnnotationLayer(annotationLayerParameters: 
AnnotationLayerParameters): Fox[AnnotationLayer] = - for { - client <- tracingStoreService.clientFor(dataset) - tracing <- createTracingForExplorational(annotationId, annotationLayerParameters) - layerName = annotationLayerParameters.name.getOrElse( - AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) - tracingId <- tracing match { - case Left(skeleton) => client.saveSkeletonTracing(skeleton) - case Right(volume) => client.saveVolumeTracing(volume) - } - } yield - AnnotationLayer(tracingId, - annotationLayerParameters.typ, - layerName, - AnnotationLayerStatistics.zeroedForType(annotationLayerParameters.typ)) - - def createAndSaveAnnotationProto(annotationId: ObjectId, annotationLayers: List[AnnotationLayer]): Fox[Unit] = { - val layersProto = annotationLayers.map { l => + mp: MessagesProvider): Fox[List[AnnotationLayer]] = + for { + tracingStoreClient <- tracingStoreService.clientFor(dataset) + newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters) { annotationLayerParameters => + for { + tracing <- createTracingForExplorational(dataset, annotationLayerParameters, existingAnnotationLayers) + layerName = annotationLayerParameters.name.getOrElse( + AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) + tracingId <- tracing match { + case Left(skeleton) => tracingStoreClient.saveSkeletonTracing(skeleton) + case Right(volume) => tracingStoreClient.saveVolumeTracing(volume) + } + } yield + AnnotationLayer(tracingId, + annotationLayerParameters.typ, + layerName, + AnnotationLayerStatistics.zeroedForType(annotationLayerParameters.typ)) + } + layersProto = newAnnotationLayers.map { l => AnnotationLayerProto( l.tracingId, l.name, @@ -349,41 +274,9 @@ class AnnotationService @Inject()( ) } //TODO pass right name, description here - val annotationProto = AnnotationProto(name = None, description = None, version = 0L, layers = layersProto) - for { - tracingStoreClient <- tracingStoreService.clientFor(dataset) - _ <- 
tracingStoreClient.saveAnnotationProto(annotationId, annotationProto) - } yield () - } - - for { - /* - Note that the tracings have redundant properties, with a precedence logic selecting a layer - from which the values are used. Adding a layer may change this precedence, so the redundant - values need to be copied to the new layer from the layer that had precedence before. Otherwise, those - properties would be masked and lost. - Unfortunately, their history is still lost since the new layer gets only the latest snapshot. - We do this for *every* new layer, since we only later get its ID which determines the actual precedence. - All of this is skipped if existingAnnotationLayers is empty. - */ - newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters)(createAndSaveAnnotationLayer) - _ <- createAndSaveAnnotationProto(annotationId, newAnnotationLayers) + annotationProto = AnnotationProto(name = None, description = None, version = 0L, layers = layersProto) + _ <- tracingStoreClient.saveAnnotationProto(annotationId, annotationProto) } yield newAnnotationLayers - } - - /* - If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway) - This needs to match the code in NmlWriter’s selectLayerWithPrecedence, though the types are different - */ - private def selectLayerWithPrecedence(annotationLayers: List[AnnotationLayer]): Fox[AnnotationLayer] = { - val skeletonLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Skeleton) - val volumeLayers = annotationLayers.filter(_.typ == AnnotationLayerType.Volume) - if (skeletonLayers.nonEmpty) { - Fox.successful(skeletonLayers.minBy(_.tracingId)) - } else if (volumeLayers.nonEmpty) { - Fox.successful(volumeLayers.minBy(_.tracingId)) - } else Fox.failure("Trying to select precedence layer from empty layer list.") - } def createExplorationalFor(user: User, datasetId: ObjectId, diff --git a/app/models/annotation/nml/NmlWriter.scala 
b/app/models/annotation/nml/NmlWriter.scala index 4a667dd5f57..1a3058c0d46 100644 --- a/app/models/annotation/nml/NmlWriter.scala +++ b/app/models/annotation/nml/NmlWriter.scala @@ -12,7 +12,7 @@ import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayerType, FetchedAnnotationLayer} import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.sun.xml.txw2.output.IndentingXMLStreamWriter -import models.annotation.Annotation +import models.annotation.{Annotation, AnnotationLayerPrecedence} import models.task.Task import models.user.User @@ -37,7 +37,7 @@ case class NmlParameters( editPositionAdditionalCoordinates: Seq[AdditionalCoordinateProto] ) -class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { +class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits with AnnotationLayerPrecedence { private lazy val outputService = XMLOutputFactory.newInstance() def toNmlStream(name: String, @@ -129,7 +129,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { datasetName: String, voxelSize: Option[VoxelSize]): Fox[NmlParameters] = for { - parameterSourceAnnotationLayer <- selectLayerWithPrecedence(skeletonLayers, volumeLayers) + parameterSourceAnnotationLayer <- selectLayerWithPrecedenceFetched(skeletonLayers, volumeLayers) nmlParameters = parameterSourceAnnotationLayer.tracing match { case Left(s) => NmlParameters( @@ -168,15 +168,6 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { } } yield nmlParameters - // If there is more than one tracing, select the one that has precedence for the parameters (they should be identical anyway) - private def selectLayerWithPrecedence(skeletonLayers: List[FetchedAnnotationLayer], - volumeLayers: List[FetchedAnnotationLayer]): Fox[FetchedAnnotationLayer] = - if (skeletonLayers.nonEmpty) { - 
Fox.successful(skeletonLayers.minBy(_.tracingId)) - } else if (volumeLayers.nonEmpty) { - Fox.successful(volumeLayers.minBy(_.tracingId)) - } else Fox.failure("annotation.download.noLayers") - private def writeParameters(parameters: NmlParameters)(implicit writer: XMLStreamWriter): Unit = Xml.withinElementSync("parameters") { Xml.withinElementSync("experiment") { diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 88bd96335ac..94240d3faad 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -148,8 +148,6 @@ PATCH /annotations/:typ/:id/editLockedState GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Option[Long]) PATCH /annotations/:id/downsample controllers.AnnotationController.downsampleWithoutType(id: String, tracingId: String) -PATCH /annotations/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayerWithoutType(id: String) -PATCH /annotations/:id/deleteAnnotationLayer controllers.AnnotationController.deleteAnnotationLayerWithoutType(id: String, layerName: String) DELETE /annotations/:id controllers.AnnotationController.cancelWithoutType(id: String) POST /annotations/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.mergeWithoutType(id: String, mergedTyp: String, mergedId: String) GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) @@ -158,8 +156,6 @@ PATCH /annotations/addSegmentIndicesToAll GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Option[Long]) PATCH /annotations/:typ/:id/downsample controllers.AnnotationController.downsample(typ: String, id: String, tracingId: String) -PATCH /annotations/:typ/:id/addAnnotationLayer controllers.AnnotationController.addAnnotationLayer(typ: String, id: 
String) -PATCH /annotations/:typ/:id/deleteAnnotationLayer controllers.AnnotationController.deleteAnnotationLayer(typ: String, id: String, layerName: String) DELETE /annotations/:typ/:id controllers.AnnotationController.cancel(typ: String, id: String) POST /annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.merge(typ: String, id: String, mergedTyp: String, mergedId: String) GET /annotations/:typ/:id/download controllers.AnnotationIOController.download(typ: String, id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) From 344fd01c44cf2d6e37988d9892fad298a0d4b5da Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 21 Oct 2024 11:49:49 +0200 Subject: [PATCH 109/361] fetch precedence layer from previous version --- .../WKRemoteTracingStoreController.scala | 8 ++++++-- .../AnnotationLayerPrecedence.scala | 12 ++++++++---- app/models/annotation/AnnotationService.scala | 9 +++++++-- conf/webknossos.latest.routes | 2 +- .../TSRemoteWebknossosClient.scala | 5 +++-- .../annotation/TSAnnotationService.scala | 19 ++++++++++--------- 6 files changed, 35 insertions(+), 20 deletions(-) diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 4ce52bdef84..eebbf594f88 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -175,7 +175,10 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } } - def createTracing(name: String, key: String, annotationId: String): Action[AnnotationLayerParameters] = + def createTracing(name: String, + key: String, + annotationId: String, + previousVersion: Long): Action[AnnotationLayerParameters] = Action.async(validateJson[AnnotationLayerParameters]) { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = 
GlobalAccessContext @@ -185,7 +188,8 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore dataset <- datasetDAO.findOne(annotation._dataset) tracingEither <- annotationService.createTracingForExplorational(dataset, request.body, - annotation.annotationLayers) + annotation.annotationLayers, + Some(previousVersion)) tracing: GeneratedMessage = tracingEither match { case Left(s: SkeletonTracing) => s case Right(v: VolumeTracing) => v diff --git a/app/models/annotation/AnnotationLayerPrecedence.scala b/app/models/annotation/AnnotationLayerPrecedence.scala index 8d255cae38c..96ac3ec77fd 100644 --- a/app/models/annotation/AnnotationLayerPrecedence.scala +++ b/app/models/annotation/AnnotationLayerPrecedence.scala @@ -20,7 +20,6 @@ import models.dataset.Dataset import scala.concurrent.ExecutionContext // Used to pass duplicate properties when creating a new tracing to avoid masking them. -// Uses the proto-generated geometry classes, hence the full qualifiers. case class RedundantTracingProperties( editPosition: Vec3IntProto, editRotation: Vec3DoubleProto, @@ -71,11 +70,15 @@ trait AnnotationLayerPrecedence { }.getOrElse(volumeTracing) protected def getOldPrecedenceLayerProperties(existingAnnotationLayers: List[AnnotationLayer], + previousVersion: Option[Long], dataset: Dataset, tracingStoreClient: WKRemoteTracingStoreClient)( implicit ec: ExecutionContext): Fox[Option[RedundantTracingProperties]] = for { - oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationLayers, dataset, tracingStoreClient) + oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationLayers, + previousVersion, + dataset, + tracingStoreClient) oldPrecedenceLayerProperties: Option[RedundantTracingProperties] = oldPrecedenceLayer.map( extractPrecedenceProperties) } yield oldPrecedenceLayerProperties @@ -102,6 +105,7 @@ trait AnnotationLayerPrecedence { } private def fetchOldPrecedenceLayer(existingAnnotationLayers: List[AnnotationLayer], + previousVersion: 
Option[Long], dataset: Dataset, tracingStoreClient: WKRemoteTracingStoreClient)( implicit ec: ExecutionContext): Fox[Option[FetchedAnnotationLayer]] = @@ -110,10 +114,10 @@ trait AnnotationLayerPrecedence { for { oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) - tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, None) + tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, previousVersion) else tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, - None, + previousVersion, skipVolumeData = true, volumeDataZipFormat = VolumeDataZipFormat.wkw, dataset.voxelSize) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 7c22ca76ec0..bda33814f37 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -167,7 +167,8 @@ class AnnotationService @Inject()( def createTracingForExplorational(dataset: Dataset, params: AnnotationLayerParameters, - existingAnnotationLayers: List[AnnotationLayer])( + existingAnnotationLayers: List[AnnotationLayer], + previousVersion: Option[Long])( implicit ctx: DBAccessContext, mp: MessagesProvider): Fox[Either[SkeletonTracing, VolumeTracing]] = { @@ -211,6 +212,7 @@ class AnnotationService @Inject()( All of this is skipped if existingAnnotationLayers is empty. 
*/ oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationLayers, + previousVersion, dataset, tracingStoreClient) tracing <- params.typ match { @@ -253,7 +255,10 @@ class AnnotationService @Inject()( tracingStoreClient <- tracingStoreService.clientFor(dataset) newAnnotationLayers <- Fox.serialCombined(allAnnotationLayerParameters) { annotationLayerParameters => for { - tracing <- createTracingForExplorational(dataset, annotationLayerParameters, existingAnnotationLayers) + tracing <- createTracingForExplorational(dataset, + annotationLayerParameters, + existingAnnotationLayers, + previousVersion = None) layerName = annotationLayerParameters.name.getOrElse( AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) tracingId <- tracing match { diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 94240d3faad..a1ace976868 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -128,7 +128,7 @@ GET /tracingstores/:name/dataSource GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/annotationId controllers.WKRemoteTracingStoreController.annotationIdForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataStoreUri/:datasetName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetName: String) -POST /tracingstores/:name/createTracing controllers.WKRemoteTracingStoreController.createTracing(name: String, key: String, annotationId: String) +POST /tracingstores/:name/createTracing controllers.WKRemoteTracingStoreController.createTracing(name: String, key: String, annotationId: String, previousVersion: Long) # User access tokens for datastore authentication POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore() diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 43f73e1a87f..52ddf6f03f4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -17,7 +17,6 @@ import com.scalableminds.webknossos.datastore.services.{ UserAccessRequest } import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters -import com.scalableminds.webknossos.tracingstore.tracings.TracingType import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle import play.api.libs.json.{JsObject, Json, OFormat} @@ -101,9 +100,11 @@ class TSRemoteWebknossosClient @Inject()( .postJson(annotationLayers) def createTracingFor(annotationId: String, - layerParameters: AnnotationLayerParameters): Fox[Either[SkeletonTracing, VolumeTracing]] = { + layerParameters: AnnotationLayerParameters, + previousVersion: Long): Fox[Either[SkeletonTracing, VolumeTracing]] = { val req = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/createTracing") .addQueryString("annotationId" -> annotationId) + .addQueryString("previousVersion" -> previousVersion.toString) // used for fetching old precedence layers .addQueryString("key" -> tracingStoreKey) layerParameters.typ match { case AnnotationLayerType.Volume => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index a8911ed7e69..5a9b87a4c7c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -108,7 +108,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { updated <- updateAction match { case a: AddLayerAnnotationUpdateAction => - addLayer(annotationId, annotationWithTracings, a) + addLayer(annotationId, annotationWithTracings, a, targetVersion) case a: DeleteLayerAnnotationUpdateAction => Fox.successful(annotationWithTracings.deleteTracing(a)) case a: UpdateLayerMetadataAnnotationUpdateAction => @@ -134,14 +134,15 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } yield updated - private def addLayer( - annotationId: String, - annotationWithTracings: AnnotationWithTracings, - action: AddLayerAnnotationUpdateAction)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = + private def addLayer(annotationId: String, + annotationWithTracings: AnnotationWithTracings, + action: AddLayerAnnotationUpdateAction, + targetVersion: Long)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { - _ <- Fox.successful(()) - tracingId <- action.tracingId ?~> "add layer action has no tracingId" - tracing <- remoteWebknossosClient.createTracingFor(annotationId, action.layerParameters) + tracingId <- action.tracingId.toFox ?~> "add layer action has no tracingId" + tracing <- remoteWebknossosClient.createTracingFor(annotationId, + action.layerParameters, + previousVersion = targetVersion - 1) updated = annotationWithTracings.addLayer(action, tracingId, tracing) } yield updated @@ -436,7 +437,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { updated <- updateIter(Some(annotation.withNewUpdaters(annotation.version, targetVersion)), updates) updatedWithNewVerson = updated.withVersion(targetVersion) - _ = logger.info(s"flushing v${targetVersion}, with ${updated.skeletonStats}") + _ = logger.info(s"flushing v$targetVersion, with 
${updated.skeletonStats}") _ <- updatedWithNewVerson.flushBufferedUpdates() _ <- flushUpdatedTracings(updatedWithNewVerson) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) From 5f13149afce4bc624e0b099f2697d516723b3df7 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 21 Oct 2024 11:51:47 +0200 Subject: [PATCH 110/361] isolate add layer update actions --- test/backend/UpdateGroupHandlingUnitTestSuite.scala | 2 +- .../annotation/TSAnnotationService.scala | 2 +- .../annotation/UpdateGroupHandling.scala | 13 +++++++------ 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/test/backend/UpdateGroupHandlingUnitTestSuite.scala b/test/backend/UpdateGroupHandlingUnitTestSuite.scala index bdc06c79f36..dec49103cd1 100644 --- a/test/backend/UpdateGroupHandlingUnitTestSuite.scala +++ b/test/backend/UpdateGroupHandlingUnitTestSuite.scala @@ -29,7 +29,7 @@ class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) )), ) - val res = regroupByRevertActions(updateGroupsBefore) + val res = regroupByIsolationSensitiveActions(updateGroupsBefore) assert(res.length == 3) assert(res(1)._2.length == 1) assert(res(1)._1 == 6L) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 5a9b87a4c7c..6c8a91c6e80 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -272,7 +272,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { updateGroupsAsSaved <- 
findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" - updatesGroupsRegrouped = regroupByRevertActions(updateGroupsAsSaved) + updatesGroupsRegrouped = regroupByIsolationSensitiveActions(updateGroupsAsSaved) updatesFlat = updatesGroupsRegrouped.flatMap(_._2) annotationWithTracings <- findTracingsForUpdates(annotation, updatesFlat, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala index 6b8f4cf0e04..6893d1bcdc5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala @@ -4,12 +4,12 @@ import collections.SequenceUtils trait UpdateGroupHandling { - def regroupByRevertActions( + def regroupByIsolationSensitiveActions( updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[(Long, List[UpdateAction])] = { val splitGroupLists: List[List[(Long, List[UpdateAction])]] = SequenceUtils.splitAndIsolate(updateActionGroupsWithVersions.reverse)(actionGroup => - actionGroup._2.exists(updateAction => isRevertAction(updateAction))) - // TODO assert that the groups that contain revert actions contain nothing else + actionGroup._2.exists(updateAction => isIsolationSensitiveAction(updateAction))) + // TODO assert that the *groups* that contain revert actions contain nothing else // TODO test this splitGroupLists.flatMap { groupsToConcatenate: List[(Long, List[UpdateAction])] => @@ -24,8 +24,9 @@ trait UpdateGroupHandling { targetVersionOpt.map(targetVersion => (targetVersion, updates)) } - private def isRevertAction(a: UpdateAction): Boolean = a match { - case _: RevertToVersionUpdateAction => true - case _ => false + private def isIsolationSensitiveAction(a: UpdateAction): 
Boolean = a match { + case _: RevertToVersionUpdateAction => true + case _: AddLayerAnnotationUpdateAction => true + case _ => false } } From d5b538649cae31d08b9274e2290ec911857ca400 Mon Sep 17 00:00:00 2001 From: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> Date: Mon, 21 Oct 2024 16:32:26 +0200 Subject: [PATCH 111/361] Unified annotation restore (#8136) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * WIP: unified version restore frontend * WIP unified version restore * add segmentation layer name to relevant version restore entries & save correct annotation version in store when fetching an annotation version * small cleanup * fix compiler warning, remove comment --------- Co-authored-by: Michael Büßemeyer Co-authored-by: Florian M --- frontend/javascripts/admin/admin_rest_api.ts | 30 +-- frontend/javascripts/oxalis/api/api_latest.ts | 5 +- frontend/javascripts/oxalis/controller.tsx | 12 +- frontend/javascripts/oxalis/default_state.ts | 1 + frontend/javascripts/oxalis/model.ts | 5 +- .../model/accessors/annotation_accessor.ts | 72 +++---- .../oxalis/model/actions/save_actions.ts | 12 +- .../model/bucket_data_handling/pushqueue.ts | 8 +- .../javascripts/oxalis/model/data_layer.ts | 9 +- .../compaction/compact_toggle_actions.ts | 1 - .../compaction/compact_update_actions.ts | 1 - .../oxalis/model/reducers/save_reducer.ts | 40 +--- .../oxalis/model/sagas/proofread_saga.ts | 9 +- .../oxalis/model/sagas/save_saga.ts | 20 +- .../oxalis/model/sagas/save_saga_constants.ts | 6 +- .../oxalis/model/sagas/volumetracing_saga.tsx | 6 +- .../oxalis/model_initialization.ts | 15 +- frontend/javascripts/oxalis/store.ts | 7 +- .../oxalis/view/action-bar/save_button.tsx | 1 - .../dataset_info_tab_view.tsx | 7 +- .../right-border-tabs/skeleton_tab_view.tsx | 8 +- .../javascripts/oxalis/view/version_entry.tsx | 134 +++++++++---- .../javascripts/oxalis/view/version_list.tsx | 83 +++----- 
.../javascripts/oxalis/view/version_view.tsx | 187 ++++++------------ frontend/javascripts/types/api_flow_types.ts | 1 + .../annotation/AnnotationLayerType.scala | 2 + 26 files changed, 277 insertions(+), 405 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 28204b26da5..aa2e4b95e8e 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -83,7 +83,6 @@ import type { import type { NewTask, TaskCreationResponseContainer } from "admin/task/task_create_bulk_view"; import type { QueryObject } from "admin/task/task_search_form"; import { V3 } from "libs/mjs"; -import type { Versions } from "oxalis/view/version_view"; import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_view_configuration_defaults"; import { parseProtoListOfLong, @@ -97,7 +96,6 @@ import Toast from "libs/toast"; import * as Utils from "libs/utils"; import messages from "messages"; import window, { location } from "libs/window"; -import type { SaveQueueType } from "oxalis/model/actions/save_actions"; import type { DatasourceConfiguration } from "types/schemas/datasource.types"; import { doWithToken } from "./api/token"; import type BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; @@ -841,12 +839,12 @@ export function createExplorational( export async function getTracingsForAnnotation( annotation: APIAnnotation, - versions: Versions = {}, + version: number | null | undefined, ): Promise> { const skeletonLayers = annotation.annotationLayers.filter((layer) => layer.typ === "Skeleton"); const fullAnnotationLayers = await Promise.all( annotation.annotationLayers.map((layer) => - getTracingForAnnotationType(annotation, layer, versions), + getTracingForAnnotationType(annotation, layer, version), ), ); @@ -871,27 +869,12 @@ export async function acquireAnnotationMutex( return { canEdit, blockedByUser }; } -function extractVersion( - versions: 
Versions, - tracingId: string, - typ: "Volume" | "Skeleton", -): number | null | undefined { - if (typ === "Skeleton") { - return versions.skeleton; - } else if (versions.volumes != null) { - return versions.volumes[tracingId]; - } - - return null; -} - export async function getTracingForAnnotationType( annotation: APIAnnotation, annotationLayerDescriptor: AnnotationLayerDescriptor, - versions: Versions = {}, + version?: number | null | undefined, // TODO: Use this parameter ): Promise { const { tracingId, typ } = annotationLayerDescriptor; - const version = extractVersion(versions, tracingId, typ); const tracingType = typ.toLowerCase() as "skeleton" | "volume"; const possibleVersionString = version != null ? `&version=${version}` : ""; const tracingArrayBuffer = await doWithToken((token) => @@ -1042,16 +1025,17 @@ export async function downloadAnnotation( annotationId: string, annotationType: APIAnnotationType, showVolumeFallbackDownloadWarning: boolean = false, - versions: Versions = {}, + _version: number | null | undefined = null, downloadFileFormat: "zarr3" | "wkw" | "nml" = "wkw", includeVolumeData: boolean = true, ) { const searchParams = new URLSearchParams(); - Object.entries(versions).forEach(([key, val]) => { + // TODO: Use the version parameter + /*Object.entries(versions).forEach(([key, val]) => { if (val != null) { searchParams.append(`${key}Version`, val.toString()); } - }); + });*/ if (includeVolumeData && showVolumeFallbackDownloadWarning) { Toast.info(messages["annotation.no_fallback_data_included"], { diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index 404d508f660..b1d8d606574 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -7,7 +7,6 @@ import { getConstructorForElementClass } from "oxalis/model/bucket_data_handling import { type APICompoundType, APICompoundTypeEnum, type ElementClass } from "types/api_flow_types"; import 
{ InputKeyboardNoLoop } from "libs/input"; import { M4x4, type Matrix4x4, V3, type Vector16 } from "libs/mjs"; -import type { Versions } from "oxalis/view/version_view"; import { addTreesAndGroupsAction, setActiveNodeAction, @@ -1115,7 +1114,7 @@ class TracingApi { newMaybeCompoundType: APICompoundType | null, newAnnotationId: string, newControlMode: ControlMode, - versions?: Versions, + version?: number | undefined | null, keepUrlState: boolean = false, ) { if (newControlMode === ControlModeEnum.VIEW) @@ -1134,7 +1133,7 @@ class TracingApi { type: newControlMode, }, false, - versions, + version, ); Store.dispatch(discardSaveQueuesAction()); Store.dispatch(wkReadyAction()); diff --git a/frontend/javascripts/oxalis/controller.tsx b/frontend/javascripts/oxalis/controller.tsx index 0912a7babe9..537977d8e9d 100644 --- a/frontend/javascripts/oxalis/controller.tsx +++ b/frontend/javascripts/oxalis/controller.tsx @@ -90,14 +90,12 @@ class Controller extends React.PureComponent { tryFetchingModel() { this.props.setControllerStatus("loading"); // Preview a working annotation version if the showVersionRestore URL parameter is supplied - const versions = Utils.hasUrlParam("showVersionRestore") - ? { - skeleton: Utils.hasUrlParam("skeletonVersion") - ? Number.parseInt(Utils.getUrlParamValue("skeletonVersion")) - : 1, - } + const version = Utils.hasUrlParam("showVersionRestore") + ? Utils.hasUrlParam("version") + ? 
Number.parseInt(Utils.getUrlParamValue("version")) + : 1 : undefined; - Model.fetch(this.props.initialMaybeCompoundType, this.props.initialCommandType, true, versions) + Model.fetch(this.props.initialMaybeCompoundType, this.props.initialCommandType, true, version) .then(() => this.modelFetchDone()) .catch((error) => { this.props.setControllerStatus("failedLoading"); diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 6124b8cdfa7..8f42b637ef8 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -177,6 +177,7 @@ const defaultState: OxalisState = { othersMayEdit: false, blockedByUser: null, annotationLayers: [], + version: 0, }, save: { queue: [], diff --git a/frontend/javascripts/oxalis/model.ts b/frontend/javascripts/oxalis/model.ts index f3c7e63e7bf..07f84588d31 100644 --- a/frontend/javascripts/oxalis/model.ts +++ b/frontend/javascripts/oxalis/model.ts @@ -1,6 +1,5 @@ import _ from "lodash"; import type { Vector3 } from "oxalis/constants"; -import type { Versions } from "oxalis/view/version_view"; import { getActiveSegmentationTracingLayer } from "oxalis/model/accessors/volumetracing_accessor"; import { getActiveMagIndexForLayer } from "oxalis/model/accessors/flycam_accessor"; import { @@ -32,14 +31,14 @@ export class OxalisModel { initialMaybeCompoundType: APICompoundType | null, initialCommandType: TraceOrViewCommand, initialFetch: boolean, - versions?: Versions, + version?: number | undefined | null, ) { try { const initializationInformation = await initialize( initialMaybeCompoundType, initialCommandType, initialFetch, - versions, + version, ); if (initializationInformation) { diff --git a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts index 1949bbadd23..092a5657d80 100644 --- a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts +++ 
b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts @@ -1,6 +1,5 @@ import _ from "lodash"; import type { OxalisState, Tracing } from "oxalis/store"; -import { getVolumeTracingById } from "./volumetracing_accessor"; import type { APIAnnotationInfo } from "types/api_flow_types"; import type { EmptyObject } from "types/globals"; @@ -47,60 +46,35 @@ type TracingStatsHelper = { // biome-ignore lint/complexity/noBannedTypes: {} should be avoided actually export type CombinedTracingStats = (SkeletonTracingStats | {}) & (VolumeTracingStats | {}); -export function getStats( - tracing: Tracing, - saveQueueType: "skeleton" | "volume" | "mapping", - tracingId: string, -): TracingStats | null { - switch (saveQueueType) { - case "skeleton": { - if (!tracing.skeleton) { - return null; - } - const trees = tracing.skeleton.trees; - return { - treeCount: _.size(trees), - nodeCount: _.reduce(trees, (sum, tree) => sum + tree.nodes.size(), 0), - edgeCount: _.reduce(trees, (sum, tree) => sum + tree.edges.size(), 0), - branchPointCount: _.reduce(trees, (sum, tree) => sum + _.size(tree.branchPoints), 0), - }; - } - case "volume": { - const volumeTracing = getVolumeTracingById(tracing, tracingId); - return { - segmentCount: volumeTracing.segments.size(), - }; - } - default: - return null; +export function getStats(tracing: Tracing): CombinedTracingStats { + const { skeleton, volumes } = tracing; + let totalSegmentCount = 0; + for (const volumeTracing of volumes) { + totalSegmentCount += volumeTracing.segments.size(); } -} - -export function getCombinedStats(tracing: Tracing): CombinedTracingStats { - const aggregatedStats: TracingStatsHelper = {}; - - if (tracing.skeleton) { - const skeletonStats = getStats(tracing, "skeleton", tracing.skeleton.tracingId); - if (skeletonStats && "treeCount" in skeletonStats) { - const { treeCount, nodeCount, edgeCount, branchPointCount } = skeletonStats; - aggregatedStats.treeCount = treeCount; - aggregatedStats.nodeCount = nodeCount; - 
aggregatedStats.edgeCount = edgeCount; - aggregatedStats.branchPointCount = branchPointCount; - } + let stats: TracingStats = { + segmentCount: totalSegmentCount, + }; + if (skeleton) { + stats = { + ...stats, + treeCount: _.size(skeleton.trees), + nodeCount: _.reduce(skeleton.trees, (sum, tree) => sum + tree.nodes.size(), 0), + edgeCount: _.reduce(skeleton.trees, (sum, tree) => sum + tree.edges.size(), 0), + branchPointCount: _.reduce(skeleton.trees, (sum, tree) => sum + _.size(tree.branchPoints), 0), + }; } + return stats; +} +export function getCreationTimestamp(tracing: Tracing) { + let timestamp = tracing.skeleton?.createdTimestamp; for (const volumeTracing of tracing.volumes) { - const volumeStats = getStats(tracing, "volume", volumeTracing.tracingId); - if (volumeStats && "segmentCount" in volumeStats) { - if (aggregatedStats.segmentCount == null) { - aggregatedStats.segmentCount = 0; - } - aggregatedStats.segmentCount += volumeStats.segmentCount; + if (!timestamp || volumeTracing.createdTimestamp < timestamp) { + timestamp = volumeTracing.createdTimestamp; } } - - return aggregatedStats; + return timestamp || 0; } export function getCombinedStatsFromServerAnnotation( diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index 42d8e47747c..870ba1f730f 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -30,16 +30,14 @@ export type SaveAction = export const pushSaveQueueTransaction = ( items: Array, - saveQueueType: SaveQueueType, tracingId: string, transactionId: string = getUid(), ) => ({ type: "PUSH_SAVE_QUEUE_TRANSACTION", items, - saveQueueType, - tracingId, transactionId, + tracingId, }) as const; export const saveNowAction = () => @@ -70,16 +68,10 @@ export const setLastSaveTimestampAction = () => timestamp: Date.now(), }) as const; -export const setVersionNumberAction = ( - version: number, 
- saveQueueType: SaveQueueType, - tracingId: string, -) => +export const setVersionNumberAction = (version: number) => ({ type: "SET_VERSION_NUMBER", version, - saveQueueType, - tracingId, }) as const; export const undoAction = (callback?: () => void) => diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts index 894261633a5..c5e1294bf6e 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts @@ -17,6 +17,7 @@ const PUSH_DEBOUNCE_TIME = 1000; class PushQueue { cube: DataCube; + tracingId: string; // The pendingBuckets contains all buckets that should be: // - snapshotted, @@ -41,8 +42,9 @@ class PushQueue { // transaction. private waitTimeStartTimeStamp: number | null = null; - constructor(cube: DataCube) { + constructor(cube: DataCube, tracingId: string) { this.cube = cube; + this.tracingId = tracingId; this.pendingBuckets = new Set(); } @@ -131,7 +133,7 @@ class PushQueue { push = createDebouncedAbortableParameterlessCallable(this.pushImpl, PUSH_DEBOUNCE_TIME, this); - async pushTransaction(batch: Array): Promise { + private async pushTransaction(batch: Array): Promise { /* * Create a transaction from the batch and push it into the save queue. 
*/ @@ -152,7 +154,7 @@ class PushQueue { const items = await this.fifoResolver.orderedWaitFor( createCompressedUpdateBucketActions(batch), ); - Store.dispatch(pushSaveQueueTransaction(items, "volume", this.cube.layerName)); + Store.dispatch(pushSaveQueueTransaction(items, this.tracingId, this.cube.layerName)); this.compressingBucketCount -= batch.length; } catch (error) { diff --git a/frontend/javascripts/oxalis/model/data_layer.ts b/frontend/javascripts/oxalis/model/data_layer.ts index d5b3053c543..5ceb62998aa 100644 --- a/frontend/javascripts/oxalis/model/data_layer.ts +++ b/frontend/javascripts/oxalis/model/data_layer.ts @@ -21,7 +21,12 @@ class DataLayer { fallbackLayerInfo: DataLayerType | null | undefined; isSegmentation: boolean; - constructor(layerInfo: DataLayerType, textureWidth: number, dataTextureCount: number) { + constructor( + layerInfo: DataLayerType, + textureWidth: number, + dataTextureCount: number, + tracingId: string, + ) { this.name = layerInfo.name; this.fallbackLayer = "fallbackLayer" in layerInfo && layerInfo.fallbackLayer != null @@ -46,7 +51,7 @@ class DataLayer { this.name, ); this.pullQueue = new PullQueue(this.cube, layerInfo.name, dataset.dataStore); - this.pushQueue = new PushQueue(this.cube); + this.pushQueue = new PushQueue(this.cube, tracingId); this.cube.initializeWithQueues(this.pullQueue, this.pushQueue); if (this.isSegmentation) { diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts index fd989003fab..cc7ee5af199 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts @@ -7,7 +7,6 @@ import _ from "lodash"; import type { SkeletonTracing, Tree, TreeGroup, TreeMap, VolumeTracing } from "oxalis/store"; import type { UpdateAction, - UpdateActionWithTracingId, UpdateTreeVisibilityUpdateAction, } 
from "oxalis/model/sagas/update_actions"; import { updateTreeGroupVisibility, updateTreeVisibility } from "oxalis/model/sagas/update_actions"; diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts index c9e714a8229..b16e490e5e8 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts @@ -7,7 +7,6 @@ import type { DeleteNodeUpdateAction, DeleteTreeUpdateAction, UpdateAction, - UpdateActionWithTracingId, } from "oxalis/model/sagas/update_actions"; import { moveTreeComponent } from "oxalis/model/sagas/update_actions"; import compactToggleActions from "oxalis/model/helpers/compaction/compact_toggle_actions"; diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 1fab6d3f403..06fb2975175 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -4,13 +4,9 @@ import type { Action } from "oxalis/model/actions/actions"; import type { OxalisState, SaveState } from "oxalis/store"; import type { SetVersionNumberAction } from "oxalis/model/actions/save_actions"; import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; -import { getStats } from "oxalis/model/accessors/annotation_accessor"; +import { type CombinedTracingStats, getStats } from "oxalis/model/accessors/annotation_accessor"; import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; -import { updateKey2 } from "oxalis/model/helpers/deep_update"; -import { - updateEditableMapping, - updateVolumeTracing, -} from "oxalis/model/reducers/volumetracing_reducer_helpers"; +import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; import Date from "libs/date"; 
import type { UpdateAction, UpdateActionWithTracingId } from "../sagas/update_actions"; @@ -31,21 +27,9 @@ export function getTotalSaveQueueLength(queueObj: SaveState["queue"]) { } function updateVersion(state: OxalisState, action: SetVersionNumberAction) { - if (action.saveQueueType === "skeleton" && state.tracing.skeleton != null) { - return updateKey2(state, "tracing", "skeleton", { - version: action.version, - }); - } else if (action.saveQueueType === "volume") { - return updateVolumeTracing(state, action.tracingId, { - version: action.version, - }); - } else if (action.saveQueueType === "mapping") { - /*return updateEditableMapping(state, action.tracingId, { - version: action.version, - });*/ - } - - return state; + return updateKey(state, "tracing", { + version: action.version, + }); } function SaveReducer(state: OxalisState, action: Action): OxalisState { @@ -55,25 +39,18 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { // update actions. const dispatchedAction = action; const { items, transactionId } = dispatchedAction; - if (items.length === 0) { - return state; - } - // Only report tracing statistics, if a "real" update to the tracing happened - const stats = _.some( + const stats: CombinedTracingStats | null = _.some( dispatchedAction.items, (ua) => ua.name !== "updateSkeletonTracing" && ua.name !== "updateVolumeTracing", ) - ? getStats(state.tracing, dispatchedAction.saveQueueType, dispatchedAction.tracingId) + ? 
getStats(state.tracing) : null; const { activeUser } = state; if (activeUser == null) { throw new Error("Tried to save something even though user is not logged in."); } - const updateActionChunks = _.chunk( - items, - MAXIMUM_ACTION_COUNT_PER_BATCH[dispatchedAction.saveQueueType], - ); + const updateActionChunks = _.chunk(items, MAXIMUM_ACTION_COUNT_PER_BATCH); const transactionGroupCount = updateActionChunks.length; const actionLogInfo = JSON.stringify(getActionLog().slice(-10)); @@ -99,7 +76,6 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { // caught by the following check. If the bug appears again, we can investigate with more // details thanks to airbrake. if ( - dispatchedAction.saveQueueType === "skeleton" && oldQueue.length > 0 && newQueue.length > 0 && newQueue.at(-1)?.actions.some((action) => NOT_IDEMPOTENT_ACTIONS.includes(action.name)) && diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index de523f53795..95f6fa3ad37 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -268,6 +268,7 @@ function* createEditableMapping(): Saga { // Save before making the mapping editable to make sure the correct mapping is activated in the backend yield* call([Model, Model.ensureSavedState]); // Get volume tracing again to make sure the version is up to date + const tracing = yield* select((state) => state.tracing); const upToDateVolumeTracing = yield* select((state) => getActiveSegmentationTracing(state)); if (upToDateVolumeTracing == null) { throw new Error("No active segmentation tracing layer. 
Cannot create editble mapping."); @@ -277,7 +278,7 @@ function* createEditableMapping(): Saga { const layerName = volumeTracingId; const serverEditableMapping = yield* call(makeMappingEditable, tracingStoreUrl, volumeTracingId); // The server increments the volume tracing's version by 1 when switching the mapping to an editable one - yield* put(setVersionNumberAction(upToDateVolumeTracing.version + 1, "volume", volumeTracingId)); + yield* put(setVersionNumberAction(tracing.version + 1)); yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); yield* put(setHasEditableMappingAction()); yield* put(initializeEditableMappingAction(serverEditableMapping)); @@ -453,7 +454,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items, volumeTracingId)); yield* call([Model, Model.ensureSavedState]); if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS" || action.type === "DELETE_EDGE") { @@ -781,7 +782,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items, volumeTracingId)); yield* call([Model, Model.ensureSavedState]); if (action.type === "MIN_CUT_AGGLOMERATE") { @@ -942,7 +943,7 @@ function* handleProofreadCutFromNeighbors(action: Action) { return; } - yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); + yield* put(pushSaveQueueTransaction(items, volumeTracingId)); yield* call([Model, Model.ensureSavedState]); // Now that the changes are saved, we can split the mapping locally (because it requires diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index d94f8851f83..0311b19de8d 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ 
b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -165,10 +165,11 @@ export function* sendSaveRequestToServer(): Saga { const fullSaveQueue = yield* select((state) => state.save.queue); const saveQueue = sliceAppropriateBatchCount(fullSaveQueue); let compactedSaveQueue = compactSaveQueue(saveQueue); + const tracing = yield* select((state) => state.tracing); const tracings = yield* select((state) => _.compact([state.tracing.skeleton, ...state.tracing.volumes]), ); - const version = _.max(tracings.map((t) => t.version)) || 0; + const version = _.max(tracings.map((t) => t.version).concat([tracing.version])) || 0; const annotationId = yield* select((state) => state.tracing.annotationId); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); let versionIncrement; @@ -202,11 +203,7 @@ export function* sendSaveRequestToServer(): Saga { ); } - for (const tracing of tracings) { - yield* put( - setVersionNumberAction(version + versionIncrement, tracing.type, tracing.tracingId), - ); - } + yield* put(setVersionNumberAction(version + versionIncrement)); yield* put(setLastSaveTimestampAction()); yield* put(shiftSaveQueueAction(saveQueue.length)); @@ -446,7 +443,7 @@ export function* setupSavingForTracingType( ); if (items.length > 0) { - yield* put(pushSaveQueueTransaction(items, saveQueueType, tracingId)); + yield* put(pushSaveQueueTransaction(items, tracingId)); } prevTracing = tracing; @@ -502,14 +499,7 @@ function* watchForSaveConflicts() { // old reference to tracing might be outdated now due to the // immutability. 
const versionOnClient = yield* select((state) => { - if (tracing.type === "volume") { - return getVolumeTracingById(state.tracing, tracing.tracingId).version; - } - const { skeleton } = state.tracing; - if (skeleton == null) { - throw new Error("Skeleton must exist at this point."); - } - return skeleton.version; + return state.tracing.version; }); const toastKey = `save_conflicts_warning_${tracing.tracingId}`; diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts index 87ace921de4..c9aa2351d4c 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts @@ -11,11 +11,7 @@ export const UNDO_HISTORY_SIZE = 20; export const SETTINGS_RETRY_DELAY = 15 * 1000; export const SETTINGS_MAX_RETRY_COUNT = 20; // 20 * 15s == 5m -export const MAXIMUM_ACTION_COUNT_PER_BATCH = { - skeleton: 5000, - volume: 1000, // Since volume saving is slower, use a lower value here. - mapping: Number.POSITIVE_INFINITY, // The back-end does not accept transactions for mappings. -} as const; +export const MAXIMUM_ACTION_COUNT_PER_BATCH = 1000; // todop: should this be smarter? 
// export const MAXIMUM_ACTION_COUNT_PER_SAVE = { diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index 016a1feabcd..43e292c032d 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -947,11 +947,7 @@ function* handleDeleteSegmentData(): Saga { yield* put(setBusyBlockingInfoAction(true, "Segment is being deleted.")); yield* put( - pushSaveQueueTransaction( - [deleteSegmentDataVolumeAction(action.segmentId)], - "volume", - action.layerName, - ), + pushSaveQueueTransaction([deleteSegmentDataVolumeAction(action.segmentId)], action.layerName), ); yield* call([Model, Model.ensureSavedState]); diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 5ff7619bfab..d8b8fbcd97d 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -11,7 +11,6 @@ import type { APICompoundType, APISegmentationLayer, } from "types/api_flow_types"; -import type { Versions } from "oxalis/view/version_view"; import { computeDataTexturesSetup, getSupportedTextureSpecs, @@ -106,6 +105,7 @@ import { isFeatureAllowedByPricingPlan, } from "admin/organization/pricing_plan_utils"; import { convertServerAdditionalAxesToFrontEnd } from "./model/reducers/reducer_helpers"; +import { setVersionNumberAction } from "./model/actions/save_actions"; export const HANDLED_ERROR = "error_was_handled"; type DataLayerCollection = Record; @@ -114,7 +114,7 @@ export async function initialize( initialMaybeCompoundType: APICompoundType | null, initialCommandType: TraceOrViewCommand, initialFetch: boolean, - versions?: Versions, + version?: number | undefined | null, ): Promise< | { dataLayers: DataLayerCollection; @@ -169,7 +169,7 @@ export async function initialize( const [dataset, 
initialUserSettings, serverTracings] = await fetchParallel( annotation, datasetId, - versions, + version, ); const serverVolumeTracings = getServerVolumeTracings(serverTracings); const serverVolumeTracingIds = serverVolumeTracings.map((volumeTracing) => volumeTracing.id); @@ -237,12 +237,12 @@ export async function initialize( async function fetchParallel( annotation: APIAnnotation | null | undefined, datasetId: APIDatasetId, - versions?: Versions, + version: number | undefined | null, ): Promise<[APIDataset, UserConfiguration, Array]> { return Promise.all([ getDataset(datasetId, getSharingTokenFromUrlParameters()), getUserConfiguration(), // Fetch the actual tracing from the datastore, if there is an skeletonAnnotation - annotation ? getTracingsForAnnotation(annotation, versions) : [], + annotation ? getTracingsForAnnotation(annotation, version) : [], ]); } @@ -294,6 +294,7 @@ function initializeTracing( // This method is not called for the View mode const { dataset } = Store.getState(); let annotation = _annotation; + let version = 0; const { allowedModes, preferredMode } = determineAllowedModes(annotation.settings); _.extend(annotation.settings, { @@ -325,6 +326,7 @@ function initializeTracing( getSegmentationLayers(dataset).length > 0, messages["tracing.volume_missing_segmentation"], ); + version = Math.max(version, volumeTracing.version); Store.dispatch(initializeVolumeTracingAction(volumeTracing)); }); @@ -336,8 +338,10 @@ function initializeTracing( // To generate a huge amount of dummy trees, use: // import generateDummyTrees from "./model/helpers/generate_dummy_trees"; // tracing.trees = generateDummyTrees(1, 200000); + version = Math.max(version, skeletonTracing.version); Store.dispatch(initializeSkeletonTracingAction(skeletonTracing)); } + Store.dispatch(setVersionNumberAction(version)); } // Initialize 'flight', 'oblique' or 'orthogonal' mode @@ -464,6 +468,7 @@ function initializeDataLayerInstances(gpuFactor: number | null | undefined): { layer, 
textureInformation.textureSize, textureInformation.textureCount, + layer.name, // In case of a volume tracing layer the layer name will equal its tracingId. ); } diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 28c00f472b4..ea4e169f073 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -28,7 +28,7 @@ import type { AdditionalAxis, MetadataEntryProto, } from "types/api_flow_types"; -import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; +import type { CombinedTracingStats } from "oxalis/model/accessors/annotation_accessor"; import type { Action } from "oxalis/model/actions/actions"; import type { BoundingBoxType, @@ -50,7 +50,7 @@ import type { } from "oxalis/constants"; import type { BLEND_MODES, ControlModeEnum } from "oxalis/constants"; import type { Matrix4x4 } from "libs/mjs"; -import type { UpdateAction, UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; +import type { UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; import AnnotationReducer from "oxalis/model/reducers/annotation_reducer"; import DatasetReducer from "oxalis/model/reducers/dataset_reducer"; import type DiffableMap from "libs/diffable_map"; @@ -191,6 +191,7 @@ export type AnnotationVisibility = APIAnnotationVisibility; export type RestrictionsAndSettings = Restrictions & Settings; export type Annotation = { readonly annotationId: string; + readonly version: number; readonly restrictions: RestrictionsAndSettings; readonly visibility: AnnotationVisibility; readonly annotationLayers: Array; @@ -449,7 +450,7 @@ export type SaveQueueEntry = { transactionId: string; transactionGroupCount: number; transactionGroupIndex: number; - stats: TracingStats | null | undefined; + stats: CombinedTracingStats | null | undefined; info: string; }; export type ProgressInfo = { diff --git a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx 
b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx index ab12ab6fd61..d08db492659 100644 --- a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx @@ -13,7 +13,6 @@ import { LoadingOutlined, } from "@ant-design/icons"; import ErrorHandling from "libs/error_handling"; -import * as Utils from "libs/utils"; import FastTooltip from "components/fast_tooltip"; import { Tooltip } from "antd"; import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers"; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index 5a9da5127fb..bebf6409f4e 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -13,10 +13,7 @@ import { getResolutionUnion, } from "oxalis/model/accessors/dataset_accessor"; import { getActiveResolutionInfo } from "oxalis/model/accessors/flycam_accessor"; -import { - getCombinedStats, - type CombinedTracingStats, -} from "oxalis/model/accessors/annotation_accessor"; +import { getStats, type CombinedTracingStats } from "oxalis/model/accessors/annotation_accessor"; import { setAnnotationNameAction, setAnnotationDescriptionAction, @@ -272,7 +269,7 @@ export class DatasetInfoTabView extends React.PureComponent { getAnnotationStatistics() { if (this.props.isDatasetViewMode) return null; - return ; + return ; } getKeyboardShortcuts() { diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx index 7a2f1b7099e..99e672661cf 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/skeleton_tab_view.tsx @@ -244,13 +244,7 @@ export async function 
importTracingFiles(files: Array, createGroupForEachF if (oldVolumeTracing) { Store.dispatch(importVolumeTracingAction()); - Store.dispatch( - setVersionNumberAction( - oldVolumeTracing.version + 1, - "volume", - oldVolumeTracing.tracingId, - ), - ); + Store.dispatch(setVersionNumberAction(tracing.version + 1)); Store.dispatch(setLargestSegmentIdAction(newLargestSegmentId)); await clearCache(dataset, oldVolumeTracing.tracingId); await api.data.reloadBuckets(oldVolumeTracing.tracingId); diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index f9133b26a2f..e363c9060b1 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -38,12 +38,14 @@ import type { MergeTreeUpdateAction, UpdateMappingNameUpdateAction, DeleteSegmentDataUpdateAction, + UpdateActionWithTracingId, } from "oxalis/model/sagas/update_actions"; import FormattedDate from "components/formatted_date"; import { MISSING_GROUP_ID } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; import { useSelector } from "react-redux"; -import type { OxalisState } from "oxalis/store"; +import type { HybridTracing, OxalisState } from "oxalis/store"; import { formatUserName, getContributorById } from "oxalis/model/accessors/user_accessor"; +import { getReadableNameByVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; type Description = { description: string; icon: React.ReactNode; @@ -56,7 +58,10 @@ const updateTracingDescription = { // determines the order in which update actions are checked // to describe an update action batch. See also the comment // of the `getDescriptionForBatch` function. 
-const descriptionFns: Record Description> = { +const descriptionFns: Record< + ServerUpdateAction["name"], + (firstAction: any, actionCount: number, tracing: HybridTracing) => Description +> = { importVolumeTracing: (): Description => ({ description: "Imported a volume tracing.", icon: , @@ -122,14 +127,28 @@ const descriptionFns: Record Descr description: `Updated the tree with id ${action.value.id}.`, icon: , }), - updateBucket: (): Description => ({ - description: "Updated the segmentation.", - icon: , - }), - updateSegmentGroups: (): Description => ({ - description: "Updated the segment groups.", - icon: , - }), + updateBucket: ( + firstAction: UpdateActionWithTracingId, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segmentation of layer ${layerName}.`, + icon: , + }; + }, + updateSegmentGroups: ( + firstAction: UpdateActionWithTracingId, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segment groups of layer ${layerName}.`, + icon: , + }; + }, updateNode: (action: UpdateNodeUpdateAction): Description => ({ description: `Updated the node with id ${action.value.id}.`, icon: , @@ -160,26 +179,61 @@ const descriptionFns: Record Descr description: "Updated the 3D view.", icon: , }), - createSegment: (action: CreateSegmentUpdateAction): Description => ({ - description: `Added the segment with id ${action.value.id} to the segments list.`, - icon: , - }), - updateSegment: (action: UpdateSegmentUpdateAction): Description => ({ - description: `Updated the segment with id ${action.value.id} in the segments list.`, - icon: , - }), - deleteSegment: (action: DeleteSegmentUpdateAction): Description => ({ - description: `Deleted the segment with id 
${action.value.id} from the segments list.`, - icon: , - }), - deleteSegmentData: (action: DeleteSegmentDataUpdateAction): Description => ({ - description: `Deleted the data of segment ${action.value.id}. All voxels with that id were overwritten with 0.`, - icon: , - }), - addSegmentIndex: (): Description => ({ - description: "Added segment index to enable segment statistics.", - icon: , - }), + createSegment: ( + firstAction: UpdateActionWithTracingId & CreateSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Added the segment with id ${firstAction.value.id} to the segments list of layer ${layerName}.`, + icon: , + }; + }, + updateSegment: ( + firstAction: UpdateActionWithTracingId & UpdateSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Updated the segment with id ${firstAction.value.id} in the segments list of layer ${layerName}.`, + icon: , + }; + }, + deleteSegment: ( + firstAction: UpdateActionWithTracingId & DeleteSegmentUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Deleted the segment with id ${firstAction.value.id} from the segments list of layer ${layerName}.`, + icon: , + }; + }, + deleteSegmentData: ( + firstAction: UpdateActionWithTracingId & DeleteSegmentDataUpdateAction, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Deleted the data of segment ${firstAction.value.id} of layer ${layerName}. 
All voxels with that id were overwritten with 0.`, + icon: , + }; + }, + addSegmentIndex: ( + firstAction: UpdateActionWithTracingId, + _actionCount: number, + tracing: HybridTracing, + ): Description => { + const layerName = maybeGetReadableVolumeTracingName(tracing, firstAction.value.actionTracingId); + return { + description: `Added segment index to layer ${layerName} to enable segment statistics.`, + icon: , + }; + }, // This should never be shown since currently this update action can only be triggered // by merging or splitting trees which is recognized separately, before this description // is accessed. @@ -196,9 +250,17 @@ const descriptionFns: Record Descr updateVolumeTracing: (): Description => updateTracingDescription, } as const; +function maybeGetReadableVolumeTracingName(tracing: HybridTracing, tracingId: string): string { + const volumeTracing = tracing.volumes.find((volume) => volume.tracingId === tracingId); + return volumeTracing != null + ? getReadableNameByVolumeTracingId(tracing, volumeTracing.tracingId) + : ""; +} + function getDescriptionForSpecificBatch( actions: Array, type: string, + tracing: HybridTracing, ): Description { const firstAction = actions[0]; @@ -206,7 +268,7 @@ function getDescriptionForSpecificBatch( throw new Error("Type constraint violated"); } const fn = descriptionFns[type]; - return fn(firstAction, actions.length); + return fn(firstAction, actions.length, tracing); } // An update action batch can consist of more than one update action as a single user action @@ -220,7 +282,10 @@ function getDescriptionForSpecificBatch( // "more expressive" update actions first and for more general ones later. // The order is determined by the order in which the update actions are added to the // `descriptionFns` object. 
-function getDescriptionForBatch(actions: Array): Description { +function getDescriptionForBatch( + actions: Array, + tracing: HybridTracing, +): Description { const groupedUpdateActions = _.groupBy(actions, "name"); const moveTreeComponentUAs = groupedUpdateActions.moveTreeComponent; @@ -270,7 +335,7 @@ function getDescriptionForBatch(actions: Array): Description const updateActions = groupedUpdateActions[key]; if (updateActions != null) { - return getDescriptionForSpecificBatch(updateActions, key); + return getDescriptionForSpecificBatch(updateActions, key, tracing); } } @@ -302,6 +367,7 @@ export default function VersionEntry({ const contributors = useSelector((state: OxalisState) => state.tracing.contributors); const activeUser = useSelector((state: OxalisState) => state.activeUser); const owner = useSelector((state: OxalisState) => state.tracing.owner); + const tracing = useSelector((state: OxalisState) => state.tracing); const liClassName = classNames("version-entry", { "active-version-entry": isActive, @@ -317,7 +383,7 @@ export default function VersionEntry({ {allowUpdate ? "Restore" : "Download"} ); - const { description, icon } = getDescriptionForBatch(actions); + const { description, icon } = getDescriptionForBatch(actions, tracing); // In case the actionAuthorId is not set, the action was created before the multi-contributor // support. Default to the owner in that case. 
diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index c0d8a274830..20928c8345a 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -3,7 +3,6 @@ import { useState, useEffect } from "react"; import _ from "lodash"; import dayjs from "dayjs"; import type { APIUpdateActionBatch } from "types/api_flow_types"; -import type { Versions } from "oxalis/view/version_view"; import { chunkIntoTimeWindows } from "libs/utils"; import { getUpdateActionLog, @@ -13,7 +12,6 @@ import { import { handleGenericError } from "libs/error_handling"; import { pushSaveQueueTransaction, - type SaveQueueType, setVersionNumberAction, } from "oxalis/model/actions/save_actions"; import { @@ -24,21 +22,19 @@ import { import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import { Model } from "oxalis/singletons"; -import type { EditableMapping, OxalisState, SkeletonTracing, VolumeTracing } from "oxalis/store"; +import type { HybridTracing, OxalisState } from "oxalis/store"; import Store from "oxalis/store"; import VersionEntryGroup from "oxalis/view/version_entry_group"; import { api } from "oxalis/singletons"; -import Toast from "libs/toast"; import { useInfiniteQuery, useQueryClient } from "@tanstack/react-query"; import { useEffectOnlyOnce } from "libs/react_hooks"; import { useFetch } from "libs/react_helpers"; import { useSelector } from "react-redux"; +import { getCreationTimestamp } from "oxalis/model/accessors/annotation_accessor"; const ENTRIES_PER_PAGE = 5000; type Props = { - versionedObjectType: SaveQueueType; - tracing: SkeletonTracing | VolumeTracing | EditableMapping; allowUpdate: boolean; }; @@ -49,25 +45,18 @@ type GroupedAndChunkedVersions = Record - Model.getSegmentationTracingLayer(volumeTracingId), - ); - 
segmentationLayersToReload.push(...versionedSegmentationLayers); - } + // TODO: properly determine which layers to reload. + // No versions were passed which means that the newest annotation should be + // shown. Therefore, reload all segmentation layers. + segmentationLayersToReload.push(...Model.getSegmentationTracingLayers()); for (const segmentationLayer of segmentationLayersToReload) { segmentationLayer.cube.collectAllBuckets(); @@ -80,20 +69,13 @@ async function handleRestoreVersion( versions: APIUpdateActionBatch[], version: number, ) { - const getNewestVersion = () => _.max(versions.map((batch) => batch.version)) || 0; if (props.allowUpdate) { - Store.dispatch( - setVersionNumberAction( - getNewestVersion(), - props.versionedObjectType, - props.tracing.tracingId, - ), - ); + const newestVersion = _.max(versions.map((batch) => batch.version)) || 0; + Store.dispatch(setVersionNumberAction(newestVersion)); Store.dispatch( pushSaveQueueTransaction( [revertToVersion(version)], - props.versionedObjectType, - props.tracing.tracingId, + "experimental; leaving out tracingId as this should not be required", ), ); await Model.ensureSavedState(); @@ -102,28 +84,7 @@ async function handleRestoreVersion( } else { const { annotationType, annotationId, volumes } = Store.getState().tracing; const includesVolumeFallbackData = volumes.some((volume) => volume.fallbackLayer != null); - downloadAnnotation(annotationId, annotationType, includesVolumeFallbackData, { - [props.versionedObjectType]: version, - }); - } -} - -function handlePreviewVersion(props: Props, version: number) { - if (props.versionedObjectType === "skeleton") { - return previewVersion({ - skeleton: version, - }); - } else if (props.versionedObjectType === "volume") { - return previewVersion({ - volumes: { - [props.tracing.tracingId]: version, - }, - }); - } else { - Toast.warning( - `Version preview and restoring for ${props.versionedObjectType}s is not supported yet.`, - ); - return Promise.resolve(); + 
downloadAnnotation(annotationId, annotationType, includesVolumeFallbackData, version); } } @@ -146,7 +107,7 @@ const getGroupedAndChunkedVersions = _.memoize( ); async function getUpdateActionLogPage( - props: Props, + tracing: HybridTracing, tracingStoreUrl: string, annotationId: string, newestVersion: number, @@ -186,7 +147,7 @@ async function getUpdateActionLogPage( if (oldestVersionInPage === 1) { updateActionLog.push({ version: 0, - value: [serverCreateTracing(props.tracing.createdTimestamp)], + value: [serverCreateTracing(getCreationTimestamp(tracing))], }); } @@ -199,15 +160,15 @@ async function getUpdateActionLogPage( } function VersionList(props: Props) { - const { tracing } = props; const tracingStoreUrl = useSelector((state: OxalisState) => state.tracing.tracingStore.url); const annotationId = useSelector((state: OxalisState) => state.tracing.annotationId); const newestVersion = useFetch( () => getNewestVersionForTracing(tracingStoreUrl, annotationId), null, - [tracing], + [annotationId], ); + console.log("newestVersion", newestVersion); if (newestVersion == null) { return ( @@ -221,24 +182,26 @@ function VersionList(props: Props) { } function InnerVersionList(props: Props & { newestVersion: number }) { + const tracing = useSelector((state: OxalisState) => state.tracing); const queryClient = useQueryClient(); // Remember the version with which the version view was opened ( // the active version could change by the actions of the user). // Based on this version, the page numbers are calculated. 
const { newestVersion } = props; - const [initialVersion] = useState(props.tracing.version); + const [initialVersion] = useState(tracing.version); function fetchPaginatedVersions({ pageParam }: { pageParam?: number }) { + // TODO: maybe refactor this so that this method is not calculated very rendering cycle if (pageParam == null) { pageParam = Math.floor((newestVersion - initialVersion) / ENTRIES_PER_PAGE); } const { url: tracingStoreUrl } = Store.getState().tracing.tracingStore; const annotationId = Store.getState().tracing.annotationId; - return getUpdateActionLogPage(props, tracingStoreUrl, annotationId, newestVersion, pageParam); + return getUpdateActionLogPage(tracing, tracingStoreUrl, annotationId, newestVersion, pageParam); } - const queryKey = ["versions", props.tracing.tracingId]; + const queryKey = ["versions", tracing.annotationId]; useEffectOnlyOnce(() => { // Remove all previous existent queries so that the content of this view @@ -330,11 +293,11 @@ function InnerVersionList(props: Props & { newestVersion: number }) { batches={batchesOrDateString} allowUpdate={props.allowUpdate} newestVersion={flattenedVersions[0].version} - activeVersion={props.tracing.version} + activeVersion={tracing.version} onRestoreVersion={(version) => handleRestoreVersion(props, flattenedVersions, version) } - onPreviewVersion={(version) => handlePreviewVersion(props, version)} + onPreviewVersion={(version) => previewVersion(version)} key={batchesOrDateString[0].version} /> ) diff --git a/frontend/javascripts/oxalis/view/version_view.tsx b/frontend/javascripts/oxalis/view/version_view.tsx index 4a8862b7719..e755a6c6f38 100644 --- a/frontend/javascripts/oxalis/view/version_view.tsx +++ b/frontend/javascripts/oxalis/view/version_view.tsx @@ -1,14 +1,14 @@ -import { Button, Alert, Tabs, type TabsProps } from "antd"; +import { Button, Alert } from "antd"; import { CloseOutlined } from "@ant-design/icons"; -import { connect } from "react-redux"; +import { connect, useDispatch } 
from "react-redux"; import * as React from "react"; -import { getReadableNameByVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import type { OxalisState, Tracing } from "oxalis/store"; -import { type TracingType, TracingTypeEnum } from "types/api_flow_types"; import Store from "oxalis/store"; import VersionList, { previewVersion } from "oxalis/view/version_list"; +import { useState } from "react"; +import { useWillUnmount } from "beautiful-react-hooks"; export type Versions = { skeleton?: number | null | undefined; @@ -21,151 +21,84 @@ type OwnProps = { allowUpdate: boolean; }; type Props = StateProps & OwnProps; -type State = { - activeTracingType: TracingType; - initialAllowUpdate: boolean; -}; -class VersionView extends React.Component { - state: State = { - activeTracingType: - this.props.tracing.skeleton != null ? 
TracingTypeEnum.skeleton : TracingTypeEnum.volume, - // Remember whether the tracing could originally be updated - initialAllowUpdate: this.props.allowUpdate, - }; +const VersionView: React.FC = (props: Props) => { + const [initialAllowUpdate] = useState(props.allowUpdate); + const dispatch = useDispatch(); - componentWillUnmount() { - Store.dispatch(setAnnotationAllowUpdateAction(this.state.initialAllowUpdate)); - } + useWillUnmount(() => { + dispatch(setAnnotationAllowUpdateAction(initialAllowUpdate)); + }); - handleClose = async () => { + const handleClose = async () => { // This will load the newest version of both skeleton and volume tracings await previewVersion(); Store.dispatch(setVersionRestoreVisibilityAction(false)); - Store.dispatch(setAnnotationAllowUpdateAction(this.state.initialAllowUpdate)); - }; - - onChangeTab = (activeKey: string) => { - this.setState({ - activeTracingType: activeKey as TracingType, - }); + Store.dispatch(setAnnotationAllowUpdateAction(initialAllowUpdate)); }; - render() { - const tabs: TabsProps["items"] = []; - - if (this.props.tracing.skeleton != null) - tabs.push({ - label: "Skeleton", - key: "skeleton", - children: ( - - ), - }); - - tabs.push( - ...this.props.tracing.volumes.map((volumeTracing) => ({ - label: getReadableNameByVolumeTracingId(this.props.tracing, volumeTracing.tracingId), - key: volumeTracing.tracingId, - children: ( - - ), - })), - ); - - tabs.push( - ...this.props.tracing.mappings.map((mapping) => ({ - label: `${getReadableNameByVolumeTracingId( - this.props.tracing, - mapping.tracingId, - )} (Editable Mapping)`, - key: mapping.tracingId, - children: ( - - ), - })), - ); - - return ( + return ( +
-
-

- Version History -

-
+ Version History + +
- ); - } -} +
+ +
+
+ ); +}; function mapStateToProps(state: OxalisState): StateProps { return { diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index de37bc59b18..0ba4b896605 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -551,6 +551,7 @@ type APIAnnotationBase = APIAnnotationInfo & { readonly owner?: APIUserBase; // This `user` attribute is deprecated and should not be used, anymore. It only exists to satisfy e2e type checks readonly user?: APIUserBase; + readonly version: number; readonly contributors: APIUserBase[]; readonly othersMayEdit: boolean; }; diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala index 2f34d15b159..9b1a7dd2d9d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala @@ -17,5 +17,7 @@ object AnnotationLayerType extends ExtendedEnumeration { p match { case AnnotationLayerTypeProto.skeleton => Skeleton case AnnotationLayerTypeProto.volume => Volume + case AnnotationLayerTypeProto.Unrecognized(_) => + Volume // unrecognized should never happen, artifact of proto code generation } } From 90d8e4d8fa298be99cf25e7819f2193f48fba62b Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 21 Oct 2024 16:40:30 +0200 Subject: [PATCH 112/361] AnnotationDefaults --- app/models/annotation/Annotation.scala | 9 +++++++-- app/models/annotation/AnnotationService.scala | 6 ++++-- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index 493bce4bdd4..d608250a649 100755 --- a/app/models/annotation/Annotation.scala +++ 
b/app/models/annotation/Annotation.scala @@ -22,6 +22,11 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration +object AnnotationDefaults { + val defaultName: String = "" + val defaultDescription: String = "" +} + case class Annotation( _id: ObjectId, _dataset: ObjectId, @@ -29,9 +34,9 @@ case class Annotation( _team: ObjectId, _user: ObjectId, annotationLayers: List[AnnotationLayer], - description: String = "", + description: String = AnnotationDefaults.defaultDescription, visibility: AnnotationVisibility.Value = AnnotationVisibility.Internal, - name: String = "", + name: String = AnnotationDefaults.defaultName, viewConfiguration: Option[JsObject] = None, state: AnnotationState.Value = Active, isLockedByOwner: Boolean = false, diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index bda33814f37..2c7e94a3397 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -278,8 +278,10 @@ class AnnotationService @Inject()( AnnotationLayerType.toProto(l.typ) ) } - //TODO pass right name, description here - annotationProto = AnnotationProto(name = None, description = None, version = 0L, layers = layersProto) + annotationProto = AnnotationProto(name = Some(AnnotationDefaults.defaultName), + description = Some(AnnotationDefaults.defaultDescription), + version = 0L, + layers = layersProto) _ <- tracingStoreClient.saveAnnotationProto(annotationId, annotationProto) } yield newAnnotationLayers From 759c2f30b223cbc32c4a476bb97c133ddb44e258 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 23 Oct 2024 09:49:35 +0200 Subject: [PATCH 113/361] fix after merge. 
report name + description to wk --- .../WKRemoteTracingStoreController.scala | 22 +++++-- conf/webknossos.latest.routes | 2 +- .../oxalis/model/sagas/save_saga.ts | 60 ++----------------- .../dataset_info_tab_view.tsx | 16 +---- .../com/scalableminds/util/tools/Fox.scala | 2 +- .../models/annotation/AnnotationLayer.scala | 1 + .../TSRemoteWebknossosClient.scala | 7 +++ .../annotation/TSAnnotationService.scala | 6 +- 8 files changed, 32 insertions(+), 84 deletions(-) diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index eebbf594f88..1e05a4df3f5 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -3,6 +3,7 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer @@ -17,6 +18,7 @@ import models.annotation.AnnotationState._ import models.annotation.{ Annotation, AnnotationDAO, + AnnotationDefaults, AnnotationInformationProvider, AnnotationLayerDAO, AnnotationService, @@ -58,23 +60,31 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore val bearerTokenService: WebknossosBearerTokenAuthenticatorService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService - def updateAnnotationLayers(name: String, key: String, annotationId: String): Action[List[AnnotationLayer]] = - Action.async(validateJson[List[AnnotationLayer]]) { implicit request => + def updateAnnotation(name: String, key: String, annotationId: String): 
Action[AnnotationProto] = + Action.async(validateProto[AnnotationProto]) { implicit request => + // tracingstore only sends this request after ensuring write access + implicit val ctx: DBAccessContext = GlobalAccessContext for { annotationIdValidated <- ObjectId.fromString(annotationId) existingLayers <- annotationLayerDAO.findAnnotationLayersFor(annotationIdValidated) + newLayersProto = request.body.layers existingLayerIds = existingLayers.map(_.tracingId).toSet - newLayerIds = request.body.map(_.tracingId).toSet + newLayerIds = newLayersProto.map(_.tracingId).toSet layerIdsToDelete = existingLayerIds.diff(newLayerIds) layerIdsToUpdate = existingLayerIds.intersect(newLayerIds) layerIdsToInsert = newLayerIds.diff(existingLayerIds) _ <- Fox.serialCombined(layerIdsToDelete.toList)( annotationLayerDAO.deleteOneByTracingId(annotationIdValidated, _)) - _ <- Fox.serialCombined(request.body.filter(l => layerIdsToInsert.contains(l.tracingId)))( - annotationLayerDAO.insertOne(annotationIdValidated, _)) - _ <- Fox.serialCombined(request.body.filter(l => layerIdsToUpdate.contains(l.tracingId)))(l => + _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToInsert.contains(l.tracingId))) { layerProto => + annotationLayerDAO.insertOne(annotationIdValidated, AnnotationLayer.fromProto(layerProto)) + } + _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToUpdate.contains(l.tracingId)))(l => annotationLayerDAO.updateName(annotationIdValidated, l.tracingId, l.name)) // Layer stats are ignored here, they are sent eagerly when saving updates + _ <- annotationDAO.updateName(annotationIdValidated, + request.body.name.getOrElse(AnnotationDefaults.defaultName)) + _ <- annotationDAO.updateDescription(annotationIdValidated, + request.body.description.getOrElse(AnnotationDefaults.defaultDescription)) } yield Ok } diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index a1ace976868..e439b682927 100644 --- a/conf/webknossos.latest.routes +++ 
b/conf/webknossos.latest.routes @@ -121,7 +121,7 @@ PUT /datastores/:name # Tracingstores GET /tracingstore controllers.TracingStoreController.listOne() POST /tracingstores/:name/handleTracingUpdateReport controllers.WKRemoteTracingStoreController.handleTracingUpdateReport(name: String, key: String) -POST /tracingstores/:name/updateAnnotationLayers controllers.WKRemoteTracingStoreController.updateAnnotationLayers(name: String, key: String, annotationId: String) +POST /tracingstores/:name/updateAnnotation controllers.WKRemoteTracingStoreController.updateAnnotation(name: String, key: String, annotationId: String) POST /tracingstores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaTracingstore(name: String, key: String, token: Option[String]) PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForTracing(name: String, key: String, tracingId: String) diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 68ad8eabd3d..d7f7411738e 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -9,17 +9,8 @@ import window, { alert, document, location } from "libs/window"; import _ from "lodash"; import messages from "messages"; import { ControlModeEnum } from "oxalis/constants"; -<<<<<<< HEAD -import { getResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; -||||||| 934bb6aa9b -import { getResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; -import { selectQueue } from "oxalis/model/accessors/save_accessor"; -======= import { getMagInfo } from "oxalis/model/accessors/dataset_accessor"; -import { selectQueue } from "oxalis/model/accessors/save_accessor"; ->>>>>>> master import { selectTracing } from "oxalis/model/accessors/tracing_accessor"; -import { getVolumeTracingById } 
from "oxalis/model/accessors/volumetracing_accessor"; import { FlycamActions } from "oxalis/model/actions/flycam_actions"; import { pushSaveQueueTransaction, @@ -286,7 +277,6 @@ export function* sendSaveRequestToServer(): Saga { } } -<<<<<<< HEAD function* markBucketsAsNotDirty(saveQueue: Array) { for (const saveEntry of saveQueue) { for (const updateAction of saveEntry.actions) { @@ -295,59 +285,17 @@ function* markBucketsAsNotDirty(saveQueue: Array) { // an updateBucket action. const { actionTracingId: tracingId } = updateAction.value; const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); - const segmentationResolutionInfo = yield* call( - getResolutionInfo, - segmentationLayer.resolutions, - ); -||||||| 934bb6aa9b -function* markBucketsAsNotDirty(saveQueue: Array, tracingId: string) { - const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); - const segmentationResolutionInfo = yield* call(getResolutionInfo, segmentationLayer.resolutions); -======= -function* markBucketsAsNotDirty(saveQueue: Array, tracingId: string) { - const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); - const segmentationResolutionInfo = yield* call(getMagInfo, segmentationLayer.resolutions); ->>>>>>> master - -<<<<<<< HEAD + const segmentationResolutionInfo = yield* call(getMagInfo, segmentationLayer.resolutions); + const { position, mag, additionalCoordinates } = updateAction.value; - const resolutionIndex = segmentationResolutionInfo.getIndexByResolution(mag); + const resolutionIndex = segmentationResolutionInfo.getIndexByMag(mag); const zoomedBucketAddress = globalPositionToBucketPosition( position, - segmentationResolutionInfo.getDenseResolutions(), + segmentationResolutionInfo.getDenseMags(), resolutionIndex, additionalCoordinates, ); const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); -||||||| 934bb6aa9b - if (segmentationLayer != null) { - for (const saveEntry of saveQueue) { - for (const updateAction of 
saveEntry.actions) { - if (updateAction.name === "updateBucket") { - const { position, mag, additionalCoordinates } = updateAction.value; - const resolutionIndex = segmentationResolutionInfo.getIndexByResolution(mag); - const zoomedBucketAddress = globalPositionToBucketPosition( - position, - segmentationResolutionInfo.getDenseResolutions(), - resolutionIndex, - additionalCoordinates, - ); - const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); -======= - if (segmentationLayer != null) { - for (const saveEntry of saveQueue) { - for (const updateAction of saveEntry.actions) { - if (updateAction.name === "updateBucket") { - const { position, mag, additionalCoordinates } = updateAction.value; - const resolutionIndex = segmentationResolutionInfo.getIndexByMag(mag); - const zoomedBucketAddress = globalPositionToBucketPosition( - position, - segmentationResolutionInfo.getDenseMags(), - resolutionIndex, - additionalCoordinates, - ); - const bucket = segmentationLayer.cube.getOrCreateBucket(zoomedBucketAddress); ->>>>>>> master if (bucket.type === "null") { continue; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index 670bcf59df4..bf5b4f783cd 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -12,22 +12,8 @@ import { getDatasetExtentAsString, getMagnificationUnion, } from "oxalis/model/accessors/dataset_accessor"; -<<<<<<< HEAD -import { getActiveResolutionInfo } from "oxalis/model/accessors/flycam_accessor"; -import { getStats, type CombinedTracingStats } from "oxalis/model/accessors/annotation_accessor"; -||||||| 934bb6aa9b -import { getActiveResolutionInfo } from "oxalis/model/accessors/flycam_accessor"; -import { - getCombinedStats, - type CombinedTracingStats, -} from 
"oxalis/model/accessors/annotation_accessor"; -======= import { getActiveMagInfo } from "oxalis/model/accessors/flycam_accessor"; -import { - getCombinedStats, - type CombinedTracingStats, -} from "oxalis/model/accessors/annotation_accessor"; ->>>>>>> master +import { getStats, type CombinedTracingStats } from "oxalis/model/accessors/annotation_accessor"; import { setAnnotationNameAction, setAnnotationDescriptionAction, diff --git a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala index 59af5b50d28..2195b751eef 100644 --- a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala +++ b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala @@ -133,7 +133,7 @@ object Fox extends FoxImplicits { } // Run serially, fail on the first failure - def serialCombined[A, B](l: List[A])(f: A => Fox[B])(implicit ec: ExecutionContext): Fox[List[B]] = + def serialCombined[A, B](l: Iterable[A])(f: A => Fox[B])(implicit ec: ExecutionContext): Fox[List[B]] = serialCombined(l.iterator)(f) // Run serially, fail on the first failure diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala index 1c8e827d31c..5b1ba9b6607 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala @@ -11,6 +11,7 @@ import scalapb.GeneratedMessage import scala.concurrent.ExecutionContext +// TODO can this be moved back to wk-core backend? 
case class AnnotationLayer( tracingId: String, typ: AnnotationLayerType, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 52ddf6f03f4..523daee164b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -5,6 +5,7 @@ import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} @@ -99,6 +100,12 @@ class TSRemoteWebknossosClient @Inject()( .addQueryString("key" -> tracingStoreKey) .postJson(annotationLayers) + def updateAnnotation(annotationId: String, annotationProto: AnnotationProto): Fox[Unit] = + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/updateAnnotation") + .addQueryString("annotationId" -> annotationId) + .addQueryString("key" -> tracingStoreKey) + .postProto(annotationProto) + def createTracingFor(annotationId: String, layerParameters: AnnotationLayerParameters, previousVersion: Long): Fox[Either[SkeletonTracing, VolumeTracing]] = { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 06386265681..c98ba090cb3 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -10,7 +10,6 @@ import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappin import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingLayer, EditableMappingService, @@ -441,10 +440,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- updatedWithNewVerson.flushBufferedUpdates() _ <- flushUpdatedTracings(updatedWithNewVerson) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) - _ <- remoteWebknossosClient.updateAnnotationLayers(annotationId, - updatedWithNewVerson.annotation.layers - .map(AnnotationLayer.fromProto) - .toList) // TODO perf: skip if no layer changes + _ <- remoteWebknossosClient.updateAnnotation(annotationId, updatedWithNewVerson.annotation) // TODO perf: skip if annotation is identical } yield updatedWithNewVerson } } From 1b8a3d51ac5b93566bc66e5956c5ef2719f6c5b3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 23 Oct 2024 09:55:31 +0200 Subject: [PATCH 114/361] skip reporting changes to postgres when loading old versions --- .../annotation/TSAnnotationService.scala | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c98ba090cb3..70a285483ce 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -204,6 +204,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { targetVersion <- determineTargetVersion(annotationId, version) ?~> "determineTargetVersion.failed" + // When requesting any other than the newest version, do not consider the changes final + reportChangesToWk = version.isEmpty || version.contains(targetVersion) updatedAnnotation <- materializedAnnotationWithTracingCache.getOrLoad( (annotationId, targetVersion, requestedSkeletonTracingIds, requestedVolumeTracingIds, requestAll), _ => @@ -212,7 +214,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss targetVersion, requestedSkeletonTracingIds, requestedVolumeTracingIds, - requestAll = true) // TODO can we request fewer to save perf? still need to avoid duplicate apply + requestAll = true, + reportChangesToWk = reportChangesToWk) // TODO can we request fewer to save perf? 
still need to avoid duplicate apply ) } yield updatedAnnotation @@ -221,7 +224,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss version: Long, requestedSkeletonTracingIds: List[String], requestedVolumeTracingIds: List[String], - requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + requestAll: Boolean, + reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { annotationWithVersion <- tracingDataStore.annotations.get(annotationId, Some(version))( fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" @@ -234,7 +238,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss version, requestedSkeletonTracingIds, requestedVolumeTracingIds, - requestAll) ?~> "applyUpdates.failed" + requestAll, + reportChangesToWk) ?~> "applyUpdates.failed" } yield updated def getEditableMappingInfo(annotationId: String, tracingId: String, version: Option[Long] = None)( @@ -268,7 +273,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss targetVersion: Long, requestedSkeletonTracingIds: List[String], requestedVolumeTracingIds: List[String], - requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + requestAll: Boolean, + reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { updateGroupsAsSaved <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" updatesGroupsRegrouped = regroupByIsolationSensitiveActions(updateGroupsAsSaved) @@ -284,7 +290,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss updatesFlat, annotation.version, targetVersion) // TODO: targetVersion should be set per update group - updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesGroupsRegrouped) 
?~> "applyUpdates.inner.failed" + updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, + annotationId, + updatesGroupsRegrouped, + reportChangesToWk) ?~> "applyUpdates.inner.failed" } yield updated private def findEditableMappingsForUpdates( // TODO integrate with findTracings? @@ -390,7 +399,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def applyUpdatesGrouped( annotation: AnnotationWithTracings, annotationId: String, - updateGroups: List[(Long, List[UpdateAction])] + updateGroups: List[(Long, List[UpdateAction])], + reportChangesToWk: Boolean )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { def updateGroupedIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], remainingUpdateGroups: List[(Long, List[UpdateAction])]): Fox[AnnotationWithTracings] = @@ -413,7 +423,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotation: AnnotationWithTracings, annotationId: String, updates: List[UpdateAction], - targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { + targetVersion: Long, + reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { logger.info(s"applying ${updates.length} to go from v${annotation.version} to v$targetVersion") From ee89ea790d146dd9dc5ab5f1e97c031f8fbac14d Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 23 Oct 2024 10:10:14 +0200 Subject: [PATCH 115/361] fix param, remove addSegmentIndex route --- app/controllers/AnnotationController.scala | 65 ------------------- .../WKRemoteTracingStoreClient.scala | 8 --- conf/webknossos.latest.routes | 1 - .../annotation/TSAnnotationService.scala | 5 +- .../controllers/TSAnnotationController.scala | 18 +---- .../controllers/VolumeTracingController.scala | 26 -------- .../volume/VolumeTracingService.scala | 55 ---------------- ...alableminds.webknossos.tracingstore.routes 
| 1 - 8 files changed, 6 insertions(+), 173 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index d5a1943f01f..81c1aa9f332 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -25,7 +25,6 @@ import models.task.TaskDAO import models.team.{TeamDAO, TeamService} import models.user.time._ import models.user.{User, UserDAO, UserService} -import net.liftweb.common.Box import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -258,70 +257,6 @@ class AnnotationController @Inject()( } yield result } - def addSegmentIndicesToAll(parallelBatchCount: Int, - dryRun: Boolean, - skipTracings: Option[String]): Action[AnyContent] = - sil.SecuredAction.async { implicit request => - { - for { - _ <- userService.assertIsSuperUser(request.identity._multiUser) ?~> "notAllowed" ~> FORBIDDEN - _ = logger.info("Running migration to add segment index to all volume annotation layers...") - skipTracingsSet = skipTracings.map(_.split(",").toSet).getOrElse(Set()) - _ = if (skipTracingsSet.nonEmpty) { - logger.info(f"Skipping these tracings: ${skipTracingsSet.mkString(",")}") - } - _ = logger.info("Gathering list of volume tracings...") - annotationLayers <- annotationLayerDAO.findAllVolumeLayers - annotationLayersFiltered = annotationLayers.filter(l => !skipTracingsSet.contains(l.tracingId)) - totalCount = annotationLayersFiltered.length - batches = batch(annotationLayersFiltered, parallelBatchCount) - _ = logger.info(f"Processing $totalCount tracings in ${batches.length} batches") - before = Instant.now - results: Seq[List[Box[Unit]]] <- Fox.combined(batches.zipWithIndex.map { - case (batch, index) => addSegmentIndicesToBatch(batch, index, dryRun) - }) - failures = results.flatMap(_.filter(_.isEmpty)) - failureCount: Int = failures.length - successCount: Int = 
results.map(_.count(_.isDefined)).sum - msg = s"All done (dryRun=$dryRun)! Processed $totalCount tracings in ${batches.length} batches. Took ${Instant - .since(before)}. $failureCount failures, $successCount successes." - _ = if (failures.nonEmpty) { - failures.foreach { failedBox => - logger.info(f"Failed: $failedBox") - } - } - _ = logger.info(msg) - } yield JsonOk(msg) - } - } - - private def addSegmentIndicesToBatch(annotationLayerBatch: List[AnnotationLayer], batchIndex: Int, dryRun: Boolean)( - implicit ec: ExecutionContext) = { - var processedCount = 0 - for { - tracingStore <- tracingStoreDAO.findFirst(GlobalAccessContext) ?~> "tracingStore.notFound" - client = new WKRemoteTracingStoreClient(tracingStore, null, rpc, tracingDataSourceTemporaryStore) - batchCount = annotationLayerBatch.length - results <- Fox.serialSequenceBox(annotationLayerBatch) { annotationLayer => - processedCount += 1 - logger.info( - f"Processing tracing ${annotationLayer.tracingId}. $processedCount of $batchCount in batch $batchIndex (${percent(processedCount, batchCount)})...") - client.addSegmentIndex(annotationLayer.tracingId, dryRun) ?~> s"add segment index failed for ${annotationLayer.tracingId}" - } - _ = logger.info(f"Batch $batchIndex is done. 
Processed ${annotationLayerBatch.length} tracings.") - } yield results - } - - private def batch[T](allItems: List[T], batchCount: Int): List[List[T]] = { - val batchSize: Int = Math.max(Math.min(allItems.length / batchCount, allItems.length), 1) - allItems.grouped(batchSize).toList - } - - private def percent(done: Int, pending: Int) = { - val value = done.toDouble / pending.toDouble * 100 - f"$value%1.1f %%" - } - private def finishAnnotation(typ: String, id: String, issuingUser: User, timestamp: Instant)( implicit ctx: DBAccessContext): Fox[(Annotation, String)] = for { diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index de54a505a10..521baabc1c5 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -128,14 +128,6 @@ class WKRemoteTracingStoreClient( .postJsonWithJsonResponse[Option[BoundingBox], String](datasetBoundingBox) } - def addSegmentIndex(volumeTracingId: String, dryRun: Boolean): Fox[Unit] = - rpc(s"${tracingStore.url}/tracings/volume/$volumeTracingId/addSegmentIndex").withLongTimeout - .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("dryRun" -> dryRun.toString) - .silent - .post() - .map(_ => ()) - def mergeSkeletonTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = { logger.debug("Called to merge SkeletonTracings by ids." 
+ baseInfo) rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromIds").withLongTimeout diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index e439b682927..9bd9313e70e 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -152,7 +152,6 @@ DELETE /annotations/:id POST /annotations/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.mergeWithoutType(id: String, mergedTyp: String, mergedId: String) GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) POST /annotations/:id/acquireMutex controllers.AnnotationController.tryAcquiringAnnotationMutex(id: String) -PATCH /annotations/addSegmentIndicesToAll controllers.AnnotationController.addSegmentIndicesToAll(parallelBatchCount: Int, dryRun: Boolean, skipTracings: Option[String]) GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Option[Long]) PATCH /annotations/:typ/:id/downsample controllers.AnnotationController.downsample(typ: String, id: String, tracingId: String) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 70a285483ce..b617a1cf378 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -410,8 +410,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss remainingUpdateGroups match { case List() => Fox.successful(annotationWithTracings) case updateGroup :: tail => - updateGroupedIter(applyUpdates(annotationWithTracings, annotationId, 
updateGroup._2, updateGroup._1), - tail) + updateGroupedIter( + applyUpdates(annotationWithTracings, annotationId, updateGroup._2, updateGroup._1, reportChangesToWk), + tail) } case _ => annotationWithTracingsFox } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 755a31aee67..97315646e2c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -59,7 +59,9 @@ class TSAnnotationController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { newestMaterializableVersion <- annotationService.currentMaterializableVersion(annotationId) - updateLog <- annotationService.updateActionLog(annotationId, newestVersion.getOrElse(newestMaterializableVersion), oldestVersion.getOrElse(0)) + updateLog <- annotationService.updateActionLog(annotationId, + newestVersion.getOrElse(newestMaterializableVersion), + oldestVersion.getOrElse(0)) } yield Ok(updateLog) } } @@ -99,17 +101,3 @@ class TSAnnotationController @Inject()( } } - -// get version history - -// update layer - -// restore of layer - -// delete layer - -// add layer - -// skeleton + volume routes can now take annotationVersion - -// Is an editable mapping a layer? 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index e0c675d94c5..6608d2a14f2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -2,7 +2,6 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} -import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.ExtendedTypes.ExtendedString import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} @@ -344,31 +343,6 @@ class VolumeTracingController @Inject()( } } - def addSegmentIndex(tracingId: String, dryRun: Boolean): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") - currentVersion <- annotationService.currentMaterializableVersion(tracingId) - before = Instant.now - canAddSegmentIndex <- volumeTracingService.checkIfSegmentIndexMayBeAdded(tracingId, tracing) - processedBucketCountOpt <- Fox.runIf(canAddSegmentIndex)(volumeTracingService - .addSegmentIndex(annotationId, tracingId, tracing, currentVersion, dryRun)) ?~> "addSegmentIndex.failed" - currentVersionNew <- annotationService.currentMaterializableVersion(tracingId) - _ <- Fox.runIf(!dryRun)(bool2Fox( - processedBucketCountOpt.isEmpty || currentVersionNew == currentVersion + 1L) 
?~> "Version increment failed. Looks like someone edited the annotation layer in the meantime.") - duration = Instant.since(before) - _ = processedBucketCountOpt.foreach { processedBucketCount => - logger.info( - s"Added segment index (dryRun=$dryRun) for tracing $tracingId. Took $duration for $processedBucketCount buckets") - } - } yield Ok - } - } - } - def requestAdHocMesh(tracingId: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 06a9ef08973..25e61a4c287 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -811,61 +811,6 @@ class VolumeTracingService @Inject()( } } - def addSegmentIndex(annotationId: String, - tracingId: String, - tracing: VolumeTracing, - currentVersion: Long, - dryRun: Boolean)(implicit tc: TokenContext): Fox[Option[Int]] = { - var processedBucketCount = 0 - for { - isTemporaryTracing <- isTemporaryTracing(tracingId) - sourceDataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing) - buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - fallbackLayer <- getFallbackLayer(tracingId, tracing) - mappingName <- selectMappingName(tracing) - segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - currentVersion + 1L, - remoteDatastoreClient, - fallbackLayer, - sourceDataLayer.additionalAxes, - tc) - _ <- Fox.serialCombined(buckets) { - case (bucketPosition, 
bucketData) => - processedBucketCount += 1 - updateSegmentIndex(segmentIndexBuffer, - bucketPosition, - bucketData, - Empty, - tracing.elementClass, - mappingName, - editableMappingTracingId(tracing, tracingId)) - } - _ <- Fox.runIf(!dryRun)(segmentIndexBuffer.flush()) - updateGroup = UpdateActionGroup( - tracing.version + 1L, - System.currentTimeMillis(), - None, - List(AddSegmentIndexVolumeAction(tracingId)), - None, - None, - "dummyTransactionId", - 1, - 0 - ) - // TODO _ <- Fox.runIf(!dryRun)(handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken)) - } yield Some(processedBucketCount) - } - - def checkIfSegmentIndexMayBeAdded(tracingId: String, tracing: VolumeTracing)(implicit ec: ExecutionContext, - tc: TokenContext): Fox[Boolean] = - for { - fallbackLayerOpt <- Fox.runIf(tracing.fallbackLayer.isDefined)( - remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - canHaveSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayerOpt) - alreadyHasSegmentIndex = tracing.hasSegmentIndex.getOrElse(false) - } yield canHaveSegmentIndex && !alreadyHasSegmentIndex - def importVolumeData(annotationId: String, tracingId: String, tracing: VolumeTracing, diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index dcb3b41ab8b..df237857e93 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -24,7 +24,6 @@ POST /volume/:tracingId/adHocMesh POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(tracingId: String, segmentId: Long) POST 
/volume/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(tracingId: String) -POST /volume/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(tracingId: String, dryRun: Boolean) GET /volume/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(tracingId: String) POST /volume/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(tracingId: String) POST /volume/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(tracingId: String) From a7794d23702ed6a4ee89d1b49fae6e1eeee5bcd1 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 23 Oct 2024 10:19:00 +0200 Subject: [PATCH 116/361] WIP import volume data --- .../controllers/VolumeTracingController.scala | 15 ++++++++------- .../tracings/volume/VolumeTracingService.scala | 11 ++++------- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 6608d2a14f2..0b0305f627c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -22,12 +22,12 @@ import com.scalableminds.webknossos.datastore.models.{ } import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{FullMeshRequest, UserAccessRequest} -import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService +import 
com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - MergedVolumeStats, MagRestrictions, + MergedVolumeStats, TSFullMeshService, VolumeDataZipFormat, VolumeSegmentIndexService, @@ -60,6 +60,7 @@ class VolumeTracingController @Inject()( editableMappingService: EditableMappingService, val slackNotificationService: TSSlackNotificationService, val remoteWebknossosClient: TSRemoteWebknossosClient, + annotationTransactionService: AnnotationTransactionService, volumeSegmentStatisticsService: VolumeSegmentStatisticsService, volumeSegmentIndexService: VolumeSegmentIndexService, fullMeshService: TSFullMeshService, @@ -333,11 +334,11 @@ class VolumeTracingController @Inject()( tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox zipFile <- request.body.files.headOption.map(f => new File(f.ref.path.toString)).toFox - largestSegmentId <- volumeTracingService.importVolumeData(annotationId, - tracingId, - tracing, - zipFile, - currentVersion) + (updateGroup, largestSegmentId) <- volumeTracingService.importVolumeData(tracingId, + tracing, + zipFile, + currentVersion) + _ <- annotationTransactionService.handleUpdateGroups(annotationId, List(updateGroup)) } yield Ok(Json.toJson(largestSegmentId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 25e61a4c287..1f0d68a805d 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -811,11 +811,9 @@ class VolumeTracingService @Inject()( } } - def importVolumeData(annotationId: String, - tracingId: String, - tracing: VolumeTracing, - zipFile: File, - currentVersion: Int)(implicit mp: MessagesProvider, tc: TokenContext): Fox[Long] = + def importVolumeData(tracingId: String, tracing: VolumeTracing, zipFile: File, currentVersion: Int)( + implicit mp: MessagesProvider, + tc: TokenContext): Fox[(UpdateActionGroup, Long)] = if (currentVersion != tracing.version) Fox.failure("version.mismatch") else { @@ -881,8 +879,7 @@ class VolumeTracingService @Inject()( 1, 0 ) - // TODO: _ <- handleUpdateGroup(tracingId, updateGroup, tracing.version, userToken) - } yield mergedVolume.largestSegmentId.toPositiveLong + } yield (updateGroup, mergedVolume.largestSegmentId.toPositiveLong) } } From f150bf26af421197ccdcbf8d89c4bb3d79ff1725 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 23 Oct 2024 11:17:08 +0200 Subject: [PATCH 117/361] sort out some update actions --- .../UpdateGroupHandlingUnitTestSuite.scala | 6 +- .../annotation/AnnotationReversion.scala | 4 +- .../AnnotationTransactionService.scala | 2 +- .../annotation/AnnotationUpdateActions.scala | 108 +++++++++++------- .../annotation/AnnotationWithTracings.scala | 8 +- .../annotation/TSAnnotationService.scala | 22 ++-- .../annotation/UpdateActions.scala | 64 +++++------ .../annotation/UpdateGroupHandling.scala | 4 +- .../EditableMappingUpdater.scala | 4 +- .../updating/SkeletonUpdateActions.scala | 24 +--- .../tracings/volume/VolumeUpdateActions.scala | 25 +--- 11 files changed, 126 insertions(+), 145 deletions(-) diff --git a/test/backend/UpdateGroupHandlingUnitTestSuite.scala b/test/backend/UpdateGroupHandlingUnitTestSuite.scala index dec49103cd1..a2d27f7f00a 100644 --- 
a/test/backend/UpdateGroupHandlingUnitTestSuite.scala +++ b/test/backend/UpdateGroupHandlingUnitTestSuite.scala @@ -1,7 +1,7 @@ package backend -import com.scalableminds.webknossos.tracingstore.annotation.{RevertToVersionUpdateAction, UpdateGroupHandling} -import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{MergeTreeSkeletonAction} +import com.scalableminds.webknossos.tracingstore.annotation.{RevertToVersionAnnotationAction, UpdateGroupHandling} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.MergeTreeSkeletonAction import org.scalatestplus.play.PlaySpec class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling { @@ -16,7 +16,7 @@ class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling )), (6L, List( - RevertToVersionUpdateAction(sourceVersion = 1), + RevertToVersionAnnotationAction(sourceVersion = 1), )), (7L, List( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala index b85e053366d..f7108ffbaad 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -14,7 +14,7 @@ trait AnnotationReversion { def revertDistributedElements(annotationId: String, currentAnnotationWithTracings: AnnotationWithTracings, sourceAnnotationWithTracings: AnnotationWithTracings, - revertAction: RevertToVersionUpdateAction, + revertAction: RevertToVersionAnnotationAction, newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = // TODO segment index, volume buckets, proofreading data for { @@ -36,7 +36,7 @@ trait AnnotationReversion { } yield () private def revertEditableMappingFields(currentAnnotationWithTracings: 
AnnotationWithTracings, - revertAction: RevertToVersionUpdateAction, + revertAction: RevertToVersionAnnotationAction, tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = for { updater <- currentAnnotationWithTracings.getEditableMappingUpdater(tracingId).toFox diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 2399ed2549e..2bad1a3e045 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -229,7 +229,7 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe } actionsWithInfo.map { case a: UpdateBucketVolumeAction => a.withoutBase64Data - case a: AddLayerAnnotationUpdateAction => a.copy(tracingId = Some(TracingId.generate)) + case a: AddLayerAnnotationAction => a.copy(tracingId = Some(TracingId.generate)) case a => a } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index d000402ef0a..4d44bcc1407 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -18,14 +18,15 @@ object AnnotationLayerParameters { Json.using[WithDefaultValues].format[AnnotationLayerParameters] } -trait AnnotationUpdateAction extends ApplyImmediatelyUpdateAction +trait AnnotationUpdateAction extends UpdateAction -case class AddLayerAnnotationUpdateAction(layerParameters: 
AnnotationLayerParameters, - tracingId: Option[String] = None, // filled in by backend eagerly on save - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends AnnotationUpdateAction { +case class AddLayerAnnotationAction(layerParameters: AnnotationLayerParameters, + tracingId: Option[String] = None, // filled in by backend eagerly on save + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) @@ -33,13 +34,14 @@ case class AddLayerAnnotationUpdateAction(layerParameters: AnnotationLayerParame this.copy(actionAuthorId = authorId) } -case class DeleteLayerAnnotationUpdateAction(tracingId: String, - layerName: String, // Just stored for nicer-looking history - `type`: AnnotationLayerType, // Just stored for nicer-looking history - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends AnnotationUpdateAction { +case class DeleteLayerAnnotationAction(tracingId: String, + layerName: String, // Just stored for nicer-looking history + `type`: AnnotationLayerType, // Just stored for nicer-looking history + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) @@ -47,12 +49,13 @@ case class DeleteLayerAnnotationUpdateAction(tracingId: String, this.copy(actionAuthorId = authorId) } -case class 
UpdateLayerMetadataAnnotationUpdateAction(tracingId: String, - layerName: String, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends AnnotationUpdateAction { +case class UpdateLayerMetadataAnnotationAction(tracingId: String, + layerName: String, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) @@ -60,12 +63,13 @@ case class UpdateLayerMetadataAnnotationUpdateAction(tracingId: String, this.copy(actionAuthorId = authorId) } -case class UpdateMetadataAnnotationUpdateAction(name: Option[String], - description: Option[String], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends AnnotationUpdateAction { +case class UpdateMetadataAnnotationAction(name: Option[String], + description: Option[String], + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) @@ -73,33 +77,51 @@ case class UpdateMetadataAnnotationUpdateAction(name: Option[String], this.copy(actionAuthorId = authorId) } -case class RevertToVersionUpdateAction(sourceVersion: Long, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class RevertToVersionAnnotationAction(sourceVersion: Long, + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: 
Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + +case class UpdateTdCameraAnnotationAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends AnnotationUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + + override def isViewOnlyChange: Boolean = true } -object AddLayerAnnotationUpdateAction { - implicit val jsonFormat: OFormat[AddLayerAnnotationUpdateAction] = Json.format[AddLayerAnnotationUpdateAction] +object AddLayerAnnotationAction { + implicit val jsonFormat: OFormat[AddLayerAnnotationAction] = Json.format[AddLayerAnnotationAction] +} +object DeleteLayerAnnotationAction { + implicit val jsonFormat: OFormat[DeleteLayerAnnotationAction] = Json.format[DeleteLayerAnnotationAction] } -object DeleteLayerAnnotationUpdateAction { - implicit val jsonFormat: OFormat[DeleteLayerAnnotationUpdateAction] = Json.format[DeleteLayerAnnotationUpdateAction] +object UpdateLayerMetadataAnnotationAction { + implicit val jsonFormat: OFormat[UpdateLayerMetadataAnnotationAction] = + Json.format[UpdateLayerMetadataAnnotationAction] } -object UpdateLayerMetadataAnnotationUpdateAction { - implicit val jsonFormat: OFormat[UpdateLayerMetadataAnnotationUpdateAction] = - Json.format[UpdateLayerMetadataAnnotationUpdateAction] +object UpdateMetadataAnnotationAction { + implicit val jsonFormat: 
OFormat[UpdateMetadataAnnotationAction] = + Json.format[UpdateMetadataAnnotationAction] } -object UpdateMetadataAnnotationUpdateAction { - implicit val jsonFormat: OFormat[UpdateMetadataAnnotationUpdateAction] = - Json.format[UpdateMetadataAnnotationUpdateAction] +object RevertToVersionAnnotationAction { + implicit val jsonFormat: OFormat[RevertToVersionAnnotationAction] = + Json.format[RevertToVersionAnnotationAction] } -object RevertToVersionUpdateAction { - implicit val jsonFormat: OFormat[RevertToVersionUpdateAction] = - Json.format[RevertToVersionUpdateAction] +object UpdateTdCameraAnnotationAction { + implicit val jsonFormat: OFormat[UpdateTdCameraAnnotationAction] = Json.format[UpdateTdCameraAnnotationAction] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 6996b398d6d..fddeda5bf6b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -78,7 +78,7 @@ case class AnnotationWithTracings( def version: Long = annotation.version - def addLayer(a: AddLayerAnnotationUpdateAction, + def addLayer(a: AddLayerAnnotationAction, tracingId: String, tracing: Either[SkeletonTracing, VolumeTracing]): AnnotationWithTracings = this.copy( @@ -91,14 +91,14 @@ case class AnnotationWithTracings( tracingsById = tracingsById.updated(tracingId, tracing) ) - def deleteTracing(a: DeleteLayerAnnotationUpdateAction): AnnotationWithTracings = + def deleteTracing(a: DeleteLayerAnnotationAction): AnnotationWithTracings = this.copy(annotation = annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId))) - def updateLayerMetadata(a: UpdateLayerMetadataAnnotationUpdateAction): AnnotationWithTracings = 
+ def updateLayerMetadata(a: UpdateLayerMetadataAnnotationAction): AnnotationWithTracings = this.copy(annotation = annotation.copy(layers = annotation.layers.map(l => if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l))) - def updateMetadata(a: UpdateMetadataAnnotationUpdateAction): AnnotationWithTracings = + def updateMetadata(a: UpdateMetadataAnnotationAction): AnnotationWithTracings = this.copy(annotation = annotation.copy(name = a.name, description = a.description)) def withVersion(newVersion: Long): AnnotationWithTracings = { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index b617a1cf378..1240e66c9bf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -106,13 +106,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { updated <- updateAction match { - case a: AddLayerAnnotationUpdateAction => + case a: AddLayerAnnotationAction => addLayer(annotationId, annotationWithTracings, a, targetVersion) - case a: DeleteLayerAnnotationUpdateAction => + case a: DeleteLayerAnnotationAction => Fox.successful(annotationWithTracings.deleteTracing(a)) - case a: UpdateLayerMetadataAnnotationUpdateAction => + case a: UpdateLayerMetadataAnnotationAction => Fox.successful(annotationWithTracings.updateLayerMetadata(a)) - case a: UpdateMetadataAnnotationUpdateAction => + case a: UpdateMetadataAnnotationAction => Fox.successful(annotationWithTracings.updateMetadata(a)) case a: SkeletonUpdateAction => annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" @@ -125,7 
+125,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationWithTracings.applyVolumeAction(a) case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) - case a: RevertToVersionUpdateAction => + case a: RevertToVersionAnnotationAction => revertToVersion(annotationId, annotationWithTracings, a, targetVersion) // TODO if the revert action is not isolated, we need not the target version of all but the target version of this update case _: BucketMutatingVolumeUpdateAction => Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. @@ -135,7 +135,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def addLayer(annotationId: String, annotationWithTracings: AnnotationWithTracings, - action: AddLayerAnnotationUpdateAction, + action: AddLayerAnnotationAction, targetVersion: Long)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { tracingId <- action.tracingId.toFox ?~> "add layer action has no tracingId" @@ -146,10 +146,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield updated private def revertToVersion( - annotationId: String, - annotationWithTracings: AnnotationWithTracings, - revertAction: RevertToVersionUpdateAction, - newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionAnnotationAction, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = // Note: works only after “ironing out” the update action groups // TODO: read old annotationProto, tracing, buckets, segment indices for { @@ -164,7 +164,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- revertDistributedElements(annotationId, annotationWithTracings, 
sourceAnnotation, revertAction, newVersion) } yield sourceAnnotation - def createTracing(a: AddLayerAnnotationUpdateAction)( + def createTracing(a: AddLayerAnnotationAction)( implicit ec: ExecutionContext): Fox[Either[SkeletonTracing, VolumeTracing]] = Fox.failure("not implemented") // TODO create tracing object (ask wk for needed parameters e.g. fallback layer info?) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index b13a71c58db..091ae7f327e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -14,7 +14,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ MergeTreeSkeletonAction, MoveTreeComponentSkeletonAction, UpdateNodeSkeletonAction, - UpdateTdCameraSkeletonAction, UpdateTracingSkeletonAction, UpdateTreeEdgesVisibilitySkeletonAction, UpdateTreeGroupVisibilitySkeletonAction, @@ -35,7 +34,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ UpdateMappingNameVolumeAction, UpdateSegmentGroupsVolumeAction, UpdateSegmentVolumeAction, - UpdateTdCameraVolumeAction, UpdateTracingVolumeAction, UpdateUserBoundingBoxVisibilityVolumeAction, UpdateUserBoundingBoxesVolumeAction @@ -90,30 +88,29 @@ object UpdateAction { case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) case "updateUserBoundingBoxesInVolumeTracing" => - deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) // TODO: rename key (must be different from skeleton action) + deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) case "updateUserBoundingBoxVisibilityInVolumeTracing" => 
deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) - case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) - case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) - case "updateTdCameraSkeleton" => deserialize[UpdateTdCameraSkeletonAction](jsonValue) // TODO deduplicate? - case "updateTdCameraVolume" => deserialize[UpdateTdCameraVolumeAction](jsonValue) - case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) - case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) - case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) - case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) - case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) - case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) + case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) + case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) + case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) + case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) + case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) + case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) + case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) + case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) // Editable Mapping case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) case "splitAgglomerate" => deserialize[SplitAgglomerateUpdateAction](jsonValue) // Annotation - case "addLayerToAnnotation" => deserialize[AddLayerAnnotationUpdateAction](jsonValue) - case "deleteLayerFromAnnotation" => deserialize[DeleteLayerAnnotationUpdateAction](jsonValue) - case "updateLayerMetadata" => 
deserialize[UpdateLayerMetadataAnnotationUpdateAction](jsonValue) - case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationUpdateAction](jsonValue) - case "revertToVersion" => deserialize[RevertToVersionUpdateAction](jsonValue) + case "addLayerToAnnotation" => deserialize[AddLayerAnnotationAction](jsonValue) + case "deleteLayerFromAnnotation" => deserialize[DeleteLayerAnnotationAction](jsonValue) + case "updateLayerMetadata" => deserialize[UpdateLayerMetadataAnnotationAction](jsonValue) + case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationAction](jsonValue) + case "revertToVersion" => deserialize[RevertToVersionAnnotationAction](jsonValue) + case "updateTdCamera" => deserialize[UpdateTdCameraAnnotationAction](jsonValue) case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") } @@ -170,8 +167,6 @@ object UpdateAction { case s: UpdateUserBoundingBoxVisibilitySkeletonAction => Json.obj("name" -> "updateUserBoundingBoxVisibilityInSkeletonTracing", "value" -> Json.toJson(s)(UpdateUserBoundingBoxVisibilitySkeletonAction.jsonFormat)) - case s: UpdateTdCameraSkeletonAction => - Json.obj("name" -> "updateTdCameraSkeleton", "value" -> Json.toJson(s)(UpdateTdCameraSkeletonAction.jsonFormat)) // Volume case s: UpdateBucketVolumeAction => @@ -188,8 +183,6 @@ object UpdateAction { Json.obj("name" -> "removeFallbackLayer", "value" -> Json.toJson(s)(RemoveFallbackLayerVolumeAction.jsonFormat)) case s: ImportVolumeDataVolumeAction => Json.obj("name" -> "importVolumeTracing", "value" -> Json.toJson(s)(ImportVolumeDataVolumeAction.jsonFormat)) - case s: UpdateTdCameraVolumeAction => - Json.obj("name" -> "updateTdCameraVolume", "value" -> Json.toJson(s)(UpdateTdCameraVolumeAction.jsonFormat)) case s: CreateSegmentVolumeAction => Json.obj("name" -> "createSegment", "value" -> Json.toJson(s)(CreateSegmentVolumeAction.jsonFormat)) case s: UpdateSegmentVolumeAction => @@ -209,19 +202,21 @@ object UpdateAction { 
Json.obj("name" -> "mergeAgglomerate", "value" -> Json.toJson(s)(MergeAgglomerateUpdateAction.jsonFormat)) // Annotation - case s: AddLayerAnnotationUpdateAction => - Json.obj("name" -> "addLayerToAnnotation", "value" -> Json.toJson(s)(AddLayerAnnotationUpdateAction.jsonFormat)) - case s: DeleteLayerAnnotationUpdateAction => + case s: AddLayerAnnotationAction => + Json.obj("name" -> "addLayerToAnnotation", "value" -> Json.toJson(s)(AddLayerAnnotationAction.jsonFormat)) + case s: DeleteLayerAnnotationAction => Json.obj("name" -> "deleteLayerFromAnnotation", - "value" -> Json.toJson(s)(DeleteLayerAnnotationUpdateAction.jsonFormat)) - case s: UpdateLayerMetadataAnnotationUpdateAction => + "value" -> Json.toJson(s)(DeleteLayerAnnotationAction.jsonFormat)) + case s: UpdateLayerMetadataAnnotationAction => Json.obj("name" -> "updateLayerMetadata", - "value" -> Json.toJson(s)(UpdateLayerMetadataAnnotationUpdateAction.jsonFormat)) - case s: UpdateMetadataAnnotationUpdateAction => + "value" -> Json.toJson(s)(UpdateLayerMetadataAnnotationAction.jsonFormat)) + case s: UpdateMetadataAnnotationAction => Json.obj("name" -> "updateMetadataOfAnnotation", - "value" -> Json.toJson(s)(UpdateMetadataAnnotationUpdateAction.jsonFormat)) - case s: RevertToVersionUpdateAction => - Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionUpdateAction.jsonFormat)) + "value" -> Json.toJson(s)(UpdateMetadataAnnotationAction.jsonFormat)) + case s: RevertToVersionAnnotationAction => + Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionAnnotationAction.jsonFormat)) + case s: UpdateTdCameraAnnotationAction => + Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraAnnotationAction.jsonFormat)) } } } @@ -236,9 +231,8 @@ case class UpdateActionGroup(version: Long, transactionGroupCount: Int, transactionGroupIndex: Int) { - def significantChangesCount: Int = 1 // TODO - - def viewChangesCount: Int = 1 // TODO + def 
significantChangesCount: Int = actions.count(!_.isViewOnlyChange) + def viewChangesCount: Int = actions.count(_.isViewOnlyChange) } object UpdateActionGroup { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala index 6893d1bcdc5..de77458a9e3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala @@ -25,8 +25,8 @@ trait UpdateGroupHandling { } private def isIsolationSensitiveAction(a: UpdateAction): Boolean = a match { - case _: RevertToVersionUpdateAction => true - case _: AddLayerAnnotationUpdateAction => true + case _: RevertToVersionAnnotationAction => true + case _: AddLayerAnnotationAction => true case _ => false } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 15f5fda72cc..d7231195a67 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient import com.scalableminds.webknossos.tracingstore.annotation.{ - RevertToVersionUpdateAction, + RevertToVersionAnnotationAction, TSAnnotationService, UpdateAction } @@ -418,7 +418,7 @@ class EditableMappingUpdater( ) } - def revertToVersion(revertAction: 
RevertToVersionUpdateAction)(implicit ec: ExecutionContext): Fox[Unit] = + def revertToVersion(revertAction: RevertToVersionAnnotationAction)(implicit ec: ExecutionContext): Fox[Unit] = for { _ <- bool2Fox(revertAction.sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" _ = segmentToAgglomerateBuffer.clear() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index f78b744f3fa..4b7a79a4be8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -402,6 +402,8 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + + override def isViewOnlyChange: Boolean = true } case class UpdateTreeVisibilitySkeletonAction(treeId: Int, @@ -425,6 +427,8 @@ case class UpdateTreeVisibilitySkeletonAction(treeId: Int, override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + + override def isViewOnlyChange: Boolean = true } case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], @@ -534,23 +538,8 @@ case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[I override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) -} - -case class UpdateTdCameraSkeletonAction(actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - actionTracingId: String, - info: Option[String] = None) - extends SkeletonUpdateAction { - - override def 
applyOn(tracing: SkeletonTracing): SkeletonTracing = tracing - - override def addTimestamp(timestamp: Long): UpdateAction = - this.copy(actionTimestamp = Some(timestamp)) - - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = - this.copy(actionAuthorId = authorId) + override def isViewOnlyChange: Boolean = true } object CreateTreeSkeletonAction { @@ -608,6 +597,3 @@ object UpdateUserBoundingBoxVisibilitySkeletonAction { implicit val jsonFormat: OFormat[UpdateUserBoundingBoxVisibilitySkeletonAction] = Json.format[UpdateUserBoundingBoxVisibilitySkeletonAction] } -object UpdateTdCameraSkeletonAction { - implicit val jsonFormat: OFormat[UpdateTdCameraSkeletonAction] = Json.format[UpdateTdCameraSkeletonAction] -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 04833f3dbcc..a3e7ff236b2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -112,8 +112,6 @@ case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def isViewOnlyChange: Boolean = true - override def applyOn(tracing: VolumeTracing): VolumeTracing = { def updateUserBoundingBoxes(): Seq[NamedBoundingBoxProto] = @@ -126,6 +124,8 @@ case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int tracing.withUserBoundingBoxes(updateUserBoundingBoxes()) } + + override def isViewOnlyChange: Boolean = true } case class RemoveFallbackLayerVolumeAction(actionTracingId: 
String, @@ -172,24 +172,6 @@ case class AddSegmentIndexVolumeAction(actionTracingId: String, } -case class UpdateTdCameraVolumeAction(actionTracingId: String, - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends ApplyableVolumeUpdateAction { - - override def addTimestamp(timestamp: Long): VolumeUpdateAction = - this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = - this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - - override def applyOn(tracing: VolumeTracing): VolumeTracing = - tracing - - override def isViewOnlyChange: Boolean = true -} - case class CreateSegmentVolumeAction(id: Long, anchorPosition: Option[Vec3Int], name: Option[String], @@ -393,9 +375,6 @@ object ImportVolumeDataVolumeAction { object AddSegmentIndexVolumeAction { implicit val jsonFormat: OFormat[AddSegmentIndexVolumeAction] = Json.format[AddSegmentIndexVolumeAction] } -object UpdateTdCameraVolumeAction { - implicit val jsonFormat: OFormat[UpdateTdCameraVolumeAction] = Json.format[UpdateTdCameraVolumeAction] -} object CreateSegmentVolumeAction { implicit val jsonFormat: OFormat[CreateSegmentVolumeAction] = Json.format[CreateSegmentVolumeAction] } From ecb4b9d443eec597f0a148cdc4ee513fe4adcf8b Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 24 Oct 2024 11:32:59 +0200 Subject: [PATCH 118/361] prepare duplicate --- app/controllers/AnnotationController.scala | 59 ++++--------------- app/models/annotation/AnnotationService.scala | 4 +- .../WKRemoteTracingStoreClient.scala | 35 +++++++---- conf/webknossos.latest.routes | 2 - .../webknossos/datastore/rpc/RPCRequest.scala | 5 ++ .../annotation/TSAnnotationService.scala | 9 +-- .../controllers/TSAnnotationController.scala | 20 +++++++ ...alableminds.webknossos.tracingstore.routes | 1 + 8 files changed, 66 insertions(+), 69 deletions(-) diff --git 
a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 81c1aa9f332..ff05b2b9cc9 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -3,7 +3,6 @@ package controllers import org.apache.pekko.util.Timeout import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} -import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.annotation.{ @@ -11,8 +10,8 @@ import com.scalableminds.webknossos.datastore.models.annotation.{ AnnotationLayerStatistics, AnnotationLayerType } -import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters +import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingType} import mail.{MailchimpClient, MailchimpTag} import models.analytics.{AnalyticsService, CreateAnnotationEvent, OpenAnnotationEvent} @@ -43,7 +42,6 @@ class AnnotationController @Inject()( userDAO: UserDAO, organizationDAO: OrganizationDAO, datasetDAO: DatasetDAO, - tracingStoreDAO: TracingStoreDAO, datasetService: DatasetService, annotationService: AnnotationService, annotationMutexService: AnnotationMutexService, @@ -59,9 +57,7 @@ class AnnotationController @Inject()( analyticsService: AnalyticsService, slackNotificationService: SlackNotificationService, mailchimpClient: MailchimpClient, - tracingDataSourceTemporaryStore: TracingDataSourceTemporaryStore, conf: WkConf, - rpc: RPC, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with UserAwareRequestLogging @@ -233,30 +229,6 @@ class AnnotationController @Inject()( } yield JsonOk(json) } - def 
downsample(typ: String, id: String, tracingId: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => - for { - _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.downsample.explorationalsOnly" - restrictions <- provider.restrictionsFor(typ, id) ?~> "restrictions.notFound" ~> NOT_FOUND - _ <- restrictions.allowUpdate(request.identity) ?~> "notAllowed" ~> FORBIDDEN - annotation <- provider.provideAnnotation(typ, id, request.identity) - annotationLayer <- annotation.annotationLayers - .find(_.tracingId == tracingId) - .toFox ?~> "annotation.downsample.layerNotFound" - _ <- annotationService.downsampleAnnotation(annotation, annotationLayer) ?~> "annotation.downsample.failed" - updated <- provider.provideAnnotation(typ, id, request.identity) - json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" - } yield JsonOk(json) - } - - def downsampleWithoutType(id: String, tracingId: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => - for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND - result <- downsample(annotation.typ.toString, id, tracingId)(request) - } yield result - } - private def finishAnnotation(typ: String, id: String, issuingUser: User, timestamp: Instant)( implicit ctx: DBAccessContext): Fox[(Annotation, String)] = for { @@ -461,12 +433,16 @@ class AnnotationController @Inject()( datasetService.dataSourceFor(dataset).flatMap(_.toUsable).map(Some(_)) else Fox.successful(None) tracingStoreClient <- tracingStoreService.clientFor(dataset) - newAnnotationLayers <- Fox.serialCombined(annotation.annotationLayers) { annotationLayer => - duplicateAnnotationLayer(annotationLayer, - annotation._task.isDefined, - dataSource.map(_.boundingBox), - tracingStoreClient) - } + newAnnotationProto <- tracingStoreClient.duplicateAnnotation( + annotation._id, + version = None, + isFromTask = annotation._task.isDefined, + 
editPosition = None, + editRotation = None, + boundingBox = dataSource.map(_.boundingBox), + magRestrictions = MagRestrictions.empty + ) + newAnnotationLayers = newAnnotationProto.layers.map(AnnotationLayer.fromProto) clonedAnnotation <- annotationService.createFrom(user, dataset, newAnnotationLayers, @@ -475,19 +451,6 @@ class AnnotationController @Inject()( annotation.description) ?~> Messages("annotation.create.failed") } yield clonedAnnotation - private def duplicateAnnotationLayer(annotationLayer: AnnotationLayer, - isFromTask: Boolean, - datasetBoundingBox: Option[BoundingBox], - tracingStoreClient: WKRemoteTracingStoreClient): Fox[AnnotationLayer] = - for { - - newTracingId <- if (annotationLayer.typ == AnnotationLayerType.Skeleton) { - tracingStoreClient.duplicateSkeletonTracing(annotationLayer.tracingId, None, isFromTask) ?~> "Failed to duplicate skeleton tracing." - } else { - tracingStoreClient.duplicateVolumeTracing(annotationLayer.tracingId, isFromTask, datasetBoundingBox) ?~> "Failed to duplicate volume tracing." 
- } - } yield annotationLayer.copy(tracingId = newTracingId) - def tryAcquiringAnnotationMutex(id: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => logTime(slackNotificationService.noticeSlowRequest, durationThreshold = 1 second) { diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index de3160bc87c..5c7cc8aa431 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -502,7 +502,7 @@ class AnnotationService @Inject()( def createFrom(user: User, dataset: Dataset, - annotationLayers: List[AnnotationLayer], + annotationLayers: Seq[AnnotationLayer], annotationType: AnnotationType, name: Option[String], description: String): Fox[Annotation] = @@ -513,7 +513,7 @@ class AnnotationService @Inject()( None, teamId, user._id, - annotationLayers, + annotationLayers.toList, description, name = name.getOrElse(""), typ = annotationType) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 521baabc1c5..017cc421d96 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -90,23 +90,34 @@ class WKRemoteTracingStoreClient( .postProto[AnnotationProto](annotationProto) } - def duplicateSkeletonTracing(skeletonTracingId: String, - versionString: Option[String] = None, - isFromTask: Boolean = false, - editPosition: Option[Vec3Int] = None, - editRotation: Option[Vec3Double] = None, - boundingBox: Option[BoundingBox] = None): Fox[String] = { - logger.debug("Called to duplicate SkeletonTracing." 
+ baseInfo) - rpc(s"${tracingStore.url}/tracings/skeleton/$skeletonTracingId/duplicate").withLongTimeout + def duplicateAnnotation(annotationId: ObjectId, + version: Option[Long], + isFromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions, + ): Fox[AnnotationProto] = { + logger.debug(s"Called to duplicate annotation $annotationId." + baseInfo) + rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/duplicate").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryStringOptional("version", versionString) + .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) - .addQueryString("fromTask" -> isFromTask.toString) - .postWithJsonResponse[String] + .addQueryString("isFromTask" -> isFromTask.toString) + .addQueryStringOptional("minMag", magRestrictions.minStr) + .addQueryStringOptional("maxMag", magRestrictions.maxStr) + .postWithProtoResponse[AnnotationProto]()(AnnotationProto) } + def duplicateSkeletonTracing(skeletonTracingId: String, + versionString: Option[String] = None, + isFromTask: Boolean = false, + editPosition: Option[Vec3Int] = None, + editRotation: Option[Vec3Double] = None, + boundingBox: Option[BoundingBox] = None): Fox[String] = ??? 
+ def duplicateVolumeTracing(volumeTracingId: String, isFromTask: Boolean = false, datasetBoundingBox: Option[BoundingBox] = None, @@ -119,8 +130,6 @@ class WKRemoteTracingStoreClient( rpc(s"${tracingStore.url}/tracings/volume/$volumeTracingId/duplicate").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("fromTask" -> isFromTask.toString) - .addQueryStringOptional("minMag", magRestrictions.minStr) - .addQueryStringOptional("maxMag", magRestrictions.maxStr) .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 9bd9313e70e..877d9e1d28a 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -147,14 +147,12 @@ PATCH /annotations/:typ/:id/transfer PATCH /annotations/:typ/:id/editLockedState controllers.AnnotationController.editLockedState(typ: String, id: String, isLockedByOwner: Boolean) GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Option[Long]) -PATCH /annotations/:id/downsample controllers.AnnotationController.downsampleWithoutType(id: String, tracingId: String) DELETE /annotations/:id controllers.AnnotationController.cancelWithoutType(id: String) POST /annotations/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.mergeWithoutType(id: String, mergedTyp: String, mergedId: String) GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) POST /annotations/:id/acquireMutex controllers.AnnotationController.tryAcquiringAnnotationMutex(id: String) GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: 
String, timestamp: Option[Long]) -PATCH /annotations/:typ/:id/downsample controllers.AnnotationController.downsample(typ: String, id: String, tracingId: String) DELETE /annotations/:typ/:id controllers.AnnotationController.cancel(typ: String, id: String) POST /annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.merge(typ: String, id: String, mergedTyp: String, mergedId: String) GET /annotations/:typ/:id/download controllers.AnnotationIOController.download(typ: String, id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index ba8aee37c6d..c0d9b8695d2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -191,6 +191,11 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: parseProtoResponse(performRequest)(companion) } + def postWithProtoResponse[T <: GeneratedMessage]()(companion: GeneratedMessageCompanion[T]): Fox[T] = { + request = request.withMethod("POST") + parseProtoResponse(performRequest)(companion) + } + private def performRequest: Fox[WSResponse] = { if (verbose) { logger.debug( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 1240e66c9bf..5d472b3c1f6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -146,10 +146,10 @@ class 
TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield updated private def revertToVersion( - annotationId: String, - annotationWithTracings: AnnotationWithTracings, - revertAction: RevertToVersionAnnotationAction, - newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + revertAction: RevertToVersionAnnotationAction, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = // Note: works only after “ironing out” the update action groups // TODO: read old annotationProto, tracing, buckets, segment indeces for { @@ -612,4 +612,5 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } + def duplicate(annotationId: String, version: Option[Long]): Fox[AnnotationProto] = ??? // TODO } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 97315646e2c..2649ca295c4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -100,4 +100,24 @@ class TSAnnotationController @Inject()( } } + def duplicate(annotationId: String, + version: Option[Long], + isFromTask: Option[Boolean], + minMag: Option[Int], + maxMag: Option[Int], + downsample: Option[Boolean], + editPosition: Option[String], + editRotation: Option[String], + boundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { + for { + 
annotationProto <- annotationService.duplicate(annotationId, version) + } yield Ok(annotationProto.toByteArray).as(protobufMimeType) + } + } + } + } } diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index df237857e93..a94898a9f11 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -11,6 +11,7 @@ POST /annotation/:annotationId/update GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) +POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, version: Option[Long], isFromTask: Option[Boolean], minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() From a007194e568ff1dfb2857193b97183813df7c0b3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 24 Oct 2024 13:47:40 +0200 Subject: [PATCH 119/361] iterate on duplicate --- app/controllers/AnnotationController.scala | 5 ++- app/controllers/AnnotationIOController.scala | 3 +- app/models/annotation/AnnotationService.scala | 5 ++- .../WKRemoteTracingStoreClient.scala | 2 + 
.../annotation/TSAnnotationService.scala | 40 ++++++++++++++++++- .../controllers/TSAnnotationController.scala | 3 +- ...alableminds.webknossos.tracingstore.routes | 2 +- 7 files changed, 52 insertions(+), 8 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index ff05b2b9cc9..9a915500a90 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -433,8 +433,10 @@ class AnnotationController @Inject()( datasetService.dataSourceFor(dataset).flatMap(_.toUsable).map(Some(_)) else Fox.successful(None) tracingStoreClient <- tracingStoreService.clientFor(dataset) + newAnnotationId = ObjectId.generate newAnnotationProto <- tracingStoreClient.duplicateAnnotation( annotation._id, + newAnnotationId, version = None, isFromTask = annotation._task.isDefined, editPosition = None, @@ -448,7 +450,8 @@ class AnnotationController @Inject()( newAnnotationLayers, AnnotationType.Explorational, None, - annotation.description) ?~> Messages("annotation.create.failed") + annotation.description, + newAnnotationId) ?~> Messages("annotation.create.failed") } yield clonedAnnotation def tryAcquiringAnnotationMutex(id: String): Action[AnyContent] = diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 3215b05c3a7..08364247c6c 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -146,7 +146,8 @@ class AnnotationIOController @Inject()( mergedSkeletonLayers ::: mergedVolumeLayers, AnnotationType.Explorational, name, - description) + description, + ObjectId.generate) _ = analyticsService.track(UploadAnnotationEvent(request.identity, annotation)) } yield JsonOk( diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 5c7cc8aa431..076bfce75ee 100755 --- a/app/models/annotation/AnnotationService.scala +++ 
b/app/models/annotation/AnnotationService.scala @@ -505,10 +505,11 @@ class AnnotationService @Inject()( annotationLayers: Seq[AnnotationLayer], annotationType: AnnotationType, name: Option[String], - description: String): Fox[Annotation] = + description: String, + newAnnotationId: ObjectId): Fox[Annotation] = for { teamId <- selectSuitableTeam(user, dataset) - annotation = Annotation(ObjectId.generate, + annotation = Annotation(newAnnotationId, dataset._id, None, teamId, diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 017cc421d96..3ee30770dc1 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -91,6 +91,7 @@ class WKRemoteTracingStoreClient( } def duplicateAnnotation(annotationId: ObjectId, + newAnnotationId: ObjectId, version: Option[Long], isFromTask: Boolean, editPosition: Option[Vec3Int], @@ -101,6 +102,7 @@ class WKRemoteTracingStoreClient( logger.debug(s"Called to duplicate annotation $annotationId." 
+ baseInfo) rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/duplicate").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("newAnnotationId" -> newAnnotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 5d472b3c1f6..542e1cc27cf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -5,7 +5,11 @@ import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} -import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerTypeProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.Annotation.{ + AnnotationLayerProto, + AnnotationLayerTypeProto, + AnnotationProto +} import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -612,5 +616,37 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } - def duplicate(annotationId: String, version: Option[Long]): Fox[AnnotationProto] = ??? // TODO + // TODO duplicate v0 as well? 
(if current version is not v0) + def duplicate(annotationId: String, newAnnotationId: String, version: Option[Long])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationProto] = + for { + current <- get(annotationId, version) + newLayers <- Fox.serialCombined(current.layers)(layer => duplicateLayer(annotationId, layer, version)) + duplicated = current.copy(layers = newLayers) + // TODO save duplicated + } yield duplicated + + private def duplicateLayer(annotationId: String, layer: AnnotationLayerProto, version: Option[Long])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationLayerProto] = + for { + newTracingId <- layer.`type` match { + case AnnotationLayerTypeProto.volume => duplicateVolumeTracing(layer.tracingId, version) + case AnnotationLayerTypeProto.skeleton => duplicateSkeletonTracing(layer.tracingId, version) + case AnnotationLayerTypeProto.Unrecognized(num) => Fox.failure(f"unrecognized annotation layer type: $num") + } + } yield layer.copy(tracingId = newTracingId) + + private def duplicateVolumeTracing(tracingId: String, version: Option[Long])( + implicit ec: ExecutionContext): Fox[String] = { + val newTracingId = TracingId.generate + Fox.successful(newTracingId) + } + + private def duplicateSkeletonTracing(tracingId: String, version: Option[Long])( + implicit ec: ExecutionContext): Fox[String] = { + val newTracingId = TracingId.generate + Fox.successful(newTracingId) + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 2649ca295c4..6a7a232374a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -101,6 +101,7 @@ class 
TSAnnotationController @Inject()( } def duplicate(annotationId: String, + newAnnotationId: String, version: Option[Long], isFromTask: Option[Boolean], minMag: Option[Int], @@ -114,7 +115,7 @@ class TSAnnotationController @Inject()( logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { for { - annotationProto <- annotationService.duplicate(annotationId, version) + annotationProto <- annotationService.duplicate(annotationId, newAnnotationId, version) } yield Ok(annotationProto.toByteArray).as(protobufMimeType) } } diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index a94898a9f11..5edab1323f4 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -11,7 +11,7 @@ POST /annotation/:annotationId/update GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) -POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, version: Option[Long], isFromTask: Option[Boolean], minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST 
/annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Option[Boolean], minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() From af0630a90d1822fae64e1237b456ed9cf7c37003 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 24 Oct 2024 14:07:40 +0200 Subject: [PATCH 120/361] iterate on duplicate --- .../annotation/TSAnnotationService.scala | 75 ++++++++++++++----- .../SkeletonTracingController.scala | 6 +- .../controllers/TSAnnotationController.scala | 15 +++- .../skeleton/SkeletonTracingService.scala | 13 ++-- ...alableminds.webknossos.tracingstore.routes | 2 +- 5 files changed, 78 insertions(+), 33 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 542e1cc27cf..18f4cc1df86 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache +import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} @@ -617,36 +618,74 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } 
// TODO duplicate v0 as well? (if current version is not v0) - def duplicate(annotationId: String, newAnnotationId: String, version: Option[Long])( - implicit ec: ExecutionContext, - tc: TokenContext): Fox[AnnotationProto] = + def duplicate( + annotationId: String, + newAnnotationId: String, + version: Option[Long], + isFromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { - current <- get(annotationId, version) - newLayers <- Fox.serialCombined(current.layers)(layer => duplicateLayer(annotationId, layer, version)) - duplicated = current.copy(layers = newLayers) - // TODO save duplicated - } yield duplicated - - private def duplicateLayer(annotationId: String, layer: AnnotationLayerProto, version: Option[Long])( - implicit ec: ExecutionContext, - tc: TokenContext): Fox[AnnotationLayerProto] = + currentAnnotation <- get(annotationId, version) + newLayers <- Fox.serialCombined(currentAnnotation.layers)( + layer => + duplicateLayer(annotationId, + layer, + currentAnnotation.version, + isFromTask, + editPosition, + editRotation, + boundingBox)) + duplicatedAnnotation = currentAnnotation.copy(layers = newLayers) + _ <- tracingDataStore.annotations.put(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) + } yield duplicatedAnnotation + + private def duplicateLayer( + annotationId: String, + layer: AnnotationLayerProto, + version: Long, + isFromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationLayerProto] = for { newTracingId <- layer.`type` match { - case AnnotationLayerTypeProto.volume => duplicateVolumeTracing(layer.tracingId, version) - case AnnotationLayerTypeProto.skeleton => duplicateSkeletonTracing(layer.tracingId, version) + case AnnotationLayerTypeProto.volume => 
duplicateVolumeTracing(annotationId, layer.tracingId, version) + case AnnotationLayerTypeProto.skeleton => + duplicateSkeletonTracing(annotationId, + layer.tracingId, + version, + isFromTask, + editPosition, + editRotation, + boundingBox) case AnnotationLayerTypeProto.Unrecognized(num) => Fox.failure(f"unrecognized annotation layer type: $num") } } yield layer.copy(tracingId = newTracingId) - private def duplicateVolumeTracing(tracingId: String, version: Option[Long])( + private def duplicateVolumeTracing(annotationId: String, tracingId: String, version: Long)( implicit ec: ExecutionContext): Fox[String] = { val newTracingId = TracingId.generate Fox.successful(newTracingId) } - private def duplicateSkeletonTracing(tracingId: String, version: Option[Long])( - implicit ec: ExecutionContext): Fox[String] = { + private def duplicateSkeletonTracing( + annotationId: String, + tracingId: String, + version: Long, + isFromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { val newTracingId = TracingId.generate - Fox.successful(newTracingId) + for { + skeleton <- findSkeleton(annotationId, tracingId, Some(version)) + adaptedSkeleton = skeletonTracingService + .adaptSkeletonForDuplicate(skeleton, isFromTask, editPosition, editRotation, boundingBox) + .withVersion(version) + _ <- tracingDataStore.skeletons.put(newTracingId, version, adaptedSkeleton) + } yield newTracingId } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index f41279b4928..faa71d0711a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -159,11 +159,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - newId <- skeletonTracingService.duplicate(tracing, - fromTask.getOrElse(false), - editPositionParsed, - editRotationParsed, - boundingBoxParsed) + newId = TracingId.generate } yield Ok(Json.toJson(newId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 6a7a232374a..69ee73b8150 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -1,6 +1,8 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject +import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest @@ -103,7 +105,7 @@ class TSAnnotationController @Inject()( def duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], - isFromTask: Option[Boolean], + isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], @@ -115,7 +117,16 @@ class TSAnnotationController @Inject()( logTime(slackNotificationService.noticeSlowRequest) { 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { for { - annotationProto <- annotationService.duplicate(annotationId, newAnnotationId, version) + editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) + editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) + boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + annotationProto <- annotationService.duplicate(annotationId, + newAnnotationId, + version, + isFromTask, + editPositionParsed, + editRotationParsed, + boundingBoxParsed) } yield Ok(annotationProto.toByteArray).as(protobufMimeType) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index dcbe1eb86c0..d25d2a4a79c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -35,11 +35,11 @@ class SkeletonTracingService @Inject()( implicit val tracingCompanion: SkeletonTracing.type = SkeletonTracing - def duplicate(tracing: SkeletonTracing, - fromTask: Boolean, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox]): Fox[String] = { + def adaptSkeletonForDuplicate(tracing: SkeletonTracing, + fromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox]): SkeletonTracing = { val taskBoundingBox = if (fromTask) { tracing.boundingBox.map { bb => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 @@ -57,8 +57,7 @@ class SkeletonTracingService @Inject()( version = 0 ) 
.addAllUserBoundingBoxes(taskBoundingBox) - val finalTracing = if (fromTask) newTracing.clearBoundingBox else newTracing - save(finalTracing, None, finalTracing.version) + if (fromTask) newTracing.clearBoundingBox else newTracing } def merge(tracings: Seq[SkeletonTracing], diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 5edab1323f4..f889ba4fde2 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -11,7 +11,7 @@ POST /annotation/:annotationId/update GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) -POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Option[Boolean], minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: 
Option[String], boundingBox: Option[String]) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() From 2fa24c6d4c45d62f776bce40850b80f8a1389813 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 25 Oct 2024 14:44:11 +0200 Subject: [PATCH 121/361] do not add actionTracingId to updateTdCamera and revertToVersion update actions --- .../oxalis/model/reducers/save_reducer.ts | 24 ++++++++++--------- frontend/javascripts/oxalis/store.ts | 4 ++-- 2 files changed, 15 insertions(+), 13 deletions(-) diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 06fb2975175..73de1df5aca 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -196,17 +196,19 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { export function addTracingIdToActions( actions: UpdateAction[], tracingId: string, -): UpdateActionWithTracingId[] { - return actions.map( - (innerAction) => - ({ - ...innerAction, - value: { - ...innerAction.value, - actionTracingId: tracingId, - }, - }) as UpdateActionWithTracingId, - ); +): Array { + return actions.map((action) => { + if (action.name === "updateTdCamera" || action.name === "revertToVersion") { + return action as UpdateAction; + } + return { + ...action, + value: { + ...action.value, + actionTracingId: tracingId, + }, + } as UpdateActionWithTracingId; + }); } export default SaveReducer; diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 72c353756df..22585186ed6 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -50,7 +50,7 @@ import type { } from "oxalis/constants"; import type { BLEND_MODES, ControlModeEnum } from "oxalis/constants"; import type { Matrix4x4 } from "libs/mjs"; -import 
type { UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; +import type { UpdateAction, UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; import AnnotationReducer from "oxalis/model/reducers/annotation_reducer"; import DatasetReducer from "oxalis/model/reducers/dataset_reducer"; import type DiffableMap from "libs/diffable_map"; @@ -446,7 +446,7 @@ export type SaveQueueEntry = { version: number; timestamp: number; authorId: string; - actions: Array; + actions: Array; transactionId: string; transactionGroupCount: number; transactionGroupIndex: number; From 2f80f0f78e6ea98fcc43e3308af6e4f9f26935cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 25 Oct 2024 14:45:35 +0200 Subject: [PATCH 122/361] some frontend spec fixes (far more are still needed xD) --- .../javascripts/test/sagas/save_saga.spec.ts | 18 +++--------------- .../test/sagas/skeletontracing_saga.spec.ts | 3 ++- 2 files changed, 5 insertions(+), 16 deletions(-) diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 8036007eb96..a7881fb031d 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -301,11 +301,7 @@ test("SaveSaga should remove the correct update actions", (t) => { ]); saga.next(annotationId); saga.next(TRACINGSTORE_URL); - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setVersionNumberAction(3, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(3))); expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(2))); }); @@ -331,11 +327,7 @@ test("SaveSaga should set the correct version numbers", (t) => { ]); saga.next(annotationId); saga.next(TRACINGSTORE_URL); - expectValueDeepEqual( - t, - saga.next(), - 
put(SaveActions.setVersionNumberAction(LAST_VERSION + 3, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 3))); expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(3))); }); @@ -362,11 +354,7 @@ test("SaveSaga should set the correct version numbers if the save queue was comp saga.next(annotationId); saga.next(TRACINGSTORE_URL); // two of the updateTracing update actions are removed by compactSaveQueue - expectValueDeepEqual( - t, - saga.next(), - put(SaveActions.setVersionNumberAction(LAST_VERSION + 1, TRACING_TYPE, tracingId)), - ); + expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 1))); expectValueDeepEqual(t, saga.next(), put(SaveActions.setLastSaveTimestampAction())); expectValueDeepEqual(t, saga.next(), put(SaveActions.shiftSaveQueueAction(3))); }); diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index 5d8e449b456..1e1011bba7c 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -79,6 +79,8 @@ function testDiffing( ); } +// TODOM +// biome-ignore lint/correctness/noUnusedVariables: function compactSaveQueueWithUpdateActions( queue: Array, tracing: SkeletonTracing, @@ -90,7 +92,6 @@ function compactSaveQueueWithUpdateActions( // filling the save queue). one could probably combine compactUpdateActions and // createSaveQueueFromUpdateActions to have a createCompactedSaveQueueFromUpdateActions // helper function and use that in this spec. 
- // @ts-expect-error queue.map((batch) => ({ ...batch, actions: compactUpdateActions(batch.actions, tracing) })), ); } From 52076c13f141a2c968c63fc6db35ddb89dfbdddb Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 28 Oct 2024 10:04:18 +0100 Subject: [PATCH 123/361] iterate on duplicate (volumes, updates, buckets) --- .../annotation/TSAnnotationService.scala | 71 +++++++++++++++--- .../SkeletonTracingController.scala | 23 ------ .../controllers/TSAnnotationController.scala | 7 +- .../controllers/VolumeTracingController.scala | 48 ------------- .../skeleton/SkeletonTracingService.scala | 5 +- .../volume/VolumeTracingService.scala | 72 +++++++++---------- ...alableminds.webknossos.tracingstore.routes | 2 - 7 files changed, 103 insertions(+), 125 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 18f4cc1df86..e5e2ac90f84 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -31,6 +31,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ ApplyableVolumeUpdateAction, BucketMutatingVolumeUpdateAction, + MagRestrictions, UpdateMappingNameVolumeAction, VolumeTracingService, VolumeUpdateAction @@ -625,7 +626,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss isFromTask: Boolean, editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = + boundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions)(implicit ec: ExecutionContext, tc: TokenContext): 
Fox[AnnotationProto] = for { currentAnnotation <- get(annotationId, version) newLayers <- Fox.serialCombined(currentAnnotation.layers)( @@ -636,11 +638,25 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss isFromTask, editPosition, editRotation, - boundingBox)) + boundingBox, + magRestrictions)) + _ <- duplicateUpdates(annotationId, newAnnotationId) duplicatedAnnotation = currentAnnotation.copy(layers = newLayers) _ <- tracingDataStore.annotations.put(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) } yield duplicatedAnnotation + private def duplicateUpdates(annotationId: String, newAnnotationId: String)( + implicit ec: ExecutionContext): Fox[Unit] = + // TODO perf: batch or use fossildb duplicate api + for { + updatesAsBytes: Seq[(Long, Array[Byte])] <- tracingDataStore.annotationUpdates + .getMultipleVersionsAsVersionValueTuple(annotationId) + _ <- Fox.serialCombined(updatesAsBytes) { + case (version, updateBytes) => + tracingDataStore.annotationUpdates.put(newAnnotationId, version, updateBytes) + } + } yield () + private def duplicateLayer( annotationId: String, layer: AnnotationLayerProto, @@ -648,10 +664,19 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss isFromTask: Boolean, editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationLayerProto] = + boundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationLayerProto] = for { newTracingId <- layer.`type` match { - case AnnotationLayerTypeProto.volume => duplicateVolumeTracing(annotationId, layer.tracingId, version) + case AnnotationLayerTypeProto.volume => + duplicateVolumeTracing(annotationId, + layer.tracingId, + version, + isFromTask, + boundingBox, + magRestrictions, + editPosition, + editRotation) case AnnotationLayerTypeProto.skeleton => 
duplicateSkeletonTracing(annotationId, layer.tracingId, @@ -664,10 +689,33 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } yield layer.copy(tracingId = newTracingId) - private def duplicateVolumeTracing(annotationId: String, tracingId: String, version: Long)( - implicit ec: ExecutionContext): Fox[String] = { + private def duplicateVolumeTracing( + annotationId: String, + sourceTracingId: String, + version: Long, + isFromTask: Boolean, + boundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { val newTracingId = TracingId.generate - Fox.successful(newTracingId) + for { + sourceTracing <- findVolume(annotationId, sourceTracingId, Some(version)) + newTracing <- volumeTracingService.adaptVolumeForDuplicate(sourceTracingId, + newTracingId, + sourceTracing, + isFromTask, + boundingBox, + magRestrictions, + editPosition, + editRotation, + version) + _ <- tracingDataStore.volumes.put(newTracingId, version, newTracing) + _ <- volumeTracingService.duplicateVolumeData(sourceTracingId, sourceTracing, newTracingId, newTracing) + /*_ <- Fox.runIf(adaptedTracing.getHasEditableMapping)( + editableMappingService.duplicate(tracingId, newTracingId, version = None, remoteFallbackLayerOpt))*/ + // TODO downsample? 
+ } yield newTracingId } private def duplicateSkeletonTracing( @@ -681,9 +729,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss val newTracingId = TracingId.generate for { skeleton <- findSkeleton(annotationId, tracingId, Some(version)) - adaptedSkeleton = skeletonTracingService - .adaptSkeletonForDuplicate(skeleton, isFromTask, editPosition, editRotation, boundingBox) - .withVersion(version) + adaptedSkeleton = skeletonTracingService.adaptSkeletonForDuplicate(skeleton, + isFromTask, + editPosition, + editRotation, + boundingBox, + version) _ <- tracingDataStore.skeletons.put(newTracingId, version, adaptedSkeleton) } yield newTracingId } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index faa71d0711a..bcfe15941fd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -1,7 +1,6 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject -import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.services.UserAccessRequest @@ -143,26 +142,4 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin } } - def duplicate(tracingId: String, - version: Option[Long], - fromTask: Option[Boolean], - editPosition: Option[String], - editRotation: Option[String], - boundingBox: Option[String]): Action[AnyContent] = - Action.async { implicit request => - log() { - 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findSkeleton(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( - "tracing.notFound") - editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) - editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) - boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - newId = TracingId.generate - } yield Ok(Json.toJson(newId)) - } - } - } - } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 69ee73b8150..ea689011306 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -14,6 +14,7 @@ import com.scalableminds.webknossos.tracingstore.annotation.{ UpdateActionGroup } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -108,7 +109,7 @@ class TSAnnotationController @Inject()( isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], - downsample: Option[Boolean], + downsample: Option[Boolean], // TODO remove or implement editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]): Action[AnyContent] = @@ -120,13 +121,15 @@ class TSAnnotationController @Inject()( editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) editRotationParsed <- 
Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + magRestrictions = MagRestrictions(minMag, maxMag) annotationProto <- annotationService.duplicate(annotationId, newAnnotationId, version, isFromTask, editPositionParsed, editRotationParsed, - boundingBoxParsed) + boundingBoxParsed, + magRestrictions) } yield Ok(annotationProto.toByteArray).as(protobufMimeType) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 0b0305f627c..a56a441fba6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -277,54 +277,6 @@ class VolumeTracingController @Inject()( private def formatMissingBucketList(indices: List[Int]): String = "[" + indices.mkString(", ") + "]" - def duplicate(tracingId: String, - fromTask: Option[Boolean], - minMag: Option[Int], - maxMag: Option[Int], - downsample: Option[Boolean], - editPosition: Option[String], - editRotation: Option[String], - boundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") - _ = logger.info(s"Duplicating volume tracing $tracingId...") - datasetBoundingBox = request.body.asJson.flatMap(_.validateOpt[BoundingBox].asOpt.flatten) - magRestrictions = MagRestrictions(minMag, maxMag) - editPositionParsed <- 
Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) - editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) - boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( - volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - newTracingId = TracingId.generate - // TODO - /*_ <- Fox.runIf(tracing.getHasEditableMapping)( - editableMappingService.duplicate(tracingId, newTracingId, version = None, remoteFallbackLayerOpt))*/ - // TODO actionTracingIds + addLayer tracing ids need to be remapped (as they need to be globally unique) - (newId, newTracing) <- volumeTracingService.duplicate( - annotationId, - tracingId, - newTracingId, - tracing, - fromTask.getOrElse(false), - datasetBoundingBox, - magRestrictions, - editPositionParsed, - editRotationParsed, - boundingBoxParsed, - mappingName = None - ) - _ <- Fox.runIfOptionTrue(downsample)( - volumeTracingService.downsample(annotationId, newId, tracingId, newTracing)) - } yield Ok(Json.toJson(newId)) - } - } - } - } - def importVolumeData(tracingId: String): Action[MultipartFormData[TemporaryFile]] = Action.async(parse.multipartFormData) { implicit request => log() { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index d25d2a4a79c..700c13c41c5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -39,7 +39,8 @@ class SkeletonTracingService @Inject()( fromTask: Boolean, editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox]): SkeletonTracing = { + 
boundingBox: Option[BoundingBox], + newVersion: Long): SkeletonTracing = { val taskBoundingBox = if (fromTask) { tracing.boundingBox.map { bb => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 @@ -54,7 +55,7 @@ class SkeletonTracingService @Inject()( editPosition = editPosition.map(vec3IntToProto).getOrElse(tracing.editPosition), editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracing.editRotation), boundingBox = boundingBoxOptToProto(boundingBox).orElse(tracing.boundingBox), - version = 0 + version = newVersion ) .addAllUserBoundingBoxes(taskBoundingBox) if (fromTask) newTracing.clearBoundingBox else newTracing diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 1f0d68a805d..291fd749f39 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -486,38 +486,34 @@ class VolumeTracingService @Inject()( data <- binaryDataService.handleDataRequests(requests) } yield data - def duplicate(annotationId: String, - tracingId: String, - newTracingId: String, - sourceTracing: VolumeTracing, - fromTask: Boolean, - datasetBoundingBox: Option[BoundingBox], - magRestrictions: MagRestrictions, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox], - mappingName: Option[String])(implicit tc: TokenContext): Fox[(String, VolumeTracing)] = { - val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, fromTask, datasetBoundingBox) + def adaptVolumeForDuplicate(sourceTracingId: String, + newTracingId: String, + sourceTracing: VolumeTracing, + isFromTask: Boolean, + boundingBox: 
Option[BoundingBox], + magRestrictions: MagRestrictions, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[VolumeTracing] = { + val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, isFromTask, boundingBox) val tracingWithMagRestrictions = restrictMagList(tracingWithBB, magRestrictions) for { - fallbackLayer <- getFallbackLayer(tracingId, sourceTracing) + fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer) newTracing = tracingWithMagRestrictions.copy( createdTimestamp = System.currentTimeMillis(), - editPosition = editPosition.map(vec3IntToProto).getOrElse(tracingWithMagRestrictions.editPosition), - editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracingWithMagRestrictions.editRotation), - boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(tracingWithMagRestrictions.boundingBox), - mappingName = mappingName.orElse( + editPosition = editPosition.map(vec3IntToProto).getOrElse(sourceTracing.editPosition), + editRotation = editRotation.map(vec3DoubleToProto).getOrElse(sourceTracing.editRotation), + boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(sourceTracing.boundingBox), + mappingName = if (sourceTracing.getHasEditableMapping) Some(newTracingId) - else tracingWithMagRestrictions.mappingName), - version = 0, + else sourceTracing.mappingName, + version = newVersion, // Adding segment index on duplication if the volume tracing allows it. 
This will be used in duplicateData hasSegmentIndex = Some(hasSegmentIndex) ) _ <- bool2Fox(newTracing.mags.nonEmpty) ?~> "magRestrictions.tooTight" - newId <- save(newTracing, Some(newTracingId), newTracing.version) - _ <- duplicateData(annotationId, tracingId, sourceTracing, newId, newTracing) - } yield (newId, newTracing) + } yield newTracing } @SuppressWarnings(Array("OptionGet")) //We suppress this warning because we check the option beforehand @@ -536,21 +532,21 @@ class VolumeTracingService @Inject()( .withBoundingBox(datasetBoundingBox.get) } else tracing - private def duplicateData(annotationId: String, - sourceId: String, - sourceTracing: VolumeTracing, - destinationId: String, - destinationTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = + def duplicateVolumeData(sourceTracingId: String, + sourceTracing: VolumeTracing, + newTracingId: String, + newTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = for { - isTemporaryTracing <- isTemporaryTracing(sourceId) - sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing, isTemporaryTracing) - buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() - destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) - fallbackLayer <- getFallbackLayer(sourceId, sourceTracing) + isTemporaryTracing <- isTemporaryTracing(sourceTracingId) + sourceDataLayer = volumeTracingLayer(sourceTracingId, sourceTracing, isTemporaryTracing) + buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream( + Some(newTracing.version)) + destinationDataLayer = volumeTracingLayer(newTracingId, newTracing) + fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( - destinationId, + newTracingId, volumeSegmentIndexClient, - destinationTracing.version, + newTracing.version, remoteDatastoreClient, fallbackLayer, 
AdditionalAxis.fromProtosAsOpt(sourceTracing.additionalAxes), @@ -559,10 +555,10 @@ class VolumeTracingService @Inject()( mappingName <- selectMappingName(sourceTracing) _ <- Fox.serialCombined(buckets) { case (bucketPosition, bucketData) => - if (destinationTracing.mags.contains(vec3IntToProto(bucketPosition.mag))) { + if (newTracing.mags.contains(vec3IntToProto(bucketPosition.mag))) { for { - _ <- saveBucket(destinationDataLayer, bucketPosition, bucketData, destinationTracing.version) - _ <- Fox.runIfOptionTrue(destinationTracing.hasSegmentIndex)( + _ <- saveBucket(destinationDataLayer, bucketPosition, bucketData, newTracing.version) + _ <- Fox.runIfOptionTrue(newTracing.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, bucketPosition, @@ -570,7 +566,7 @@ class VolumeTracingService @Inject()( Empty, sourceTracing.elementClass, mappingName, - editableMappingTracingId(sourceTracing, sourceId) + editableMappingTracingId(sourceTracing, sourceTracingId) )) } yield () } else Fox.successful(()) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index f889ba4fde2..5caca298037 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -20,7 +20,6 @@ POST /volume/:tracingId/initialDataMultiple GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, version: Option[Long]) GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) -POST 
/volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, fromTask: Option[Boolean], minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(tracingId: String, segmentId: Long) @@ -74,4 +73,3 @@ POST /skeleton/mergedFromContents POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(persist: Boolean) GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple -POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, version: Option[Long], fromTask: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) From ce147181eb4d8d434ddffea6d8e3b3f145d45265 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 28 Oct 2024 10:17:32 +0100 Subject: [PATCH 124/361] duplicate editable mapping --- .../annotation/TSAnnotationService.scala | 22 ++++++++--- .../EditableMappingController.scala | 10 ++--- .../EditableMappingLayer.scala | 2 +- .../EditableMappingService.scala | 37 +++---------------- 4 files changed, 29 insertions(+), 42 deletions(-) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index e5e2ac90f84..7b4370d3855 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -248,7 +248,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss reportChangesToWk) ?~> "applyUpdates.failed" } yield updated - def getEditableMappingInfo(annotationId: String, tracingId: String, version: Option[Long] = None)( + def findEditableMappingInfo(annotationId: String, tracingId: String, version: Option[Long] = None)( implicit ec: ExecutionContext, tc: TokenContext): Fox[EditableMappingInfo] = for { @@ -546,7 +546,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tc: TokenContext): Fox[Option[String]] = if (tracing.getHasEditableMapping) for { - editableMappingInfo <- getEditableMappingInfo(annotationId, tracingId) + editableMappingInfo <- findEditableMappingInfo(annotationId, tracingId) } yield Some(editableMappingInfo.baseMappingName) else Fox.successful(tracing.mappingName) @@ -711,13 +711,25 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss editRotation, version) _ <- tracingDataStore.volumes.put(newTracingId, version, newTracing) - _ <- volumeTracingService.duplicateVolumeData(sourceTracingId, sourceTracing, newTracingId, newTracing) - /*_ <- Fox.runIf(adaptedTracing.getHasEditableMapping)( - editableMappingService.duplicate(tracingId, newTracingId, version = None, remoteFallbackLayerOpt))*/ + _ <- Fox.runIf(!newTracing.getHasEditableMapping)( + volumeTracingService.duplicateVolumeData(sourceTracingId, sourceTracing, newTracingId, newTracing)) + _ <- 
Fox.runIf(newTracing.getHasEditableMapping)( + duplicateEditableMapping(annotationId, sourceTracingId, newTracingId, version)) // TODO downsample? } yield newTracingId } + private def duplicateEditableMapping(annotationId: String, + sourceTracingId: String, + newTracingId: String, + version: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + for { + editableMappingInfo <- findEditableMappingInfo(annotationId, sourceTracingId, Some(version)) + _ <- tracingDataStore.editableMappingsInfo.put(newTracingId, version, toProtoBytes(editableMappingInfo)) + _ <- editableMappingService.duplicateSegmentToAgglomerate(sourceTracingId, newTracingId, version) + _ <- editableMappingService.duplicateAgglomerateToGraph(sourceTracingId, newTracingId, version) + } yield () + private def duplicateSkeletonTracing( annotationId: String, tracingId: String, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 8e5cc234a5e..5abf9014d24 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -113,7 +113,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId, version) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version) infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = 
editableMappingInfo) } yield Ok(infoJson) } @@ -151,7 +151,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId, version = None) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version = None) relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( request.body.items.toSet, editableMappingInfo, @@ -173,7 +173,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) edges <- editableMappingService.agglomerateGraphMinCut(tracingId, tracing.version, editableMappingInfo, @@ -193,7 +193,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(tracingId, editableMappingInfo, 
tracing.version, @@ -211,7 +211,7 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" - editableMappingInfo <- annotationService.getEditableMappingInfo(annotationId, tracingId) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback(tracingId, tracing.version, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index 8b1561ac8ad..1f320a38d59 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -36,7 +36,7 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP remoteFallbackLayer <- layer.editableMappingService .remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.tracingId) // called here to ensure updates are applied - editableMappingInfo <- layer.annotationService.getEditableMappingInfo(layer.annotationId, + editableMappingInfo <- layer.annotationService.findEditableMappingInfo(layer.annotationId, tracingId, Some(layer.version))(ec, layer.tokenContext) dataRequest: WebknossosDataRequest = WebknossosDataRequest( diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index dcc8c012cc6..66dd0e32eb7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -135,60 +135,35 @@ class EditableMappingService @Inject()( } yield newEditableMappingInfo } - /* TODO - def duplicate(sourceTracingId: String, - newTracingId: String, - version: Option[Long], - remoteFallbackLayerBox: Box[RemoteFallbackLayer])(implicit tc: TokenContext): Fox[Unit] = - for { - remoteFallbackLayer <- remoteFallbackLayerBox ?~> "duplicate on editable mapping without remote fallback layer" - (duplicatedInfo, newVersion) <- getInfoAndActualVersion(sourceTracingId, version, remoteFallbackLayer) - _ <- tracingDataStore.editableMappingsInfo.put(newTracingId, newVersion, toProtoBytes(duplicatedInfo)) - _ <- duplicateSegmentToAgglomerate(sourceTracingId, newTracingId, newVersion) - _ <- duplicateAgglomerateToGraph(sourceTracingId, newTracingId, newVersion) - updateActionsWithVersions <- getUpdateActionsWithVersions(sourceTracingId, newVersion, 0L) - _ <- Fox.serialCombined(updateActionsWithVersions) { updateActionsWithVersion: (Long, List[UpdateAction]) => - tracingDataStore.editableMappingUpdates.put(newTracingId, - updateActionsWithVersion._1, - updateActionsWithVersion._2) - } - } yield () - - - private def duplicateSegmentToAgglomerate(sourceTracingId: String, newId: String, newVersion: Long): Fox[Unit] = { + def duplicateSegmentToAgglomerate(sourceTracingId: String, newId: String, version: Long): Fox[Unit] = { val iterator = new VersionedFossilDbIterator(sourceTracingId, 
tracingDataStore.editableMappingsSegmentToAgglomerate, - Some(newVersion)) + Some(version)) for { _ <- Fox.combined(iterator.map { keyValuePair => for { chunkId <- chunkIdFromSegmentToAgglomerateKey(keyValuePair.key).toFox newKey = segmentToAgglomerateKey(newId, chunkId) - _ <- tracingDataStore.editableMappingsSegmentToAgglomerate.put(newKey, - version = newVersion, - keyValuePair.value) + _ <- tracingDataStore.editableMappingsSegmentToAgglomerate.put(newKey, version = version, keyValuePair.value) } yield () }.toList) } yield () } - private def duplicateAgglomerateToGraph(sourceTracingId: String, newId: String, newVersion: Long): Fox[Unit] = { + def duplicateAgglomerateToGraph(sourceTracingId: String, newId: String, version: Long): Fox[Unit] = { val iterator = - new VersionedFossilDbIterator(sourceTracingId, - tracingDataStore.editableMappingsAgglomerateToGraph, - Some(newVersion)) + new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsAgglomerateToGraph, Some(version)) for { _ <- Fox.combined(iterator.map { keyValuePair => for { agglomerateId <- agglomerateIdFromAgglomerateGraphKey(keyValuePair.key).toFox newKey = agglomerateGraphKey(newId, agglomerateId) - _ <- tracingDataStore.editableMappingsAgglomerateToGraph.put(newKey, version = newVersion, keyValuePair.value) + _ <- tracingDataStore.editableMappingsAgglomerateToGraph.put(newKey, version = version, keyValuePair.value) } yield () }.toList) } yield () } - */ def assertTracingHasEditableMapping(tracing: VolumeTracing)(implicit ec: ExecutionContext): Fox[Unit] = bool2Fox(tracing.getHasEditableMapping) ?~> "annotation.volume.noEditableMapping" From ba476478555d3d3aa4e3e740f1fa285d4b0d8841 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 28 Oct 2024 10:24:49 +0100 Subject: [PATCH 125/361] do not report layer changes to postgres when loading an older version --- .../tracingstore/annotation/TSAnnotationService.scala | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff 
--git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 7b4370d3855..3d96cbe2ede 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -458,7 +458,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- updatedWithNewVerson.flushBufferedUpdates() _ <- flushUpdatedTracings(updatedWithNewVerson) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) - _ <- remoteWebknossosClient.updateAnnotation(annotationId, updatedWithNewVerson.annotation) // TODO perf: skip if annotation is identical + _ <- Fox.runIf(reportChangesToWk)(remoteWebknossosClient.updateAnnotation( + annotationId, + updatedWithNewVerson.annotation)) // TODO perf: skip if annotation is identical } yield updatedWithNewVerson } } @@ -492,10 +494,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss * hence the emptyFallbck annotation.version) */ for { - newestUpdateVersion <- tracingDataStore.annotationUpdates.getVersion( - annotationId, - mayBeEmpty = Some(true), - emptyFallback = Some(0L)) // TODO in case of empty, look in annotation table, take version from there + newestUpdateVersion <- tracingDataStore.annotationUpdates.getVersion(annotationId, + mayBeEmpty = Some(true), + emptyFallback = Some(0L)) } yield { targetVersionOpt match { case None => newestUpdateVersion From b3f9a2c659a137f1fb3ee0fa274e942525c68b4a Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 28 Oct 2024 11:07:02 +0100 Subject: [PATCH 126/361] some cleanup --- app/models/annotation/AnnotationService.scala | 10 +-------- .../WKRemoteTracingStoreClient.scala | 12 +--------- .../annotation/AnnotationReversion.scala 
| 4 +--- .../annotation/AnnotationUpdateActions.scala | 5 ++++- .../annotation/AnnotationWithTracings.scala | 3 ++- .../annotation/TSAnnotationService.scala | 22 +++++++++---------- .../controllers/TSAnnotationController.scala | 3 --- ...alableminds.webknossos.tracingstore.routes | 2 +- 8 files changed, 21 insertions(+), 40 deletions(-) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 076bfce75ee..fc2c4bb1c42 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -305,15 +305,7 @@ class AnnotationService @Inject()( def downsampleAnnotation(annotation: Annotation, volumeAnnotationLayer: AnnotationLayer)( implicit ctx: DBAccessContext): Fox[Unit] = - for { - dataset <- datasetDAO.findOne(annotation._dataset) ?~> "dataset.notFoundForAnnotation" - _ <- bool2Fox(volumeAnnotationLayer.typ == AnnotationLayerType.Volume) ?~> "annotation.downsample.volumeOnly" - rpcClient <- tracingStoreService.clientFor(dataset) - newVolumeTracingId <- rpcClient.duplicateVolumeTracing(volumeAnnotationLayer.tracingId, downsample = true) - _ = logger.info( - s"Replacing volume tracing ${volumeAnnotationLayer.tracingId} by downsampled copy $newVolumeTracingId for annotation ${annotation._id}.") - _ <- annotationLayersDAO.replaceTracingId(annotation._id, volumeAnnotationLayer.tracingId, newVolumeTracingId) - } yield () + ??? 
// TODO: remove feature or implement as update action // WARNING: needs to be repeatable, might be called multiple times for an annotation def finish(annotation: Annotation, user: User, restrictions: AnnotationRestrictions)( diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 3ee30770dc1..b66ccee76b6 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -127,17 +127,7 @@ class WKRemoteTracingStoreClient( downsample: Boolean = false, editPosition: Option[Vec3Int] = None, editRotation: Option[Vec3Double] = None, - boundingBox: Option[BoundingBox] = None): Fox[String] = { - logger.debug(s"Called to duplicate volume tracing $volumeTracingId. $baseInfo") - rpc(s"${tracingStore.url}/tracings/volume/$volumeTracingId/duplicate").withLongTimeout - .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("fromTask" -> isFromTask.toString) - .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) - .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) - .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) - .addQueryString("downsample" -> downsample.toString) - .postJsonWithJsonResponse[Option[BoundingBox], String](datasetBoundingBox) - } + boundingBox: Option[BoundingBox] = None): Fox[String] = ??? def mergeSkeletonTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = { logger.debug("Called to merge SkeletonTracings by ids." 
+ baseInfo) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala index f7108ffbaad..a251d545fee 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -11,12 +11,10 @@ trait AnnotationReversion { def volumeTracingService: VolumeTracingService - def revertDistributedElements(annotationId: String, - currentAnnotationWithTracings: AnnotationWithTracings, + def revertDistributedElements(currentAnnotationWithTracings: AnnotationWithTracings, sourceAnnotationWithTracings: AnnotationWithTracings, revertAction: RevertToVersionAnnotationAction, newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = - // TODO segment index, volume buckets, proofreading data for { _ <- Fox.serialCombined(sourceAnnotationWithTracings.getVolumes) { // Only volume data for volume layers present in the *source annotation* needs to be reverted. 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index 4d44bcc1407..95a35cd4c95 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.annotation +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions @@ -12,7 +13,9 @@ case class AnnotationLayerParameters(typ: AnnotationLayerType, mappingName: Option[String] = None, magRestrictions: Option[MagRestrictions], name: Option[String], - additionalAxes: Option[Seq[AdditionalAxis]]) + additionalAxes: Option[Seq[AdditionalAxis]]) { + def getNameWithDefault: String = name.getOrElse(AnnotationLayer.defaultNameForType(typ)) +} object AnnotationLayerParameters { implicit val jsonFormat: OFormat[AnnotationLayerParameters] = Json.using[WithDefaultValues].format[AnnotationLayerParameters] diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index fddeda5bf6b..482ebb5b12c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -92,7 +92,8 @@ case class 
AnnotationWithTracings( ) def deleteTracing(a: DeleteLayerAnnotationAction): AnnotationWithTracings = - this.copy(annotation = annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId))) + this.copy(annotation = annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)), + tracingsById = tracingsById.removed(a.tracingId)) def updateLayerMetadata(a: UpdateLayerMetadataAnnotationAction): AnnotationWithTracings = this.copy(annotation = annotation.copy(layers = annotation.layers.map(l => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 3d96cbe2ede..39ccdb92d0a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox -import com.scalableminds.util.tools.Fox.{box2Fox, option2Fox} +import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.Annotation.{ AnnotationLayerProto, AnnotationLayerTypeProto, @@ -15,6 +15,7 @@ import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappin import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingLayer, 
EditableMappingService, @@ -145,6 +146,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss targetVersion: Long)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = for { tracingId <- action.tracingId.toFox ?~> "add layer action has no tracingId" + _ <- bool2Fox( + !annotationWithTracings.annotation.layers + .exists(_.name == action.layerParameters.getNameWithDefault)) ?~> "addLayer.nameInUse" + _ <- bool2Fox( + !annotationWithTracings.annotation.layers.exists( + _.`type` == AnnotationLayerTypeProto.skeleton && action.layerParameters.typ == AnnotationLayerType.Skeleton)) ?~> "addLayer.onlyOneSkeletonAllowed" tracing <- remoteWebknossosClient.createTracingFor(annotationId, action.layerParameters, previousVersion = targetVersion - 1) @@ -157,7 +164,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss revertAction: RevertToVersionAnnotationAction, newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = // Note: works only after “ironing out” the update action groups - // TODO: read old annotationProto, tracing, buckets, segment indeces for { sourceAnnotation: AnnotationWithTracings <- getWithTracings( annotationId, @@ -167,14 +173,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestAll = true) // TODO do we need to request the others? _ = logger.info( s"reverting to suorceVersion ${revertAction.sourceVersion}. 
got sourceAnnotation with version ${sourceAnnotation.version} with ${sourceAnnotation.skeletonStats}") - _ <- revertDistributedElements(annotationId, annotationWithTracings, sourceAnnotation, revertAction, newVersion) + _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, revertAction, newVersion) } yield sourceAnnotation - def createTracing(a: AddLayerAnnotationAction)( - implicit ec: ExecutionContext): Fox[Either[SkeletonTracing, VolumeTracing]] = - Fox.failure("not implemented") - // TODO create tracing object (ask wk for needed parameters e.g. fallback layer info?) - def updateActionLog(annotationId: String, newestVersion: Long, oldestVersion: Long)( implicit ec: ExecutionContext): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = @@ -256,7 +257,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tracing <- annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" } yield tracing - // move the functions that construct the AnnotationWithTracigns elsewhere? + // TODO move the functions that construct the AnnotationWithTracigns elsewhere to keep this file smaller? 
private def addEditableMapping( annotationId: String, annotationWithTracings: AnnotationWithTracings, @@ -295,7 +296,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationWithTracings, updatesFlat, annotation.version, - targetVersion) // TODO: targetVersion should be set per update group + targetVersion) // TODO: targetVersion must be set per update group, as reverts may come between these updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesGroupsRegrouped, @@ -716,7 +717,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss volumeTracingService.duplicateVolumeData(sourceTracingId, sourceTracing, newTracingId, newTracing)) _ <- Fox.runIf(newTracing.getHasEditableMapping)( duplicateEditableMapping(annotationId, sourceTracingId, newTracingId, version)) - // TODO downsample? } yield newTracingId } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index ea689011306..2058d2d6e23 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -34,9 +34,7 @@ class TSAnnotationController @Inject()( log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - // TODO assert id does not already exist _ <- tracingDataStore.annotations.put(annotationId, 0L, request.body) - _ = logger.info(s"stored annotationProto for $annotationId") } yield Ok } } @@ -109,7 +107,6 @@ class TSAnnotationController @Inject()( isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], - downsample: Option[Boolean], // TODO remove or implement editPosition: Option[String], editRotation: 
Option[String], boundingBox: Option[String]): Action[AnyContent] = diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 5caca298037..e5c6ec7a9a1 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -11,7 +11,7 @@ POST /annotation/:annotationId/update GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) -POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() From 8a0d637f60e0f75a1e0dfb0a054c47b90465696e Mon Sep 17 00:00:00 2001 From: Florian M 
Date: Mon, 28 Oct 2024 11:25:06 +0100 Subject: [PATCH 127/361] use new annotation duplicate in task assignment --- app/models/annotation/Annotation.scala | 12 +++ app/models/annotation/AnnotationService.scala | 81 ++++++++++--------- 2 files changed, 55 insertions(+), 38 deletions(-) diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index d608250a649..f4eccd723db 100755 --- a/app/models/annotation/Annotation.scala +++ b/app/models/annotation/Annotation.scala @@ -515,6 +515,18 @@ class AnnotationDAO @Inject()(sqlClient: SqlClient, annotationLayerDAO: Annotati AND a.typ = ${AnnotationType.Task} """.as[ObjectId]) } yield r.toList + def findBaseIdForTask(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[ObjectId] = + for { + accessQuery <- readAccessQuery + r <- run(q"""SELECT _id + FROM $existingCollectionName + WHERE _task = $taskId + AND typ = ${AnnotationType.TracingBase} + AND state != ${AnnotationState.Cancelled} + AND $accessQuery""".as[ObjectId]) + firstRow <- r.headOption + } yield firstRow + def findAllByTaskIdAndType(taskId: ObjectId, typ: AnnotationType)( implicit ctx: DBAccessContext): Fox[List[Annotation]] = for { diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index fc2c4bb1c42..bade14cfdf5 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -354,46 +354,37 @@ class AnnotationService @Inject()( def annotationsFor(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[List[Annotation]] = annotationDAO.findAllByTaskIdAndType(taskId, AnnotationType.Task) - private def tracingsFromBase(annotationBase: Annotation, dataset: Dataset)( - implicit ctx: DBAccessContext, - m: MessagesProvider): Fox[(Option[String], Option[String])] = - for { - _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name) - tracingStoreClient <- tracingStoreService.clientFor(dataset) - 
baseSkeletonIdOpt <- annotationBase.skeletonTracingId - baseVolumeIdOpt <- annotationBase.volumeTracingId - newSkeletonId: Option[String] <- Fox.runOptional(baseSkeletonIdOpt)(skeletonId => - tracingStoreClient.duplicateSkeletonTracing(skeletonId)) - newVolumeId: Option[String] <- Fox.runOptional(baseVolumeIdOpt)(volumeId => - tracingStoreClient.duplicateVolumeTracing(volumeId)) - } yield (newSkeletonId, newVolumeId) - def createAnnotationFor(user: User, taskId: ObjectId, initializingAnnotationId: ObjectId)( implicit m: MessagesProvider, - ctx: DBAccessContext): Fox[Annotation] = { - def useAsTemplateAndInsert(annotation: Annotation) = - for { - datasetName <- datasetDAO.getNameById(annotation._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation" - dataset <- datasetDAO.findOne(annotation._dataset) ?~> Messages("dataset.noAccess", datasetName) - (newSkeletonId, newVolumeId) <- tracingsFromBase(annotation, dataset) ?~> s"Failed to use annotation base as template for task $taskId with annotation base ${annotation._id}" - annotationLayers <- AnnotationLayer.layersFromIds(newSkeletonId, newVolumeId) - newAnnotation = annotation.copy( - _id = initializingAnnotationId, - _user = user._id, - annotationLayers = annotationLayers, - state = Active, - typ = AnnotationType.Task, - created = Instant.now, - modified = Instant.now - ) - _ <- annotationDAO.updateInitialized(newAnnotation) - } yield newAnnotation - + ctx: DBAccessContext): Fox[Annotation] = for { - annotationBase <- baseForTask(taskId) ?~> "Failed to retrieve annotation base." - result <- useAsTemplateAndInsert(annotationBase).toFox - } yield result - } + annotationBaseId <- annotationDAO.findBaseIdForTask(taskId) ?~> "Failed to retrieve annotation base id." + annotationBase <- annotationDAO.findOne(annotationBaseId) ?~> "Failed to retrieve annotation base." 
+ datasetName <- datasetDAO.getNameById(annotationBase._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation" + dataset <- datasetDAO.findOne(annotationBase._dataset) ?~> Messages("dataset.noAccess", datasetName) + _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + duplicatedAnnotationProto <- tracingStoreClient.duplicateAnnotation( + annotationBaseId, + initializingAnnotationId, + version = None, + isFromTask = false, + editPosition = None, + editRotation = None, + boundingBox = None, + magRestrictions = MagRestrictions.empty + ) + newAnnotation = annotationBase.copy( + _id = initializingAnnotationId, + _user = user._id, + annotationLayers = duplicatedAnnotationProto.layers.map(AnnotationLayer.fromProto).toList, + state = Active, + typ = AnnotationType.Task, + created = Instant.now, + modified = Instant.now + ) + _ <- annotationDAO.updateInitialized(newAnnotation) + } yield newAnnotation def createSkeletonTracingBase(datasetName: String, boundingBox: Option[BoundingBox], @@ -705,7 +696,7 @@ class AnnotationService @Inject()( updated <- annotationInformationProvider.provideAnnotation(typ, id, issuingUser) } yield updated - def resetToBase(annotation: Annotation)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[Unit] = + def resetToBase(annotation: Annotation)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[Unit] = // TODO: implement as update action? 
annotation.typ match { case AnnotationType.Explorational => Fox.failure("annotation.revert.tasksOnly") @@ -731,6 +722,20 @@ class AnnotationService @Inject()( } yield () } + private def tracingsFromBase(annotationBase: Annotation, dataset: Dataset)( + implicit ctx: DBAccessContext, + m: MessagesProvider): Fox[(Option[String], Option[String])] = + for { + _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + baseSkeletonIdOpt <- annotationBase.skeletonTracingId + baseVolumeIdOpt <- annotationBase.volumeTracingId + newSkeletonId: Option[String] <- Fox.runOptional(baseSkeletonIdOpt)(skeletonId => + tracingStoreClient.duplicateSkeletonTracing(skeletonId)) + newVolumeId: Option[String] <- Fox.runOptional(baseVolumeIdOpt)(volumeId => + tracingStoreClient.duplicateVolumeTracing(volumeId)) + } yield (newSkeletonId, newVolumeId) + private def settingsFor(annotation: Annotation)(implicit ctx: DBAccessContext) = if (annotation.typ == AnnotationType.Task || annotation.typ == AnnotationType.TracingBase) for { From 3d92ae3bc6f322ba6b4490112a6a5eb4c6234a4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 28 Oct 2024 13:21:09 +0100 Subject: [PATCH 128/361] move old routes to new update actions --- frontend/javascripts/admin/admin_rest_api.ts | 201 ++++++++---------- .../admin/task/task_create_form_view.tsx | 6 +- .../explorative_annotations_view.tsx | 14 +- .../accessors/skeletontracing_accessor.ts | 13 +- .../oxalis/model/actions/save_actions.ts | 2 +- .../oxalis/model/reducers/save_reducer.ts | 11 +- .../oxalis/model/sagas/annotation_saga.tsx | 23 +- .../oxalis/model/sagas/update_actions.ts | 55 ++++- .../oxalis/model_initialization.ts | 4 +- .../view/action-bar/merge_modal_view.tsx | 4 +- .../view/components/editable_text_label.tsx | 2 +- .../oxalis/view/jobs/train_ai_model.tsx | 13 +- .../left-border-tabs/layer_settings_tab.tsx | 78 +++++-- 
.../modals/add_volume_layer_modal.tsx | 61 ++++-- frontend/javascripts/router.tsx | 6 +- .../backend-snapshot-tests/annotations.e2e.ts | 14 +- .../skeletontracing_server_objects.ts | 13 +- frontend/javascripts/types/api_flow_types.ts | 10 +- 18 files changed, 319 insertions(+), 211 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 44da3577ece..be699cb98b5 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1,70 +1,70 @@ import ResumableJS from "resumablejs"; import _ from "lodash"; import dayjs from "dayjs"; -import type { - APIActiveUser, - APIAnnotation, - APIAnnotationInfo, - APIAnnotationType, - APIAnnotationVisibility, - APIAnnotationWithTask, - APIBuildInfo, - APIConnectomeFile, - APIDataSource, - APIDataStore, - APIDataset, - APIDatasetId, - APIFeatureToggles, - APIHistogramData, - APIMapping, - APIMaybeUnimportedDataset, - APIMeshFile, - APIAvailableTasksReport, - APIOrganization, - APIOrganizationCompact, - APIProject, - APIProjectCreator, - APIProjectProgressReport, - APIProjectUpdater, - APIProjectWithStatus, - APIPublication, - APIMagRestrictions, - APIScript, - APIScriptCreator, - APIScriptUpdater, - APITask, - APITaskType, - APITeam, - APITimeInterval, - APITimeTrackingPerAnnotation, - APITimeTrackingSpan, - APITracingStore, - APIUpdateActionBatch, - APIUser, - APIUserLoggedTime, - APIUserTheme, - AnnotationLayerDescriptor, - AnnotationViewConfiguration, - EditableLayerProperties, - ExperienceDomainList, - ServerTracing, - TracingType, - ServerEditableMapping, - APICompoundType, - ZarrPrivateLink, - VoxelyticsWorkflowReport, - VoxelyticsChunkStatistics, - ShortLink, - VoxelyticsWorkflowListing, - APIPricingPlanStatus, - VoxelyticsLogLine, - APIUserCompact, - APIDatasetCompact, - MaintenanceInfo, - AdditionalCoordinate, - LayerLink, - VoxelSize, - APITimeTrackingPerUser, +import { + type APIActiveUser, + type 
APIAnnotation, + type APIAnnotationInfo, + type APIAnnotationType, + type APIAnnotationVisibility, + type APIAnnotationWithTask, + type APIBuildInfo, + type APIConnectomeFile, + type APIDataSource, + type APIDataStore, + type APIDataset, + type APIDatasetId, + type APIFeatureToggles, + type APIHistogramData, + type APIMapping, + type APIMaybeUnimportedDataset, + type APIMeshFile, + type APIAvailableTasksReport, + type APIOrganization, + type APIOrganizationCompact, + type APIProject, + type APIProjectCreator, + type APIProjectProgressReport, + type APIProjectUpdater, + type APIProjectWithStatus, + type APIPublication, + type APIMagRestrictions, + type APIScript, + type APIScriptCreator, + type APIScriptUpdater, + type APITask, + type APITaskType, + type APITeam, + type APITimeInterval, + type APITimeTrackingPerAnnotation, + type APITimeTrackingSpan, + type APITracingStore, + type APIUpdateActionBatch, + type APIUser, + type APIUserLoggedTime, + type APIUserTheme, + type AnnotationLayerDescriptor, + type AnnotationViewConfiguration, + type ExperienceDomainList, + type ServerTracing, + type TracingType, + type ServerEditableMapping, + type APICompoundType, + type ZarrPrivateLink, + type VoxelyticsWorkflowReport, + type VoxelyticsChunkStatistics, + type ShortLink, + type VoxelyticsWorkflowListing, + type APIPricingPlanStatus, + type VoxelyticsLogLine, + type APIUserCompact, + type APIDatasetCompact, + type MaintenanceInfo, + type AdditionalCoordinate, + type LayerLink, + type VoxelSize, + type APITimeTrackingPerUser, + AnnotationLayerType, } from "types/api_flow_types"; import { APIAnnotationTypeEnum } from "types/api_flow_types"; import type { LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; @@ -640,25 +640,8 @@ export function setOthersMayEditForAnnotation( ); } -export function updateAnnotationLayer( - annotationId: string, - annotationType: APIAnnotationType, - tracingId: string, - layerProperties: EditableLayerProperties, -): Promise<{ - name: string | 
null | undefined; -}> { - return Request.sendJSONReceiveJSON( - `/api/annotations/${annotationType}/${annotationId}/editLayer/${tracingId}`, - { - method: "PATCH", - data: layerProperties, - }, - ); -} - type AnnotationLayerCreateDescriptor = { - typ: "Skeleton" | "Volume"; + typ: AnnotationLayerType; name: string | null | undefined; autoFallbackLayer?: boolean; fallbackLayerName?: string | null | undefined; @@ -666,20 +649,6 @@ type AnnotationLayerCreateDescriptor = { magRestrictions?: APIMagRestrictions | null | undefined; }; -export function addAnnotationLayer( - annotationId: string, - annotationType: APIAnnotationType, - newAnnotationLayer: AnnotationLayerCreateDescriptor, -): Promise { - return Request.sendJSONReceiveJSON( - `/api/annotations/${annotationType}/${annotationId}/addAnnotationLayer`, - { - method: "PATCH", - data: newAnnotationLayer, - }, - ); -} - export function deleteAnnotationLayer( annotationId: string, annotationType: APIAnnotationType, @@ -749,7 +718,7 @@ export function duplicateAnnotation( }); } -export async function getAnnotationInformation( +export async function getMaybeOutdatedAnnotationInformation( annotationId: string, options: RequestOptions = {}, ): Promise { @@ -762,6 +731,19 @@ export async function getAnnotationInformation( return annotation; } +export async function getNewestAnnotationInformation( + annotationId: string, + tracingstoreUrl: string, +): Promise { + const infoUrl = `${tracingstoreUrl}/tracings/annotation/${annotationId}`; + const annotationWithMessages = await Request.receiveJSON(infoUrl); // TODO adjust return type and implement proto type in frontend + + // Extract the potential messages property before returning the task to avoid + // failing e2e tests in annotations.e2e.ts + const { messages: _messages, ...annotation } = annotationWithMessages; + return annotation; +} + export async function getAnnotationCompoundInformation( annotationId: string, annotationType: APICompoundType, @@ -802,14 +784,14 @@ export 
function createExplorational( if (typ === "skeleton") { layers = [ { - typ: "Skeleton", + typ: AnnotationLayerType.Skeleton, name: "Skeleton", }, ]; } else if (typ === "volume") { layers = [ { - typ: "Volume", + typ: AnnotationLayerType.Volume, name: fallbackLayerName, fallbackLayerName, autoFallbackLayer, @@ -820,11 +802,11 @@ export function createExplorational( } else { layers = [ { - typ: "Skeleton", + typ: AnnotationLayerType.Skeleton, name: "Skeleton", }, { - typ: "Volume", + typ: AnnotationLayerType.Volume, name: fallbackLayerName, fallbackLayerName, autoFallbackLayer, @@ -841,7 +823,9 @@ export async function getTracingsForAnnotation( annotation: APIAnnotation, version: number | null | undefined, ): Promise> { - const skeletonLayers = annotation.annotationLayers.filter((layer) => layer.typ === "Skeleton"); + const skeletonLayers = annotation.annotationLayers.filter( + (layer) => layer.typ === AnnotationLayerType.Skeleton, + ); const fullAnnotationLayers = await Promise.all( annotation.annotationLayers.map((layer) => getTracingForAnnotationType(annotation, layer, version), @@ -872,7 +856,7 @@ export async function acquireAnnotationMutex( export async function getTracingForAnnotationType( annotation: APIAnnotation, annotationLayerDescriptor: AnnotationLayerDescriptor, - version?: number | null | undefined, // TODO: Use this parameter + version?: number | null | undefined, // TODOM: Use this parameter ): Promise { const { tracingId, typ } = annotationLayerDescriptor; const tracingType = typ.toLowerCase() as "skeleton" | "volume"; @@ -1001,17 +985,6 @@ export async function importVolumeTracing( ); } -export function convertToHybridTracing( - annotationId: string, - fallbackLayerName: string | null | undefined, -): Promise { - return Request.receiveJSON(`/api/annotations/Explorational/${annotationId}/makeHybrid`, { - method: "PATCH", - // @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ method: "PATCH"; fallbackLayer... 
Remove this comment to see the full error message - fallbackLayerName, - }); -} - export async function downloadWithFilename(downloadUrl: string) { const link = document.createElement("a"); link.href = downloadUrl; diff --git a/frontend/javascripts/admin/task/task_create_form_view.tsx b/frontend/javascripts/admin/task/task_create_form_view.tsx index 075086defad..8b90128c6dc 100644 --- a/frontend/javascripts/admin/task/task_create_form_view.tsx +++ b/frontend/javascripts/admin/task/task_create_form_view.tsx @@ -35,7 +35,7 @@ import { createTaskFromNML, createTasks, getActiveDatasetsOfMyOrganization, - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getProjects, getScripts, getTask, @@ -481,12 +481,12 @@ function TaskCreateFormView({ taskId, history }: Props) { const annotationResponse = (await tryToAwaitPromise( - getAnnotationInformation(value, { + getMaybeOutdatedAnnotationInformation(value, { showErrorToast: false, }), )) || (await tryToAwaitPromise( - getAnnotationInformation(value, { + getMaybeOutdatedAnnotationInformation(value, { showErrorToast: false, }), )); diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 6710eba5b37..6378e6d6511 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -68,6 +68,8 @@ import { ActiveTabContext, RenderingTabContext } from "./dashboard_contexts"; import type { SearchProps } from "antd/lib/input"; import { getCombinedStatsFromServerAnnotation } from "oxalis/model/accessors/annotation_accessor"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; +import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; +import { updateMetadataOfAnnotation } from "oxalis/model/sagas/update_actions"; const { Search } = Input; const pageLength: number = 1000; @@ -384,14 +386,10 @@ 
class ExplorativeAnnotationsView extends React.PureComponent { }; renameTracing(tracing: APIAnnotationInfo, name: string) { - editAnnotation(tracing.id, tracing.typ, { name }) - .then(() => { - Toast.success(messages["annotation.was_edited"]); - this.updateTracingInLocalState(tracing, (t) => update(t, { name: { $set: name } })); - }) - .catch((error) => { - handleGenericError(error as Error, "Could not update the annotation name."); - }); + Store.dispatch( + pushSaveQueueTransaction([updateMetadataOfAnnotation(name)], "unused-tracing-id"), + ); + this.updateTracingInLocalState(tracing, (t) => update(t, { name: { $set: name } })); } archiveAll = () => { diff --git a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts index b30708bb6c9..f668fc3f7c5 100644 --- a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts @@ -1,10 +1,11 @@ import Maybe from "data.maybe"; import _ from "lodash"; -import type { - ServerTracing, - ServerSkeletonTracing, - APIAnnotation, - AnnotationLayerDescriptor, +import { + type ServerTracing, + type ServerSkeletonTracing, + type APIAnnotation, + type AnnotationLayerDescriptor, + AnnotationLayerType, } from "types/api_flow_types"; import type { Tracing, @@ -41,7 +42,7 @@ export function getSkeletonDescriptor( annotation: APIAnnotation, ): AnnotationLayerDescriptor | null | undefined { const skeletonLayers = annotation.annotationLayers.filter( - (descriptor) => descriptor.typ === "Skeleton", + (descriptor) => descriptor.typ === AnnotationLayerType.Skeleton, ); if (skeletonLayers.length > 0) { diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index 870ba1f730f..64f2c04eadc 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ 
b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -36,8 +36,8 @@ export const pushSaveQueueTransaction = ( ({ type: "PUSH_SAVE_QUEUE_TRANSACTION", items, - transactionId, tracingId, + transactionId, }) as const; export const saveNowAction = () => diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 73de1df5aca..2bfd14c7f49 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -193,12 +193,21 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } } +const layerIndependentActions = new Set([ + "updateTdCamera", + "revertToVersion", + "addLayerToAnnotation", + "deleteLayerFromAnnotation", + "updateLayerMetadata", + "updateMetadataOfAnnotation", +]); + export function addTracingIdToActions( actions: UpdateAction[], tracingId: string, ): Array { return actions.map((action) => { - if (action.name === "updateTdCamera" || action.name === "revertToVersion") { + if (layerIndependentActions.has(action.name)) { return action as UpdateAction; } return { diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index c93961d49cd..2b036703d5c 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -9,11 +9,7 @@ import { } from "oxalis/model/actions/annotation_actions"; import type { EditableAnnotation } from "admin/admin_rest_api"; import type { ActionPattern } from "redux-saga/effects"; -import { - editAnnotation, - updateAnnotationLayer, - acquireAnnotationMutex, -} from "admin/admin_rest_api"; +import { editAnnotation, acquireAnnotationMutex } from "admin/admin_rest_api"; import { SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, @@ -47,6 +43,8 @@ import { determineLayout } from 
"oxalis/view/layouting/default_layout_configs"; import { getLastActiveLayout, getLayoutConfig } from "oxalis/view/layouting/layout_persistence"; import { is3dViewportMaximized } from "oxalis/view/layouting/flex_layout_helper"; import { needsLocalHdf5Mapping } from "../accessors/volumetracing_accessor"; +import { pushSaveQueueTransaction } from "../actions/save_actions"; +import { updateAnnotationLayerName } from "./update_actions"; /* Note that this must stay in sync with the back-end constant MaxMagForAgglomerateMapping compare https://github.com/scalableminds/webknossos/issues/5223. @@ -103,16 +101,11 @@ export function* pushAnnotationUpdateAsync(action: Action) { function* pushAnnotationLayerUpdateAsync(action: EditAnnotationLayerAction): Saga { const { tracingId, layerProperties } = action; - const annotationId = yield* select((storeState) => storeState.tracing.annotationId); - const annotationType = yield* select((storeState) => storeState.tracing.annotationType); - yield* retry( - SETTINGS_MAX_RETRY_COUNT, - SETTINGS_RETRY_DELAY, - updateAnnotationLayer, - annotationId, - annotationType, - tracingId, - layerProperties, + yield* put( + pushSaveQueueTransaction( + [updateAnnotationLayerName(tracingId, layerProperties.name)], + tracingId, + ), ); } diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index b7ab2a80b87..cbfdc61f9e5 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -10,7 +10,11 @@ import type { NumberLike, } from "oxalis/store"; import { convertUserBoundingBoxesFromFrontendToServer } from "oxalis/model/reducers/reducer_helpers"; -import type { AdditionalCoordinate, MetadataEntryProto } from "types/api_flow_types"; +import type { + AdditionalCoordinate, + APIMagRestrictions, + MetadataEntryProto, +} from "types/api_flow_types"; export type NodeWithTreeId = { treeId: number; @@ -51,6 
+55,10 @@ export type RevertToVersionUpdateAction = ReturnType; export type RemoveFallbackLayerUpdateAction = ReturnType; export type UpdateTdCameraUpdateAction = ReturnType; export type UpdateMappingNameUpdateAction = ReturnType; +type AddLayerToAnnotationUpdateAction = ReturnType; +type DeleteAnnotationLayerUpdateAction = ReturnType; +type UpdateAnnotationLayerNameUpdateAction = ReturnType; +type UpdateMetadataOfAnnotationUpdateAction = ReturnType; export type SplitAgglomerateUpdateAction = ReturnType; export type MergeAgglomerateUpdateAction = ReturnType; @@ -82,6 +90,10 @@ export type UpdateAction = | RemoveFallbackLayerUpdateAction | UpdateTdCameraUpdateAction | UpdateMappingNameUpdateAction + | AddLayerToAnnotationUpdateAction + | DeleteAnnotationLayerUpdateAction + | UpdateAnnotationLayerNameUpdateAction + | UpdateMetadataOfAnnotationUpdateAction | SplitAgglomerateUpdateAction | MergeAgglomerateUpdateAction; @@ -529,6 +541,47 @@ export function mergeAgglomerate( } as const; } +type AnnotationLayerCreationParameters = { + typ: "Skeleton" | "Volume"; + name: string | null | undefined; + autoFallbackLayer?: boolean; + fallbackLayerName?: string | null | undefined; + mappingName?: string | null | undefined; + magRestrictions?: APIMagRestrictions | null | undefined; +}; + +export function addLayerToAnnotation(parameters: AnnotationLayerCreationParameters) { + return { + name: "addLayerToAnnotation", + value: { layerParameters: parameters }, + } as const; +} + +export function deleteAnnotationLayer( + tracingId: string, + layerName: string, + typ: "Skeleton" | "Volume", +) { + return { + name: "deleteLayerFromAnnotation", + value: { tracingId, layerName, typ }, + } as const; +} + +export function updateAnnotationLayerName(tracingId: string, newLayerName: string) { + return { + name: "updateLayerMetadata", + value: { tracingId, layerName: newLayerName }, + } as const; +} + +export function updateMetadataOfAnnotation(name?: string, description?: string) { + return { 
+ name: "updateMetadataOfAnnotation", + value: { name, description }, + } as const; +} + function enforceValidMetadata(metadata: MetadataEntryProto[]): MetadataEntryProto[] { // We do not want to save metadata with duplicate keys. Validation errors // will warn the user in case this exists. However, we allow duplicate keys in the diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 902216e0d26..c456701fbff 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -35,7 +35,7 @@ import { getServerVolumeTracings } from "oxalis/model/accessors/volumetracing_ac import { getSomeServerTracing } from "oxalis/model/accessors/tracing_accessor"; import { getTracingsForAnnotation, - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getEmptySandboxAnnotationInformation, getDataset, getSharingTokenFromUrlParameters, @@ -132,7 +132,7 @@ export async function initialize( annotation = initialMaybeCompoundType != null ? 
await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType) - : await getAnnotationInformation(annotationId); + : await getMaybeOutdatedAnnotationInformation(annotationId); datasetId = { name: annotation.dataSetName, owningOrganization: annotation.organization, diff --git a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx index 9f3f5f52d9d..cd09d7c3f84 100644 --- a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx @@ -8,7 +8,7 @@ import { addTreesAndGroupsAction } from "oxalis/model/actions/skeletontracing_ac import { getSkeletonDescriptor } from "oxalis/model/accessors/skeletontracing_accessor"; import { createMutableTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getAnnotationCompoundInformation, getTracingForAnnotationType, } from "admin/admin_rest_api"; @@ -145,7 +145,7 @@ class _MergeModalView extends PureComponent { const { selectedExplorativeAnnotation } = this.state; if (selectedExplorativeAnnotation != null) { - const annotation = await getAnnotationInformation(selectedExplorativeAnnotation); + const annotation = await getMaybeOutdatedAnnotationInformation(selectedExplorativeAnnotation); this.mergeAnnotationIntoActiveTracing(annotation); } }; diff --git a/frontend/javascripts/oxalis/view/components/editable_text_label.tsx b/frontend/javascripts/oxalis/view/components/editable_text_label.tsx index 6b514d5bc2d..443698164f7 100644 --- a/frontend/javascripts/oxalis/view/components/editable_text_label.tsx +++ b/frontend/javascripts/oxalis/view/components/editable_text_label.tsx @@ -14,7 +14,7 @@ type Rule = { }; export type EditableTextLabelProp = { value: string; - onChange: (...args: Array) => any; + onChange: (newValue: string) => any; rules?: Rule[]; rows?: 
number; markdown?: boolean; diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index 1153c9eaf0e..51968b6bae6 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -21,7 +21,7 @@ import { getSegmentationLayers, } from "oxalis/model/accessors/dataset_accessor"; import { - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getDataset, getTracingForAnnotationType, runTraining, @@ -35,7 +35,12 @@ import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import { formatVoxels } from "libs/format_utils"; import * as Utils from "libs/utils"; import { V3 } from "libs/mjs"; -import type { APIAnnotation, APIDataset, ServerVolumeTracing } from "types/api_flow_types"; +import { + AnnotationLayerType, + type APIAnnotation, + type APIDataset, + type ServerVolumeTracing, +} from "types/api_flow_types"; import type { Vector3 } from "oxalis/constants"; import { serverVolumeToClientVolumeTracing } from "oxalis/model/reducers/volumetracing_reducer"; import { convertUserBoundingBoxesFromServerToFrontend } from "oxalis/model/reducers/reducer_helpers"; @@ -472,7 +477,7 @@ function AnnotationsCsvInput({ const newAnnotationsWithDatasets = await Promise.all( newItems.map(async (item) => { - const annotation = await getAnnotationInformation(item.annotationId); + const annotation = await getMaybeOutdatedAnnotationInformation(item.annotationId); const dataset = await getDataset({ owningOrganization: annotation.organization, name: annotation.dataSetName, @@ -493,7 +498,7 @@ function AnnotationsCsvInput({ let userBoundingBoxes = volumeTracings[0]?.userBoundingBoxes; if (!userBoundingBoxes) { const skeletonLayer = annotation.annotationLayers.find( - (layer) => layer.typ === "Skeleton", + (layer) => layer.typ === AnnotationLayerType.Skeleton, ); if (skeletonLayer) { const skeletonTracing = await 
getTracingForAnnotationType(annotation, skeletonLayer); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 2a3a4b3fe93..bd9ef5bd1a8 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -21,6 +21,7 @@ import _ from "lodash"; import classnames from "classnames"; import update from "immutability-helper"; import { + AnnotationLayerType, APIAnnotationTypeEnum, type APIDataLayer, type APIDataset, @@ -49,8 +50,6 @@ import { findDataPositionForLayer, clearCache, findDataPositionForVolumeTracing, - convertToHybridTracing, - deleteAnnotationLayer, updateDatasetDefaultConfiguration, startComputeSegmentIndexFileJob, } from "admin/admin_rest_api"; @@ -131,6 +130,8 @@ import { getDefaultLayerViewConfiguration, } from "types/schemas/dataset_view_configuration.schema"; import defaultState from "oxalis/default_state"; +import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; +import { addLayerToAnnotation, deleteAnnotationLayer } from "oxalis/model/sagas/update_actions"; type DatasetSettingsProps = { userConfiguration: UserConfiguration; @@ -150,6 +151,8 @@ type DatasetSettingsProps = { onZoomToMag: (layerName: string, arg0: Vector3) => number; onChangeUser: (key: keyof UserConfiguration, value: any) => void; reloadHistogram: (layerName: string) => void; + addSkeletonLayerToAnnotation: () => void; + deleteAnnotationLayer: (tracingId: string, type: AnnotationLayerType, layerName: string) => void; tracing: Tracing; task: Task | null | undefined; onEditAnnotationLayer: (tracingId: string, layerProperties: EditableLayerProperties) => void; @@ -453,26 +456,39 @@ class DatasetSettings extends React.PureComponent { ); - getDeleteAnnotationLayerButton = (readableName: string, layer?: APIDataLayer) => ( + getDeleteAnnotationLayerButton = ( + 
readableName: string, + type: AnnotationLayerType, + tracingId: string, + ) => (
this.deleteAnnotationLayerIfConfirmed(readableName, layer)} + onClick={() => this.deleteAnnotationLayerIfConfirmed(readableName, type, tracingId)} className="fas fa-trash icon-margin-right" />
); - getDeleteAnnotationLayerDropdownOption = (readableName: string, layer?: APIDataLayer) => ( -
this.deleteAnnotationLayerIfConfirmed(readableName, layer)}> + getDeleteAnnotationLayerDropdownOption = ( + readableName: string, + type: AnnotationLayerType, + tracingId: string, + layer?: APIDataLayer, + ) => ( +
this.deleteAnnotationLayerIfConfirmed(readableName, type, tracingId, layer)} + > Delete this annotation layer
); deleteAnnotationLayerIfConfirmed = async ( - readableAnnoationLayerName: string, + readableAnnotationLayerName: string, + type: AnnotationLayerType, + tracingId: string, layer?: APIDataLayer, ) => { const fallbackLayerNote = @@ -481,7 +497,7 @@ class DatasetSettings extends React.PureComponent { : ""; const shouldDelete = await confirmAsync({ title: `Deleting an annotation layer makes its content and history inaccessible. ${fallbackLayerNote}This cannot be undone. Are you sure you want to delete this layer?`, - okText: `Yes, delete annotation layer “${readableAnnoationLayerName}”`, + okText: `Yes, delete annotation layer “${readableAnnotationLayerName}”`, cancelText: "Cancel", maskClosable: true, closable: true, @@ -495,12 +511,8 @@ class DatasetSettings extends React.PureComponent { }, }); if (!shouldDelete) return; + this.props.deleteAnnotationLayer(tracingId, type, readableAnnotationLayerName); await Model.ensureSavedState(); - await deleteAnnotationLayer( - this.props.tracing.annotationId, - this.props.tracing.annotationType, - readableAnnoationLayerName, - ); location.reload(); }; @@ -623,6 +635,8 @@ class DatasetSettings extends React.PureComponent { const { intensityRange } = layerSettings; const layer = getLayerByName(dataset, layerName); const isSegmentation = layer.category === "segmentation"; + const layerType = + layer.category === "segmentation" ? AnnotationLayerType.Volume : AnnotationLayerType.Skeleton; const canBeMadeEditable = isSegmentation && layer.tracingId == null && this.props.controlMode === "TRACE"; const isVolumeTracing = isSegmentation ? layer.tracingId != null : false; @@ -687,7 +701,12 @@ class DatasetSettings extends React.PureComponent { ? { label: (
- {this.getDeleteAnnotationLayerDropdownOption(readableName, layer)} + {this.getDeleteAnnotationLayerDropdownOption( + readableName, + layerType, + layer.tracingId, + layer, + )}
), key: "deleteAnnotationLayer", @@ -1173,7 +1192,7 @@ class DatasetSettings extends React.PureComponent { const readableName = "Skeleton"; const skeletonTracing = enforceSkeletonTracing(tracing); const isOnlyAnnotationLayer = tracing.annotationLayers.length === 1; - const { showSkeletons } = skeletonTracing; + const { showSkeletons, tracingId } = skeletonTracing; const activeNodeRadius = getActiveNode(skeletonTracing)?.radius ?? 0; return ( @@ -1224,7 +1243,13 @@ class DatasetSettings extends React.PureComponent { }} > - {!isOnlyAnnotationLayer ? this.getDeleteAnnotationLayerButton(readableName) : null} + {!isOnlyAnnotationLayer + ? this.getDeleteAnnotationLayerButton( + readableName, + AnnotationLayerType.Skeleton, + tracingId, + ) + : null}
{showSkeletons ? ( @@ -1325,8 +1350,8 @@ class DatasetSettings extends React.PureComponent { }; addSkeletonAnnotationLayer = async () => { + this.props.addSkeletonLayerToAnnotation(); await Model.ensureSavedState(); - await convertToHybridTracing(this.props.tracing.annotationId, null); location.reload(); }; @@ -1639,6 +1664,25 @@ const mapDispatchToProps = (dispatch: Dispatch) => ({ reloadHistogram(layerName: string) { dispatch(reloadHistogramAction(layerName)); }, + + addSkeletonLayerToAnnotation() { + dispatch( + pushSaveQueueTransaction( + [ + addLayerToAnnotation({ + typ: "Skeleton", + name: "skeleton", + fallbackLayerName: undefined, + }), + ], + "unused-tracing-id", + ), + ); + }, + + deleteAnnotationLayer(tracingId: string, type: AnnotationLayerType, layerName: string) { + dispatch(deleteAnnotationLayer(tracingId, layerName, type)); + }, }); const connector = connect(mapStateToProps, mapDispatchToProps); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx index 565b8ace677..33885d71953 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx @@ -10,7 +10,6 @@ import { RestrictMagnificationSlider, } from "dashboard/advanced_dataset/create_explorative_modal"; import Store, { type Tracing } from "oxalis/store"; -import { addAnnotationLayer } from "admin/admin_rest_api"; import { getSomeMagInfoForDataset, getLayerByName, @@ -24,9 +23,12 @@ import { } from "oxalis/model/accessors/volumetracing_accessor"; import messages from "messages"; import InputComponent from "oxalis/view/components/input_component"; -import { api } from "oxalis/singletons"; +import { api, Model } from "oxalis/singletons"; import Toast from "libs/toast"; import { MappingStatusEnum } from "oxalis/constants"; +import { 
pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; +import { useDispatch } from "react-redux"; +import { addLayerToAnnotation } from "oxalis/model/sagas/update_actions"; export type ValidationResult = { isValid: boolean; message: string }; export function checkForLayerNameDuplication( @@ -101,6 +103,7 @@ export default function AddVolumeLayerModal({ const [selectedSegmentationLayerName, setSelectedSegmentationLayerName] = useState< string | undefined >(preselectedLayerName); + const dispatch = useDispatch(); const allReadableLayerNames = useMemo( () => getAllReadableLayerNames(dataset, tracing), [dataset, tracing], @@ -162,15 +165,23 @@ export default function AddVolumeLayerModal({ ); if (selectedSegmentationLayerName == null) { - await addAnnotationLayer(tracing.annotationId, tracing.annotationType, { - typ: "Volume", - name: newLayerName, - fallbackLayerName: undefined, - magRestrictions: { - min: minResolutionAllowed, - max: maxResolutionAllowed, - }, - }); + dispatch( + pushSaveQueueTransaction( + [ + addLayerToAnnotation({ + typ: "Volume", + name: newLayerName, + fallbackLayerName: undefined, + magRestrictions: { + min: minResolutionAllowed, + max: maxResolutionAllowed, + }, + }), + ], + "unused-tracing-id", + ), + ); + await Model.ensureSavedState(); } else { if (selectedSegmentationLayer == null) { throw new Error("Segmentation layer is null"); @@ -189,16 +200,24 @@ export default function AddVolumeLayerModal({ maybeMappingName = mappingInfo.mappingName; } - await addAnnotationLayer(tracing.annotationId, tracing.annotationType, { - typ: "Volume", - name: newLayerName, - fallbackLayerName, - magRestrictions: { - min: minResolutionAllowed, - max: maxResolutionAllowed, - }, - mappingName: maybeMappingName, - }); + dispatch( + pushSaveQueueTransaction( + [ + addLayerToAnnotation({ + typ: "Volume", + name: newLayerName, + fallbackLayerName, + magRestrictions: { + min: minResolutionAllowed, + max: maxResolutionAllowed, + }, + mappingName: 
maybeMappingName, + }), + ], + "unused-tracing-id", + ), + ); + await Model.ensureSavedState(); } await api.tracing.hardReload(); diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index dcbc7815c49..178958aba06 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -1,6 +1,6 @@ import { createExplorational, - getAnnotationInformation, + getMaybeOutdatedAnnotationInformation, getOrganizationForDataset, getShortLink, } from "admin/admin_rest_api"; @@ -198,7 +198,9 @@ class ReactRouter extends React.Component { serverAuthenticationCallback = async ({ match }: ContextRouter) => { try { - const annotationInformation = await getAnnotationInformation(match.params.id || ""); + const annotationInformation = await getMaybeOutdatedAnnotationInformation( + match.params.id || "", + ); return annotationInformation.visibility === "Public"; } catch (_ex) { // Annotation could not be found diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 3e0833db14f..ffb9b0243c7 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -35,7 +35,7 @@ test.before("Reset database", async () => { }); test("getAnnotationInformation()", async (t) => { const annotationId = "570ba0092a7c0e980056fe9b"; - const annotation = await api.getAnnotationInformation(annotationId); + const annotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); t.is(annotation.id, annotationId); writeTypeCheckingFile(annotation, "annotation", "APIAnnotation"); t.snapshot(annotation); @@ -43,7 +43,7 @@ test("getAnnotationInformation()", async (t) => { test("getAnnotationInformation() for public annotation while logged out", async (t) => { setCurrToken("invalidToken"); const annotationId = "88135c192faeb34c0081c05d"; - const annotation = await 
api.getAnnotationInformation(annotationId); + const annotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); t.is(annotation.id, annotationId); t.snapshot(annotation); setCurrToken(tokenUserA); @@ -78,7 +78,7 @@ test.serial("finishAnnotation() and reOpenAnnotation() for explorational", async }); test.serial("editAnnotation()", async (t) => { const annotationId = "68135c192faeb34c0081c05d"; - const originalAnnotation = await api.getAnnotationInformation(annotationId); + const originalAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); const { name, visibility, description } = originalAnnotation; const newName = "new name"; const newVisibility = "Public"; @@ -88,7 +88,7 @@ test.serial("editAnnotation()", async (t) => { visibility: newVisibility, description: newDescription, }); - const editedAnnotation = await api.getAnnotationInformation(annotationId); + const editedAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); t.is(editedAnnotation.name, newName); t.is(editedAnnotation.visibility, newVisibility); t.is(editedAnnotation.description, newDescription); @@ -106,7 +106,7 @@ test.serial("finishAllAnnotations()", async (t) => { const annotationIds = ["78135c192faeb34c0081c05d", "78135c192faeb34c0081c05e"]; await api.finishAllAnnotations(annotationIds); const finishedAnnotations = await Promise.all( - annotationIds.map((id) => api.getAnnotationInformation(id)), + annotationIds.map((id) => api.getMaybeOutdatedAnnotationInformation(id)), ); t.is(finishedAnnotations.length, 2); finishedAnnotations.forEach((annotation) => { @@ -120,7 +120,9 @@ test.serial("createExplorational() and finishAnnotation()", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); t.snapshot(replaceVolatileValues(createdExplorational)); await api.finishAnnotation(createdExplorational.id, APIAnnotationTypeEnum.Explorational); - const finishedAnnotation = await 
api.getAnnotationInformation(createdExplorational.id); + const finishedAnnotation = await api.getMaybeOutdatedAnnotationInformation( + createdExplorational.id, + ); t.is(finishedAnnotation.state, "Finished"); }); test.serial("getTracingsForAnnotation()", async (t) => { diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 55a2c1fa71d..b2c4d8db95b 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -1,6 +1,10 @@ -import type { ServerSkeletonTracing, APIAnnotation } from "types/api_flow_types"; +import { + type ServerSkeletonTracing, + type APIAnnotation, + AnnotationLayerType, +} from "types/api_flow_types"; export const tracing: ServerSkeletonTracing = { - typ: "Skeleton", + typ: AnnotationLayerType.Skeleton, id: "47e37793-d0be-4240-a371-87ce68561a13", trees: [ { @@ -173,11 +177,12 @@ export const annotation: APIAnnotation = { allowDownload: true, allowSave: true, }, + version: 0, annotationLayers: [ { - name: "Skeleton", + name: AnnotationLayerType.Skeleton, tracingId: "47e37793-d0be-4240-a371-87ce68561a13", - typ: "Skeleton", + typ: AnnotationLayerType.Skeleton, stats: {}, }, ], diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 4de739d10b4..42fcf83ce52 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -385,6 +385,10 @@ export enum TracingTypeEnum { volume = "volume", hybrid = "hybrid", } +export enum AnnotationLayerType { + Skeleton = "Skeleton", + Volume = "Volume", +} export type TracingType = keyof typeof TracingTypeEnum; export type APITaskType = { readonly id: string; @@ -467,12 +471,12 @@ export type APITask = { export type AnnotationLayerDescriptor = { name: string; tracingId: string; - typ: "Skeleton" | "Volume"; + typ: 
AnnotationLayerType; stats: TracingStats | EmptyObject; }; -export type EditableLayerProperties = Partial<{ +export type EditableLayerProperties = { name: string; -}>; +}; export type APIAnnotationInfo = { readonly annotationLayers: Array; readonly dataSetName: string; From 41cada12b4199b56db8cb11447808c07c50c66a7 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 28 Oct 2024 16:57:34 +0100 Subject: [PATCH 129/361] spelling of layer type proto --- .../models/annotation/AnnotationLayerType.scala | 8 ++++---- webknossos-datastore/proto/Annotation.proto | 4 ++-- .../tracingstore/annotation/TSAnnotationService.scala | 10 +++++----- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala index 9b1a7dd2d9d..2593bedce4f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayerType.scala @@ -9,14 +9,14 @@ object AnnotationLayerType extends ExtendedEnumeration { def toProto(annotationLayerType: AnnotationLayerType): AnnotationLayerTypeProto = annotationLayerType match { - case Skeleton => AnnotationLayerTypeProto.skeleton - case Volume => AnnotationLayerTypeProto.volume + case Skeleton => AnnotationLayerTypeProto.Skeleton + case Volume => AnnotationLayerTypeProto.Volume } def fromProto(p: AnnotationLayerTypeProto): AnnotationLayerType = p match { - case AnnotationLayerTypeProto.skeleton => Skeleton - case AnnotationLayerTypeProto.volume => Volume + case AnnotationLayerTypeProto.Skeleton => Skeleton + case AnnotationLayerTypeProto.Volume => Volume case AnnotationLayerTypeProto.Unrecognized(_) => Volume // unrecognized should never happen, artifact of proto code generation } diff --git 
a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 4fe56262b5c..e8938373307 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -3,8 +3,8 @@ syntax = "proto2"; package com.scalableminds.webknossos.datastore; enum AnnotationLayerTypeProto { - skeleton = 1; - volume = 2; + Skeleton = 1; + Volume = 2; } message AnnotationProto { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 39ccdb92d0a..627e4b640c7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -151,7 +151,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss .exists(_.name == action.layerParameters.getNameWithDefault)) ?~> "addLayer.nameInUse" _ <- bool2Fox( !annotationWithTracings.annotation.layers.exists( - _.`type` == AnnotationLayerTypeProto.skeleton && action.layerParameters.typ == AnnotationLayerType.Skeleton)) ?~> "addLayer.onlyOneSkeletonAllowed" + _.`type` == AnnotationLayerTypeProto.Skeleton && action.layerParameters.typ == AnnotationLayerType.Skeleton)) ?~> "addLayer.onlyOneSkeletonAllowed" tracing <- remoteWebknossosClient.createTracingFor(annotationId, action.layerParameters, previousVersion = targetVersion - 1) @@ -366,7 +366,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestAll: Boolean)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { val skeletonTracingIds = if (requestAll) - annotation.layers.filter(_.`type` == AnnotationLayerTypeProto.skeleton).map(_.tracingId) + annotation.layers.filter(_.`type` == 
AnnotationLayerTypeProto.Skeleton).map(_.tracingId) else { (updates.flatMap { case u: SkeletonUpdateAction => Some(u.actionTracingId) @@ -375,7 +375,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } val volumeTracingIds = if (requestAll) - annotation.layers.filter(_.`type` == AnnotationLayerTypeProto.volume).map(_.tracingId) + annotation.layers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) else { (updates.flatMap { case u: VolumeUpdateAction => Some(u.actionTracingId) @@ -670,7 +670,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss magRestrictions: MagRestrictions)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationLayerProto] = for { newTracingId <- layer.`type` match { - case AnnotationLayerTypeProto.volume => + case AnnotationLayerTypeProto.Volume => duplicateVolumeTracing(annotationId, layer.tracingId, version, @@ -679,7 +679,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss magRestrictions, editPosition, editRotation) - case AnnotationLayerTypeProto.skeleton => + case AnnotationLayerTypeProto.Skeleton => duplicateSkeletonTracing(annotationId, layer.tracingId, version, From 05b7e4075d17285813454940127844b8e9dcfa0d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 28 Oct 2024 19:01:53 +0100 Subject: [PATCH 130/361] ensure loading newest annotation version from tracing store and patching annotation info with that information when loading an annotation --- frontend/javascripts/admin/admin_rest_api.ts | 20 +++++++---- .../oxalis/model/helpers/proto_helpers.ts | 16 ++++++++- .../oxalis/model_initialization.ts | 33 ++++++++++++++++--- frontend/javascripts/types/api_flow_types.ts | 13 ++++++++ 4 files changed, 71 insertions(+), 11 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index f02cc6946d9..5fec4b98d60 100644 --- 
a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -65,6 +65,7 @@ import { type VoxelSize, type APITimeTrackingPerUser, AnnotationLayerType, + type APITracingStoreAnnotation, } from "types/api_flow_types"; import { APIAnnotationTypeEnum } from "types/api_flow_types"; import type { LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; @@ -87,6 +88,7 @@ import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_ import { parseProtoListOfLong, parseProtoTracing, + parseProtoTracingStoreAnnotation, serializeProtoListOfLong, } from "oxalis/model/helpers/proto_helpers"; import type { RequestOptions } from "libs/request"; @@ -913,15 +915,21 @@ export function getUpdateActionLog( }); } -export function getNewestVersionForTracing( +export async function getNewestVersionOfTracing( tracingStoreUrl: string, annotationId: string, -): Promise { - return doWithToken((token) => - Request.receiveJSON( - `${tracingStoreUrl}/tracings/annotation/${annotationId}/newestVersion?token=${token}`, - ).then((obj) => obj.version), +): Promise { + const annotationArrayBuffer = await doWithToken((token) => + Request.receiveArraybuffer( + `${tracingStoreUrl}/tracings/annotation/${annotationId}?token=${token}`, + { + headers: { + Accept: "application/x-protobuf", + }, + }, + ), ); + return parseProtoTracingStoreAnnotation(annotationArrayBuffer); } export function hasSegmentIndexInDataStore( diff --git a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts index cd3430779d9..3af4f4e3c13 100644 --- a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts @@ -1,9 +1,11 @@ import { Root } from "protobufjs/light"; -import type { ServerTracing } from "types/api_flow_types"; +import type { APITracingStoreAnnotation, ServerTracing } from "types/api_flow_types"; // @ts-expect-error ts-migrate(2307) 
FIXME: Cannot find module 'SkeletonTracing.proto' or its ... Remove this comment to see the full error message import SkeletonTracingProto from "SkeletonTracing.proto"; // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'VolumeTracing.proto' or its co... Remove this comment to see the full error message import VolumeTracingProto from "VolumeTracing.proto"; +// @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'AnnotationProto.proto' or its co... Remove this comment to see the full error message +import AnnotationProto from "Annotation.proto"; // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'ListOfLong.proto' or its co... Remove this comment to see the full error message import ListOfLongProto from "ListOfLong.proto"; import { isBigInt } from "libs/utils"; @@ -64,4 +66,16 @@ export function parseProtoListOfLong( longs: Number, }).items; } + +export function parseProtoTracingStoreAnnotation(annotationArrayBuffer: ArrayBuffer): any { + const protoRoot = Root.fromJSON(AnnotationProto); + const messageType = protoRoot.lookupType(`${PROTO_PACKAGE}.AnnotationProto`); + const message = messageType.decode(new Uint8Array(annotationArrayBuffer)); + return messageType.toObject(message, { + arrays: true, + objects: true, + enums: String, + longs: Number, + }) as APITracingStoreAnnotation; +} export default {}; diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index c456701fbff..e753c152c7e 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -43,6 +43,7 @@ import { getDatasetViewConfiguration, getEditableMappingInfo, getAnnotationCompoundInformation, + getNewestVersionOfTracing, } from "admin/admin_rest_api"; import { dispatchMaybeFetchMeshFilesAsync, @@ -129,10 +130,34 @@ export async function initialize( if (initialCommandType.type === ControlModeEnum.TRACE) { const { annotationId } = 
initialCommandType; - annotation = - initialMaybeCompoundType != null - ? await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType) - : await getMaybeOutdatedAnnotationInformation(annotationId); + if (initialMaybeCompoundType != null) { + annotation = await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType); + } else { + let maybeOutdatedAnnotation = await getMaybeOutdatedAnnotationInformation(annotationId); + const annotationFromTracingStore = await getNewestVersionOfTracing( + maybeOutdatedAnnotation.tracingStore.url, + maybeOutdatedAnnotation.id, + ); + const completeAnnotation = { + ...maybeOutdatedAnnotation, + name: annotationFromTracingStore.name, + description: annotationFromTracingStore.description, + }; + annotationFromTracingStore.layers.forEach((layer) => { + if ( + maybeOutdatedAnnotation.annotationLayers.find((l) => l.tracingId === layer.tracingId) == + null + ) { + completeAnnotation.annotationLayers.push({ + tracingId: layer.tracingId, + name: layer.name, + typ: layer.type, + stats: {}, + }); + } + }); + annotation = completeAnnotation; + } datasetId = { name: annotation.dataSetName, owningOrganization: annotation.organization, diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 42fcf83ce52..ee1a3d92706 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -575,6 +575,19 @@ export type APITimeTrackingPerAnnotation = { timeMillis: number; annotationLayerStats: Array; }; +type APITracingStoreAnnotationLayer = { + tracingId: string; + name: string; + type: AnnotationLayerType; +}; + +export type APITracingStoreAnnotation = { + name: string; + description: string; + version: number; + layers: APITracingStoreAnnotationLayer[]; +}; + export type APITimeTrackingPerUser = { user: APIUserCompact & { email: string; From 8d5be054e30e765dfabcbf056aefcb01b9e14240 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 28 Oct 2024 19:29:12 +0100 Subject: [PATCH 131/361] send update annotation name and description as update actions --- frontend/javascripts/admin/admin_rest_api.ts | 2 -- .../model/actions/annotation_actions.ts | 4 +-- .../oxalis/model/sagas/annotation_saga.tsx | 33 ++++++++++++++++--- 3 files changed, 30 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 5fec4b98d60..b20e7978b7e 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -598,8 +598,6 @@ export function reOpenAnnotation( } export type EditableAnnotation = { - name: string; - description: string; visibility: APIAnnotationVisibility; tags: Array; viewConfiguration?: AnnotationViewConfiguration; diff --git a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts index 1aa7ff5e470..a362151b945 100644 --- a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts @@ -20,10 +20,10 @@ import Deferred from "libs/async/deferred"; import type { AdditionalCoordinate } from "types/api_flow_types"; type InitializeAnnotationAction = ReturnType; -type SetAnnotationNameAction = ReturnType; +export type SetAnnotationNameAction = ReturnType; type SetAnnotationVisibilityAction = ReturnType; export type EditAnnotationLayerAction = ReturnType; -type SetAnnotationDescriptionAction = ReturnType; +export type SetAnnotationDescriptionAction = ReturnType; type SetAnnotationAllowUpdateAction = ReturnType; type SetBlockedByUserAction = ReturnType; type SetUserBoundingBoxesAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index 2b036703d5c..5bb550e9eb0 100644 --- 
a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -4,6 +4,8 @@ import type { Action } from "oxalis/model/actions/actions"; import { type EditAnnotationLayerAction, setAnnotationAllowUpdateAction, + type SetAnnotationDescriptionAction, + type SetAnnotationNameAction, setBlockedByUserAction, type SetOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; @@ -44,13 +46,36 @@ import { getLastActiveLayout, getLayoutConfig } from "oxalis/view/layouting/layo import { is3dViewportMaximized } from "oxalis/view/layouting/flex_layout_helper"; import { needsLocalHdf5Mapping } from "../accessors/volumetracing_accessor"; import { pushSaveQueueTransaction } from "../actions/save_actions"; -import { updateAnnotationLayerName } from "./update_actions"; +import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./update_actions"; /* Note that this must stay in sync with the back-end constant MaxMagForAgglomerateMapping compare https://github.com/scalableminds/webknossos/issues/5223. 
*/ const MAX_MAG_FOR_AGGLOMERATE_MAPPING = 16; +export function* pushAnnotationNameUpdateAction(action: SetAnnotationNameAction) { + const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); + if (!mayEdit) { + return; + } + yield* put( + pushSaveQueueTransaction([updateMetadataOfAnnotation(action.name)], "unused-tracing-id"), + ); +} + +export function* pushAnnotationDescriptionUpdateAction(action: SetAnnotationDescriptionAction) { + const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); + if (!mayEdit) { + return; + } + yield* put( + pushSaveQueueTransaction( + [updateMetadataOfAnnotation(undefined, action.description)], + "unused-tracing-id", + ), + ); +} + export function* pushAnnotationUpdateAsync(action: Action) { const tracing = yield* select((state) => state.tracing); const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); @@ -68,9 +93,7 @@ export function* pushAnnotationUpdateAsync(action: Action) { }; // The extra type annotation is needed here for flow const editObject: Partial = { - name: tracing.name, visibility: tracing.visibility, - description: tracing.description, viewConfiguration, }; try { @@ -207,9 +230,9 @@ export function* watchAnnotationAsync(): Saga { // name, only the latest action is relevant. If `_takeEvery` was used, // all updates to the annotation name would be retried regularly, which // would also cause race conditions. 
- yield* takeLatest("SET_ANNOTATION_NAME", pushAnnotationUpdateAsync); + yield* takeLatest("SET_ANNOTATION_NAME", pushAnnotationNameUpdateAction); yield* takeLatest("SET_ANNOTATION_VISIBILITY", pushAnnotationUpdateAsync); - yield* takeLatest("SET_ANNOTATION_DESCRIPTION", pushAnnotationUpdateAsync); + yield* takeLatest("SET_ANNOTATION_DESCRIPTION", pushAnnotationDescriptionUpdateAction); yield* takeLatest( ((action: Action) => action.type === "UPDATE_LAYER_SETTING" && From 85dece0ef3a0661bb8ee338154ecd6ce62ba3f95 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 29 Oct 2024 09:24:03 +0100 Subject: [PATCH 132/361] WIP task creation --- app/controllers/AnnotationController.scala | 3 +- app/models/annotation/AnnotationService.scala | 1 + .../WKRemoteTracingStoreClient.scala | 33 ++++++++++++++----- app/models/task/TaskCreationService.scala | 7 +++- .../webknossos/datastore/rpc/RPCRequest.scala | 2 +- .../annotation/TSAnnotationService.scala | 6 ++++ .../controllers/TSAnnotationController.scala | 5 ++- .../volume/VolumeTracingService.scala | 31 +++++++++-------- ...alableminds.webknossos.tracingstore.routes | 2 +- 9 files changed, 63 insertions(+), 27 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 9a915500a90..689586935b4 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -441,7 +441,8 @@ class AnnotationController @Inject()( isFromTask = annotation._task.isDefined, editPosition = None, editRotation = None, - boundingBox = dataSource.map(_.boundingBox), + boundingBox = None, + datasetBoundingBox = dataSource.map(_.boundingBox), magRestrictions = MagRestrictions.empty ) newAnnotationLayers = newAnnotationProto.layers.map(AnnotationLayer.fromProto) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index bade14cfdf5..93d9269aaef 100755 --- a/app/models/annotation/AnnotationService.scala 
+++ b/app/models/annotation/AnnotationService.scala @@ -372,6 +372,7 @@ class AnnotationService @Inject()( editPosition = None, editRotation = None, boundingBox = None, + datasetBoundingBox = None, magRestrictions = MagRestrictions.empty ) newAnnotation = annotationBase.copy( diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index b66ccee76b6..e5e2bb04eda 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -90,6 +90,8 @@ class WKRemoteTracingStoreClient( .postProto[AnnotationProto](annotationProto) } + // Used in duplicate route. History and version are kept + // TODO: can we remove some params here, if they are used only in task case? def duplicateAnnotation(annotationId: ObjectId, newAnnotationId: ObjectId, version: Option[Long], @@ -97,6 +99,7 @@ class WKRemoteTracingStoreClient( editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], magRestrictions: MagRestrictions, ): Fox[AnnotationProto] = { logger.debug(s"Called to duplicate annotation $annotationId." + baseInfo) @@ -107,27 +110,41 @@ class WKRemoteTracingStoreClient( .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) + .addQueryStringOptional("datasetBoundingBox", datasetBoundingBox.map(_.toLiteral)) .addQueryString("isFromTask" -> isFromTask.toString) .addQueryStringOptional("minMag", magRestrictions.minStr) .addQueryStringOptional("maxMag", magRestrictions.maxStr) .postWithProtoResponse[AnnotationProto]()(AnnotationProto) } - def duplicateSkeletonTracing(skeletonTracingId: String, - versionString: Option[String] = None, - isFromTask: Boolean = false, + // Used in task creation. 
History is dropped, new version will be zero. + // TODO: currently also used in resetToBase. Fix that. + def duplicateSkeletonTracing(skeletonTracingId: String, // TODO: might also need annotation id editPosition: Option[Vec3Int] = None, editRotation: Option[Vec3Double] = None, - boundingBox: Option[BoundingBox] = None): Fox[String] = ??? + boundingBox: Option[BoundingBox] = None): Fox[String] = + rpc(s"${tracingStore.url}/tracings/skeleton/$skeletonTracingId/duplicate").withLongTimeout + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) + .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) + .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) + .postWithJsonResponse[String]() + // Used in task creation. History is dropped, new version will be zero. + // TODO: currently also used in resetToBase. Fix that. def duplicateVolumeTracing(volumeTracingId: String, - isFromTask: Boolean = false, - datasetBoundingBox: Option[BoundingBox] = None, magRestrictions: MagRestrictions = MagRestrictions.empty, - downsample: Boolean = false, editPosition: Option[Vec3Int] = None, editRotation: Option[Vec3Double] = None, - boundingBox: Option[BoundingBox] = None): Fox[String] = ??? 
+ boundingBox: Option[BoundingBox] = None): Fox[String] = + rpc(s"${tracingStore.url}/tracings/volume/$volumeTracingId/duplicate").withLongTimeout + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) + .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) + .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) + .addQueryStringOptional("minMag", magRestrictions.minStr) + .addQueryStringOptional("maxMag", magRestrictions.maxStr) + .postWithJsonResponse[String]() def mergeSkeletonTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = { logger.debug("Called to merge SkeletonTracings by ids." + baseInfo) diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index 865316eef8b..cd79b8e3287 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -150,7 +150,12 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, for { volumeTracingOpt <- baseAnnotation.volumeTracingId newVolumeTracingId <- volumeTracingOpt - .map(id => tracingStoreClient.duplicateVolumeTracing(id, magRestrictions = magRestrictions)) + .map( + id => + tracingStoreClient.duplicateVolumeTracing(id, + editPosition = Some(params.editPosition), + editRotation = Some(params.editRotation), + magRestrictions = magRestrictions)) .getOrElse( annotationService .createVolumeTracingBase( diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index c0d9b8695d2..3830553d25b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -114,7 +114,7 @@ class RPCRequest(val id: Int, val url: String, wsClient: 
WSClient)(implicit ec: parseJsonResponse(performRequest) } - def postWithJsonResponse[T: Reads]: Fox[T] = { + def postWithJsonResponse[T: Reads](): Fox[T] = { request = request.withMethod("POST") parseJsonResponse(performRequest) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 627e4b640c7..914be0e07ff 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -629,6 +629,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], magRestrictions: MagRestrictions)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { currentAnnotation <- get(annotationId, version) @@ -641,6 +642,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss editPosition, editRotation, boundingBox, + datasetBoundingBox, magRestrictions)) _ <- duplicateUpdates(annotationId, newAnnotationId) duplicatedAnnotation = currentAnnotation.copy(layers = newLayers) @@ -667,6 +669,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], magRestrictions: MagRestrictions)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationLayerProto] = for { newTracingId <- layer.`type` match { @@ -676,6 +679,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss version, isFromTask, boundingBox, + datasetBoundingBox, 
magRestrictions, editPosition, editRotation) @@ -697,6 +701,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss version: Long, isFromTask: Boolean, boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], magRestrictions: MagRestrictions, editPosition: Option[Vec3Int], editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { @@ -708,6 +713,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss sourceTracing, isFromTask, boundingBox, + datasetBoundingBox, magRestrictions, editPosition, editRotation, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 2058d2d6e23..cbc5d219aa9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -109,7 +109,8 @@ class TSAnnotationController @Inject()( maxMag: Option[Int], editPosition: Option[String], editRotation: Option[String], - boundingBox: Option[String]): Action[AnyContent] = + boundingBox: Option[String], + datasetBoundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { @@ -118,6 +119,7 @@ class TSAnnotationController @Inject()( editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + datasetBoundingBoxParsed <- Fox.runOptional(datasetBoundingBox)(BoundingBox.fromLiteral) magRestrictions = MagRestrictions(minMag, maxMag) annotationProto <- 
annotationService.duplicate(annotationId, newAnnotationId, @@ -126,6 +128,7 @@ class TSAnnotationController @Inject()( editPositionParsed, editRotationParsed, boundingBoxParsed, + datasetBoundingBoxParsed, magRestrictions) } yield Ok(annotationProto.toByteArray).as(protobufMimeType) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 291fd749f39..39523d44a08 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -491,6 +491,7 @@ class VolumeTracingService @Inject()( sourceTracing: VolumeTracing, isFromTask: Boolean, boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], magRestrictions: MagRestrictions, editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], @@ -516,21 +517,23 @@ class VolumeTracingService @Inject()( } yield newTracing } - @SuppressWarnings(Array("OptionGet")) //We suppress this warning because we check the option beforehand private def addBoundingBoxFromTaskIfRequired(tracing: VolumeTracing, - fromTask: Boolean, - datasetBoundingBox: Option[BoundingBox]): VolumeTracing = - if (fromTask && datasetBoundingBox.isDefined) { - val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 - tracing - .addUserBoundingBoxes( - NamedBoundingBoxProto(newId, - Some("task bounding box"), - Some(true), - Some(getRandomColor), - tracing.boundingBox)) - .withBoundingBox(datasetBoundingBox.get) - } else tracing + isFromTask: Boolean, + datasetBoundingBoxOpt: Option[BoundingBox]): VolumeTracing = + datasetBoundingBoxOpt match { + case Some(datasetBoundingBox) if isFromTask => { + val newId = if 
(tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 + tracing + .addUserBoundingBoxes( + NamedBoundingBoxProto(newId, + Some("task bounding box"), + Some(true), + Some(getRandomColor), + tracing.boundingBox)) + .withBoundingBox(datasetBoundingBox) + } + case _ => tracing + } def duplicateVolumeData(sourceTracingId: String, sourceTracing: VolumeTracing, diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index e5c6ec7a9a1..9f5f3337c2a 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -11,7 +11,7 @@ POST /annotation/:annotationId/update GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) -POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) +POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], 
editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String], datasetBoundingBox: Option[String]) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() From a543415fe3bca347bf62bbc22dc16a57bb282988 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 29 Oct 2024 09:56:23 +0100 Subject: [PATCH 133/361] fix task creation without base --- app/models/annotation/AnnotationService.scala | 14 ++++++++++++-- .../annotation/WKRemoteTracingStoreClient.scala | 3 ++- app/models/task/TaskCreationService.scala | 5 +++-- .../models/annotation/AnnotationLayer.scala | 5 ++++- 4 files changed, 21 insertions(+), 6 deletions(-) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 93d9269aaef..d821d478068 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -364,6 +364,8 @@ class AnnotationService @Inject()( dataset <- datasetDAO.findOne(annotationBase._dataset) ?~> Messages("dataset.noAccess", datasetName) _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name) tracingStoreClient <- tracingStoreService.clientFor(dataset) + _ = logger.info( + f"task assignment. creating annotation $initializingAnnotationId from base $annotationBaseId for task $taskId") duplicatedAnnotationProto <- tracingStoreClient.duplicateAnnotation( annotationBaseId, initializingAnnotationId, @@ -458,13 +460,15 @@ class AnnotationService @Inject()( case _ => annotationDAO.abortInitializingAnnotation(initializingAnnotationId) } - def createAnnotationBase( + // Save annotation base to postgres AND annotation proto to tracingstore. 
+ def createAndSaveAnnotationBase( taskFox: Fox[Task], userId: ObjectId, skeletonTracingIdBox: Box[Option[String]], volumeTracingIdBox: Box[Option[String]], datasetId: ObjectId, - description: Option[String] + description: Option[String], + tracingStoreClient: WKRemoteTracingStoreClient )(implicit ctx: DBAccessContext): Fox[Unit] = for { task <- taskFox @@ -481,6 +485,12 @@ class AnnotationService @Inject()( annotationLayers, description.getOrElse(""), typ = AnnotationType.TracingBase) + annotationBaseProto = AnnotationProto(name = Some(AnnotationDefaults.defaultName), + description = Some(AnnotationDefaults.defaultDescription), + version = 0L, + layers = annotationLayers.map(_.toProto)) + _ <- tracingStoreClient.saveAnnotationProto(annotationBase._id, annotationBaseProto) + _ = logger.info(s"inserting base annotation ${annotationBase._id} for task ${task._id}") _ <- annotationDAO.insertOne(annotationBase) } yield () diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index e5e2bb04eda..6f072bbfc8d 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -83,7 +83,8 @@ class WKRemoteTracingStoreClient( } def saveAnnotationProto(annotationId: ObjectId, annotationProto: AnnotationProto): Fox[Unit] = { - logger.debug("Called to save AnnotationProto." + baseInfo) + logger.debug( + f"Called to save AnnotationProto $annotationId with layers ${annotationProto.layers.map(_.tracingId).mkString(",")}." 
+ baseInfo) rpc(s"${tracingStore.url}/tracings/annotation/save") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("annotationId" -> annotationId.toString) diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index cd79b8e3287..3fbcb651bf7 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -428,13 +428,14 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, .toList createAnnotationBaseResults: List[Fox[Unit]] = zipped.map( tuple => - annotationService.createAnnotationBase( + annotationService.createAndSaveAnnotationBase( taskFox = tuple._3, requestingUser._id, skeletonTracingIdBox = tuple._2._1, volumeTracingIdBox = tuple._2._2, dataset._id, - description = tuple._1.map(_._1.description).openOr(None) + description = tuple._1.map(_._1.description).openOr(None), + tracingStoreClient )) warnings <- warnIfTeamHasNoAccess(fullTasks.map(_._1), dataset, requestingUser) zippedTasksAndAnnotations = taskObjects zip createAnnotationBaseResults diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala index 5b1ba9b6607..b9552941679 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala @@ -17,7 +17,10 @@ case class AnnotationLayer( typ: AnnotationLayerType, name: String, stats: JsObject, -) +) { + def toProto: AnnotationLayerProto = + AnnotationLayerProto(tracingId, name, AnnotationLayerType.toProto(typ)) +} object AnnotationLayer extends FoxImplicits { implicit val jsonFormat: OFormat[AnnotationLayer] = Json.format[AnnotationLayer] From d8d7fd4080b75e745f2bfb0af91eef9d28f21c0c Mon Sep 17 00:00:00 2001 
From: Florian M Date: Tue, 29 Oct 2024 10:25:15 +0100 Subject: [PATCH 134/361] wip task creation from base id --- .../annotation/TSAnnotationService.scala | 70 +++++++++++-------- .../SkeletonTracingController.scala | 31 ++++++++ .../controllers/TSAnnotationController.scala | 2 +- .../controllers/VolumeTracingController.scala | 35 ++++++++++ .../EditableMappingService.scala | 30 +++++--- ...alableminds.webknossos.tracingstore.routes | 2 + 6 files changed, 128 insertions(+), 42 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 914be0e07ff..04206a50b8f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -677,6 +677,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss duplicateVolumeTracing(annotationId, layer.tracingId, version, + version, isFromTask, boundingBox, datasetBoundingBox, @@ -687,6 +688,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss duplicateSkeletonTracing(annotationId, layer.tracingId, version, + version, isFromTask, editPosition, editRotation, @@ -695,19 +697,20 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } yield layer.copy(tracingId = newTracingId) - private def duplicateVolumeTracing( - annotationId: String, - sourceTracingId: String, - version: Long, - isFromTask: Boolean, - boundingBox: Option[BoundingBox], - datasetBoundingBox: Option[BoundingBox], - magRestrictions: MagRestrictions, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { + def duplicateVolumeTracing( + 
sourceAnnotationId: String, + sourceTracingId: String, + sourceVersion: Long, + newVersion: Long, + isFromTask: Boolean, + boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { val newTracingId = TracingId.generate for { - sourceTracing <- findVolume(annotationId, sourceTracingId, Some(version)) + sourceTracing <- findVolume(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) newTracing <- volumeTracingService.adaptVolumeForDuplicate(sourceTracingId, newTracingId, sourceTracing, @@ -717,44 +720,49 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss magRestrictions, editPosition, editRotation, - version) - _ <- tracingDataStore.volumes.put(newTracingId, version, newTracing) + newVersion) + _ <- tracingDataStore.volumes.put(newTracingId, newVersion, newTracing) _ <- Fox.runIf(!newTracing.getHasEditableMapping)( volumeTracingService.duplicateVolumeData(sourceTracingId, sourceTracing, newTracingId, newTracing)) _ <- Fox.runIf(newTracing.getHasEditableMapping)( - duplicateEditableMapping(annotationId, sourceTracingId, newTracingId, version)) + duplicateEditableMapping(sourceAnnotationId, sourceTracingId, newTracingId, sourceVersion, newVersion)) } yield newTracingId } - private def duplicateEditableMapping(annotationId: String, + private def duplicateEditableMapping(sourceAnnotationId: String, sourceTracingId: String, newTracingId: String, - version: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + sourceVersion: Long, + newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = for { - editableMappingInfo <- findEditableMappingInfo(annotationId, sourceTracingId, Some(version)) - _ <- tracingDataStore.editableMappingsInfo.put(newTracingId, version, toProtoBytes(editableMappingInfo)) - _ <- 
editableMappingService.duplicateSegmentToAgglomerate(sourceTracingId, newTracingId, version) - _ <- editableMappingService.duplicateAgglomerateToGraph(sourceTracingId, newTracingId, version) + editableMappingInfo <- findEditableMappingInfo(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) + _ <- tracingDataStore.editableMappingsInfo.put(newTracingId, newVersion, toProtoBytes(editableMappingInfo)) + _ <- editableMappingService.duplicateSegmentToAgglomerate(sourceTracingId, + newTracingId, + sourceVersion, + newVersion) + _ <- editableMappingService.duplicateAgglomerateToGraph(sourceTracingId, newTracingId, sourceVersion, newVersion) } yield () - private def duplicateSkeletonTracing( - annotationId: String, - tracingId: String, - version: Long, - isFromTask: Boolean, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { + def duplicateSkeletonTracing( + sourceAnnotationId: String, + sourceTracingId: String, + sourceVersion: Long, + newVersion: Long, + isFromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double], + boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { val newTracingId = TracingId.generate for { - skeleton <- findSkeleton(annotationId, tracingId, Some(version)) + skeleton <- findSkeleton(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) adaptedSkeleton = skeletonTracingService.adaptSkeletonForDuplicate(skeleton, isFromTask, editPosition, editRotation, boundingBox, - version) - _ <- tracingDataStore.skeletons.put(newTracingId, version, adaptedSkeleton) + newVersion) + _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) } yield newTracingId } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index bcfe15941fd..40fca232591 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject +import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.services.UserAccessRequest @@ -142,4 +143,34 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin } } + // Used in task creation. History is dropped. Caller is responsible to create and save a matching AnnotationProto object + def duplicate(tracingId: String, + editPosition: Option[String], + editRotation: Option[String], + boundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) + editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) + boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + newestSourceVersion <- annotationService.currentMaterializableVersion(annotationId) + newTracingId <- annotationService.duplicateSkeletonTracing( + annotationId, + sourceTracingId = tracingId, + sourceVersion = newestSourceVersion, + newVersion = 0, + editPosition = 
editPositionParsed, + editRotation = editRotationParsed, + boundingBox = boundingBoxParsed, + isFromTask = false + ) + } yield Ok(Json.toJson(newTracingId)) + } + } + } + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index cbc5d219aa9..c27795c0cb4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -114,7 +114,7 @@ class TSAnnotationController @Inject()( Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index a56a441fba6..8c01ae36346 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -414,4 +414,39 @@ class VolumeTracingController @Inject()( } } + // Used in task creation. History is dropped. 
Caller is responsible to create and save a matching AnnotationProto object + def duplicate(tracingId: String, + minMag: Option[Int], + maxMag: Option[Int], + editPosition: Option[String], + editRotation: Option[String], + boundingBox: Option[String]): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) + editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) + editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) + boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) + magRestrictions = MagRestrictions(minMag, maxMag) + newestSourceVersion <- annotationService.currentMaterializableVersion(annotationId) + newTracingId <- annotationService.duplicateVolumeTracing( + annotationId, + sourceTracingId = tracingId, + sourceVersion = newestSourceVersion, + newVersion = 0, + editPosition = editPositionParsed, + editRotation = editRotationParsed, + boundingBox = boundingBoxParsed, + datasetBoundingBox = None, + isFromTask = false, + magRestrictions = magRestrictions + ) + } yield Ok(Json.toJson(newTracingId)) + } + } + } + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 66dd0e32eb7..53552cc74b1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -135,31 +135,41 @@ class EditableMappingService @Inject()( } yield 
newEditableMappingInfo } - def duplicateSegmentToAgglomerate(sourceTracingId: String, newId: String, version: Long): Fox[Unit] = { - val iterator = + def duplicateSegmentToAgglomerate(sourceTracingId: String, + newId: String, + sourceVersion: Long, + newVersion: Long): Fox[Unit] = { + val sourceIterator = new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsSegmentToAgglomerate, - Some(version)) + Some(sourceVersion)) for { - _ <- Fox.combined(iterator.map { keyValuePair => + _ <- Fox.combined(sourceIterator.map { keyValuePair => for { chunkId <- chunkIdFromSegmentToAgglomerateKey(keyValuePair.key).toFox newKey = segmentToAgglomerateKey(newId, chunkId) - _ <- tracingDataStore.editableMappingsSegmentToAgglomerate.put(newKey, version = version, keyValuePair.value) + _ <- tracingDataStore.editableMappingsSegmentToAgglomerate.put(newKey, + version = newVersion, + keyValuePair.value) } yield () }.toList) } yield () } - def duplicateAgglomerateToGraph(sourceTracingId: String, newId: String, version: Long): Fox[Unit] = { - val iterator = - new VersionedFossilDbIterator(sourceTracingId, tracingDataStore.editableMappingsAgglomerateToGraph, Some(version)) + def duplicateAgglomerateToGraph(sourceTracingId: String, + newId: String, + sourceVersion: Long, + newVersion: Long): Fox[Unit] = { + val sourceIterator = + new VersionedFossilDbIterator(sourceTracingId, + tracingDataStore.editableMappingsAgglomerateToGraph, + Some(sourceVersion)) for { - _ <- Fox.combined(iterator.map { keyValuePair => + _ <- Fox.combined(sourceIterator.map { keyValuePair => for { agglomerateId <- agglomerateIdFromAgglomerateGraphKey(keyValuePair.key).toFox newKey = agglomerateGraphKey(newId, agglomerateId) - _ <- tracingDataStore.editableMappingsAgglomerateToGraph.put(newKey, version = version, keyValuePair.value) + _ <- tracingDataStore.editableMappingsAgglomerateToGraph.put(newKey, version = newVersion, keyValuePair.value) } yield () }.toList) } yield () diff --git 
a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 9f5f3337c2a..06b9967a84c 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -22,6 +22,7 @@ GET /volume/:tracingId/allDataZip POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) +POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, minMag: Option[Int], maxMag: Option[Int], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(tracingId: String, segmentId: Long) POST /volume/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(tracingId: String) GET /volume/:tracingId/findData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.findData(tracingId: String) @@ -72,4 +73,5 @@ POST /skeleton/saveMultiple POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(persist: Boolean) POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(persist: Boolean) GET /skeleton/:tracingId 
@com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) +POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple From 41a326c5142067e4a440f6af6ca6558000b0a71a Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 29 Oct 2024 11:19:07 +0100 Subject: [PATCH 135/361] fix volume task creation with base id --- .../annotation/TSAnnotationService.scala | 48 +++++++++---------- .../controllers/VolumeTracingController.scala | 8 +--- .../tracings/TracingService.scala | 2 - .../volume/VolumeTracingService.scala | 28 ++++++----- 4 files changed, 40 insertions(+), 46 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 04206a50b8f..a449d6d80d0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -564,14 +564,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss version: Option[Long] = None, useCache: Boolean = true, applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = - if (tracingId == TracingId.dummy) - Fox.successful(volumeTracingService.dummyTracing) - else { - for { - annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId), requestAll = false) // TODO is applyUpdates still needed? 
- tracing <- annotation.getVolume(tracingId) - } yield tracing - } + for { + annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId), requestAll = false) // TODO is applyUpdates still needed? + tracing <- annotation.getVolume(tracingId) + } yield tracing def findSkeleton( annotationId: String, @@ -698,16 +694,16 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield layer.copy(tracingId = newTracingId) def duplicateVolumeTracing( - sourceAnnotationId: String, - sourceTracingId: String, - sourceVersion: Long, - newVersion: Long, - isFromTask: Boolean, - boundingBox: Option[BoundingBox], - datasetBoundingBox: Option[BoundingBox], - magRestrictions: MagRestrictions, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { + sourceAnnotationId: String, + sourceTracingId: String, + sourceVersion: Long, + newVersion: Long, + isFromTask: Boolean, + boundingBox: Option[BoundingBox], + datasetBoundingBox: Option[BoundingBox], + magRestrictions: MagRestrictions, + editPosition: Option[Vec3Int], + editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { val newTracingId = TracingId.generate for { sourceTracing <- findVolume(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) @@ -745,14 +741,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield () def duplicateSkeletonTracing( - sourceAnnotationId: String, - sourceTracingId: String, - sourceVersion: Long, - newVersion: Long, - isFromTask: Boolean, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { + sourceAnnotationId: String, + sourceTracingId: String, + sourceVersion: Long, + newVersion: Long, + isFromTask: Boolean, + editPosition: Option[Vec3Int], + editRotation: 
Option[Vec3Double], + boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { val newTracingId = TracingId.generate for { skeleton <- findSkeleton(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 8c01ae36346..f3458ab24c9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -178,9 +178,7 @@ class VolumeTracingController @Inject()( initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") magRestrictions = MagRestrictions(minMag, maxMag) - mags <- volumeTracingService - .initializeWithData(annotationId, tracingId, tracing, initialData, magRestrictions) - .toFox + mags <- volumeTracingService.initializeWithData(tracingId, tracing, initialData, magRestrictions).toFox _ <- volumeTracingService.updateMagList(tracingId, tracing, mags) } yield Ok(Json.toJson(tracingId)) } @@ -218,9 +216,7 @@ class VolumeTracingController @Inject()( annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") - mags <- volumeTracingService - .initializeWithDataMultiple(annotationId, tracingId, tracing, initialData) - .toFox + mags <- volumeTracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox _ <- volumeTracingService.updateMagList(tracingId, tracing, mags) } yield 
Ok(Json.toJson(tracingId)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index c0f06df09cc..c458ec28d17 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -34,8 +34,6 @@ trait TracingService[T <: GeneratedMessage] def tracingMigrationService: TracingMigrationService[T] - def dummyTracing: T - implicit def tracingCompanion: GeneratedMessageCompanion[T] // this should be longer than maxCacheTime in webknossos/AnnotationStore diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 39523d44a08..be29e9fa621 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -313,7 +313,7 @@ class VolumeTracingService @Inject()( } yield () } - def initializeWithDataMultiple(annotationId: String, tracingId: String, tracing: VolumeTracing, initialData: File)( + def initializeWithDataMultiple(tracingId: String, tracing: VolumeTracing, initialData: File)( implicit mp: MessagesProvider, tc: TokenContext): Fox[Set[Vec3Int]] = if (tracing.version != 0L) @@ -382,8 +382,7 @@ class VolumeTracingService @Inject()( } yield mags } - def initializeWithData(annotationId: String, - tracingId: String, + def initializeWithData(tracingId: String, tracing: VolumeTracing, initialData: File, magRestrictions: MagRestrictions)(implicit tc: TokenContext): Fox[Set[Vec3Int]] = @@ -496,7 +495,7 @@ 
class VolumeTracingService @Inject()( editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[VolumeTracing] = { - val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, isFromTask, boundingBox) + val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, isFromTask, datasetBoundingBox) val tracingWithMagRestrictions = restrictMagList(tracingWithBB, magRestrictions) for { fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) @@ -521,7 +520,7 @@ class VolumeTracingService @Inject()( isFromTask: Boolean, datasetBoundingBoxOpt: Option[BoundingBox]): VolumeTracing = datasetBoundingBoxOpt match { - case Some(datasetBoundingBox) if isFromTask => { + case Some(datasetBoundingBox) if isFromTask => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 tracing .addUserBoundingBoxes( @@ -531,19 +530,19 @@ class VolumeTracingService @Inject()( Some(getRandomColor), tracing.boundingBox)) .withBoundingBox(datasetBoundingBox) - } case _ => tracing } def duplicateVolumeData(sourceTracingId: String, sourceTracing: VolumeTracing, newTracingId: String, - newTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = + newTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { + var bucketCount = 0 for { isTemporaryTracing <- isTemporaryTracing(sourceTracingId) sourceDataLayer = volumeTracingLayer(sourceTracingId, sourceTracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream( - Some(newTracing.version)) + Some(sourceTracing.version)) destinationDataLayer = volumeTracingLayer(newTracingId, newTracing) fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( @@ -561,6 +560,7 @@ class VolumeTracingService @Inject()( if (newTracing.mags.contains(vec3IntToProto(bucketPosition.mag))) { for { 
_ <- saveBucket(destinationDataLayer, bucketPosition, bucketData, newTracing.version) + _ = bucketCount += 1 _ <- Fox.runIfOptionTrue(newTracing.hasSegmentIndex)( updateSegmentIndex( segmentIndexBuffer, @@ -574,8 +574,11 @@ class VolumeTracingService @Inject()( } yield () } else Fox.successful(()) } + _ = logger.info( + s"Duplicated $bucketCount volume buckets from $sourceTracingId v${sourceTracing.version} to $newTracingId v${newTracing.version}.") _ <- segmentIndexBuffer.flush() } yield () + } private def volumeTracingLayer( tracingId: String, @@ -605,6 +608,7 @@ class VolumeTracingService @Inject()( toCache) } yield id + // TODO use or remove def downsample(annotationId: String, tracingId: String, oldTracingId: String, newTracing: VolumeTracing)( implicit tc: TokenContext): Fox[Unit] = for { @@ -784,7 +788,9 @@ class VolumeTracingService @Inject()( elementClass) mergedAdditionalAxes <- Fox.box2Fox(AdditionalAxis.mergeAndAssertSameAdditionalAxes(tracings.map(t => AdditionalAxis.fromProtosAsOpt(t.additionalAxes)))) - fallbackLayer <- getFallbackLayer(tracingSelectors.head.tracingId, tracings.head) // TODO can we get rid of the head? + firstTracingSelector <- tracingSelectors.headOption ?~> "merge.noTracings" + firstTracing <- tracings.headOption ?~> "merge.noTracings" + fallbackLayer <- getFallbackLayer(firstTracingSelector.tracingId, firstTracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer(newId, volumeSegmentIndexClient, newVersion, @@ -882,8 +888,6 @@ class VolumeTracingService @Inject()( } } - def dummyTracing: VolumeTracing = ??? 
- def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(VolumeTracing, String)])( implicit tc: TokenContext): Fox[Unit] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { @@ -892,7 +896,7 @@ class VolumeTracingService @Inject()( remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" - // TODO_ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) + // TODO: _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) } yield () } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty From 11cbcd294eb3ace4c33b898c5395f2394db5fe89 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 30 Oct 2024 09:55:38 +0100 Subject: [PATCH 136/361] rename annotation.proto layers to annotationLayers, simplify duplicate api --- app/controllers/AnnotationController.scala | 8 +-- .../WKRemoteTracingStoreController.scala | 2 +- app/models/annotation/AnnotationService.scala | 22 +++---- .../WKRemoteTracingStoreClient.scala | 15 +---- .../oxalis/model_initialization.ts | 2 +- frontend/javascripts/types/api_flow_types.ts | 2 +- webknossos-datastore/proto/Annotation.proto | 2 +- .../annotation/AnnotationWithTracings.scala | 10 +-- .../annotation/TSAnnotationService.scala | 62 ++++++------------- .../controllers/TSAnnotationController.scala | 18 +----- ...alableminds.webknossos.tracingstore.routes | 2 +- 11 files changed, 46 insertions(+), 99 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 689586935b4..62b64efea09 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -439,13 
+439,9 @@ class AnnotationController @Inject()( newAnnotationId, version = None, isFromTask = annotation._task.isDefined, - editPosition = None, - editRotation = None, - boundingBox = None, - datasetBoundingBox = dataSource.map(_.boundingBox), - magRestrictions = MagRestrictions.empty + datasetBoundingBox = dataSource.map(_.boundingBox) ) - newAnnotationLayers = newAnnotationProto.layers.map(AnnotationLayer.fromProto) + newAnnotationLayers = newAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto) clonedAnnotation <- annotationService.createFrom(user, dataset, newAnnotationLayers, diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 1e05a4df3f5..b659700a52f 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -67,7 +67,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore for { annotationIdValidated <- ObjectId.fromString(annotationId) existingLayers <- annotationLayerDAO.findAnnotationLayersFor(annotationIdValidated) - newLayersProto = request.body.layers + newLayersProto = request.body.annotationLayers existingLayerIds = existingLayers.map(_.tracingId).toSet newLayerIds = newLayersProto.map(_.tracingId).toSet layerIdsToDelete = existingLayerIds.diff(newLayerIds) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index d821d478068..759b8efe180 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -281,7 +281,7 @@ class AnnotationService @Inject()( annotationProto = AnnotationProto(name = Some(AnnotationDefaults.defaultName), description = Some(AnnotationDefaults.defaultDescription), version = 0L, - layers = layersProto) + annotationLayers = layersProto) _ <- tracingStoreClient.saveAnnotationProto(annotationId, annotationProto) } yield newAnnotationLayers @@ 
-370,17 +370,13 @@ class AnnotationService @Inject()( annotationBaseId, initializingAnnotationId, version = None, - isFromTask = false, - editPosition = None, - editRotation = None, - boundingBox = None, - datasetBoundingBox = None, - magRestrictions = MagRestrictions.empty + isFromTask = false, // isFromTask is when duplicate is called on a task annotation, not when a task is assigned + datasetBoundingBox = None ) newAnnotation = annotationBase.copy( _id = initializingAnnotationId, _user = user._id, - annotationLayers = duplicatedAnnotationProto.layers.map(AnnotationLayer.fromProto).toList, + annotationLayers = duplicatedAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto).toList, state = Active, typ = AnnotationType.Task, created = Instant.now, @@ -485,10 +481,12 @@ class AnnotationService @Inject()( annotationLayers, description.getOrElse(""), typ = AnnotationType.TracingBase) - annotationBaseProto = AnnotationProto(name = Some(AnnotationDefaults.defaultName), - description = Some(AnnotationDefaults.defaultDescription), - version = 0L, - layers = annotationLayers.map(_.toProto)) + annotationBaseProto = AnnotationProto( + name = Some(AnnotationDefaults.defaultName), + description = Some(AnnotationDefaults.defaultDescription), + version = 0L, + annotationLayers = annotationLayers.map(_.toProto) + ) _ <- tracingStoreClient.saveAnnotationProto(annotationBase._id, annotationBaseProto) _ = logger.info(s"inserting base annotation ${annotationBase._id} for task ${task._id}") _ <- annotationDAO.insertOne(annotationBase) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 6f072bbfc8d..a5bb17dd70e 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -84,7 +84,7 @@ class WKRemoteTracingStoreClient( def saveAnnotationProto(annotationId: ObjectId, annotationProto: AnnotationProto): Fox[Unit] = { logger.debug( 
- f"Called to save AnnotationProto $annotationId with layers ${annotationProto.layers.map(_.tracingId).mkString(",")}." + baseInfo) + f"Called to save AnnotationProto $annotationId with layers ${annotationProto.annotationLayers.map(_.tracingId).mkString(",")}." + baseInfo) rpc(s"${tracingStore.url}/tracings/annotation/save") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("annotationId" -> annotationId.toString) @@ -92,29 +92,18 @@ class WKRemoteTracingStoreClient( } // Used in duplicate route. History and version are kept - // TODO: can we remove some params here, if they are used only in task case? def duplicateAnnotation(annotationId: ObjectId, newAnnotationId: ObjectId, version: Option[Long], isFromTask: Boolean, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox], - datasetBoundingBox: Option[BoundingBox], - magRestrictions: MagRestrictions, - ): Fox[AnnotationProto] = { + datasetBoundingBox: Option[BoundingBox]): Fox[AnnotationProto] = { logger.debug(s"Called to duplicate annotation $annotationId." 
+ baseInfo) rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/duplicate").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("newAnnotationId" -> newAnnotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) - .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) - .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) - .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) .addQueryStringOptional("datasetBoundingBox", datasetBoundingBox.map(_.toLiteral)) .addQueryString("isFromTask" -> isFromTask.toString) - .addQueryStringOptional("minMag", magRestrictions.minStr) - .addQueryStringOptional("maxMag", magRestrictions.maxStr) .postWithProtoResponse[AnnotationProto]()(AnnotationProto) } diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index e753c152c7e..baa43bccb2d 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -143,7 +143,7 @@ export async function initialize( name: annotationFromTracingStore.name, description: annotationFromTracingStore.description, }; - annotationFromTracingStore.layers.forEach((layer) => { + annotationFromTracingStore.annotationLayers.forEach((layer) => { if ( maybeOutdatedAnnotation.annotationLayers.find((l) => l.tracingId === layer.tracingId) == null diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index ee1a3d92706..b2c564e91b1 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -585,7 +585,7 @@ export type APITracingStoreAnnotation = { name: string; description: string; version: number; - layers: APITracingStoreAnnotationLayer[]; + annotationLayers: APITracingStoreAnnotationLayer[]; }; export type APITimeTrackingPerUser = { diff --git 
a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index e8938373307..5eaab3982f7 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -11,7 +11,7 @@ message AnnotationProto { optional string name = 1; optional string description = 2; required int64 version = 3; - repeated AnnotationLayerProto layers = 4; + repeated AnnotationLayerProto annotationLayers = 4; } message AnnotationLayerProto { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 482ebb5b12c..15563fab4a0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -83,7 +83,7 @@ case class AnnotationWithTracings( tracing: Either[SkeletonTracing, VolumeTracing]): AnnotationWithTracings = this.copy( annotation = annotation.copy( - layers = annotation.layers :+ AnnotationLayerProto( + annotationLayers = annotation.annotationLayers :+ AnnotationLayerProto( tracingId, a.layerParameters.name.getOrElse(AnnotationLayer.defaultNameForType(a.layerParameters.typ)), `type` = AnnotationLayerType.toProto(a.layerParameters.typ) @@ -92,11 +92,13 @@ case class AnnotationWithTracings( ) def deleteTracing(a: DeleteLayerAnnotationAction): AnnotationWithTracings = - this.copy(annotation = annotation.copy(layers = annotation.layers.filter(_.tracingId != a.tracingId)), - tracingsById = tracingsById.removed(a.tracingId)) + this.copy( + annotation = annotation.copy(annotationLayers = annotation.annotationLayers.filter(_.tracingId != a.tracingId)), + tracingsById = tracingsById.removed(a.tracingId) + ) def updateLayerMetadata(a: UpdateLayerMetadataAnnotationAction): 
AnnotationWithTracings = - this.copy(annotation = annotation.copy(layers = annotation.layers.map(l => + this.copy(annotation = annotation.copy(annotationLayers = annotation.annotationLayers.map(l => if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l))) def updateMetadata(a: UpdateMetadataAnnotationAction): AnnotationWithTracings = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index a449d6d80d0..675919921bc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -147,10 +147,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { tracingId <- action.tracingId.toFox ?~> "add layer action has no tracingId" _ <- bool2Fox( - !annotationWithTracings.annotation.layers + !annotationWithTracings.annotation.annotationLayers .exists(_.name == action.layerParameters.getNameWithDefault)) ?~> "addLayer.nameInUse" _ <- bool2Fox( - !annotationWithTracings.annotation.layers.exists( + !annotationWithTracings.annotation.annotationLayers.exists( _.`type` == AnnotationLayerTypeProto.Skeleton && action.layerParameters.typ == AnnotationLayerType.Skeleton)) ?~> "addLayer.onlyOneSkeletonAllowed" tracing <- remoteWebknossosClient.createTracingFor(annotationId, action.layerParameters, @@ -366,7 +366,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss requestAll: Boolean)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { val skeletonTracingIds = if (requestAll) - annotation.layers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) + annotation.annotationLayers.filter(_.`type` == 
AnnotationLayerTypeProto.Skeleton).map(_.tracingId) else { (updates.flatMap { case u: SkeletonUpdateAction => Some(u.actionTracingId) @@ -375,7 +375,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } val volumeTracingIds = if (requestAll) - annotation.layers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) + annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) else { (updates.flatMap { case u: VolumeUpdateAction => Some(u.actionTracingId) @@ -622,26 +622,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss newAnnotationId: String, version: Option[Long], isFromTask: Boolean, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox], - datasetBoundingBox: Option[BoundingBox], - magRestrictions: MagRestrictions)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = + datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { currentAnnotation <- get(annotationId, version) - newLayers <- Fox.serialCombined(currentAnnotation.layers)( - layer => - duplicateLayer(annotationId, - layer, - currentAnnotation.version, - isFromTask, - editPosition, - editRotation, - boundingBox, - datasetBoundingBox, - magRestrictions)) + newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)(layer => + duplicateLayer(annotationId, layer, currentAnnotation.version, isFromTask, datasetBoundingBox)) _ <- duplicateUpdates(annotationId, newAnnotationId) - duplicatedAnnotation = currentAnnotation.copy(layers = newLayers) + duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers) _ <- tracingDataStore.annotations.put(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) } yield duplicatedAnnotation @@ -657,16 +644,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss 
} } yield () - private def duplicateLayer( - annotationId: String, - layer: AnnotationLayerProto, - version: Long, - isFromTask: Boolean, - editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox], - datasetBoundingBox: Option[BoundingBox], - magRestrictions: MagRestrictions)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationLayerProto] = + private def duplicateLayer(annotationId: String, + layer: AnnotationLayerProto, + version: Long, + isFromTask: Boolean, + datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationLayerProto] = for { newTracingId <- layer.`type` match { case AnnotationLayerTypeProto.Volume => @@ -675,20 +658,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss version, version, isFromTask, - boundingBox, + None, datasetBoundingBox, - magRestrictions, - editPosition, - editRotation) + MagRestrictions.empty, + None, + None) case AnnotationLayerTypeProto.Skeleton => - duplicateSkeletonTracing(annotationId, - layer.tracingId, - version, - version, - isFromTask, - editPosition, - editRotation, - boundingBox) + duplicateSkeletonTracing(annotationId, layer.tracingId, version, version, isFromTask, None, None, None) case AnnotationLayerTypeProto.Unrecognized(num) => Fox.failure(f"unrecognized annotation layer type: $num") } } yield layer.copy(tracingId = newTracingId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index c27795c0cb4..6914d4aadf7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -1,7 +1,7 @@ package 
com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject -import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.controllers.Controller @@ -14,7 +14,6 @@ import com.scalableminds.webknossos.tracingstore.annotation.{ UpdateActionGroup } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -105,31 +104,18 @@ class TSAnnotationController @Inject()( newAnnotationId: String, version: Option[Long], isFromTask: Boolean, - minMag: Option[Int], - maxMag: Option[Int], - editPosition: Option[String], - editRotation: Option[String], - boundingBox: Option[String], datasetBoundingBox: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { - editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) - editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) - boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) datasetBoundingBoxParsed <- Fox.runOptional(datasetBoundingBox)(BoundingBox.fromLiteral) - magRestrictions = MagRestrictions(minMag, maxMag) annotationProto <- annotationService.duplicate(annotationId, newAnnotationId, version, isFromTask, - editPositionParsed, - editRotationParsed, - boundingBoxParsed, - datasetBoundingBoxParsed, - magRestrictions) + datasetBoundingBoxParsed) } yield Ok(annotationProto.toByteArray).as(protobufMimeType) } } diff --git 
a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 06b9967a84c..612798c2a1e 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -11,7 +11,7 @@ POST /annotation/:annotationId/update GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) -POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, minMag: Option[Int], maxMag: Option[Int], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String], datasetBoundingBox: Option[String]) +POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, datasetBoundingBox: Option[String]) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() From 65f2ebd979701e0a428dd9aa300c60923661cef3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 30 Oct 2024 10:26:00 +0100 Subject: [PATCH 137/361] wip merge --- .../com/scalableminds/util/tools/Fox.scala | 5 +- .../annotation/TSAnnotationService.scala | 
10 +++- .../SkeletonTracingController.scala | 6 +-- .../controllers/TSAnnotationController.scala | 50 ++++++++++++++++++- .../tracings/TracingService.scala | 2 - .../skeleton/SkeletonTracingService.scala | 4 +- ...alableminds.webknossos.tracingstore.routes | 1 + 7 files changed, 63 insertions(+), 15 deletions(-) diff --git a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala index 2195b751eef..953ed2b7f5b 100644 --- a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala +++ b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala @@ -101,13 +101,14 @@ object Fox extends FoxImplicits { def sequence[T](l: List[Fox[T]])(implicit ec: ExecutionContext): Future[List[Box[T]]] = Future.sequence(l.map(_.futureBox)) - def combined[T](l: List[Fox[T]])(implicit ec: ExecutionContext): Fox[List[T]] = + def combined[T](l: Seq[Fox[T]])(implicit ec: ExecutionContext): Fox[List[T]] = Fox(Future.sequence(l.map(_.futureBox)).map { results => results.find(_.isEmpty) match { case Some(Empty) => Empty case Some(failure: Failure) => failure case _ => - Full(results.map(_.openOrThrowException("An exception should never be thrown, all boxes must be full"))) + Full( + results.map(_.openOrThrowException("An exception should never be thrown, all boxes must be full")).toList) } }) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 675919921bc..ae49dafaec4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -203,6 +203,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss withTracings <- getWithTracings(annotationId, 
version, List.empty, List.empty, requestAll = false) } yield withTracings.annotation + def getMultiple(annotationIds: Seq[String])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[Seq[AnnotationProto]] = + Fox.serialCombined(annotationIds) { annotationId => + get(annotationId, None) + } + private def getWithTracings( annotationId: String, version: Option[Long], @@ -600,7 +606,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } - def findMultipleSkeletons(selectors: List[Option[TracingSelector]], + def findMultipleSkeletons(selectors: Seq[Option[TracingSelector]], useCache: Boolean = true, applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[List[Option[SkeletonTracing]]] = @@ -608,7 +614,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss selectors.map { case Some(selector) => for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) // TODO perf skip that if we already have it? 
tracing <- findSkeleton(annotationId, selector.tracingId, selector.version, useCache, applyUpdates) .map(Some(_)) } yield tracing diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 40fca232591..5636b68ae02 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -120,8 +120,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin case Empty => Fox.successful(None) case f: Failure => f.toFox } - mergedTracing <- Fox.box2Fox( - skeletonTracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) + mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracingsWithIds.map(_._1))) _ <- skeletonTracingService.save(mergedTracing, Some(newTracingId), version = 0, toCache = !persist) } yield Ok(Json.toJson(newTracingId)) } @@ -134,8 +133,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val tracings: List[Option[SkeletonTracing]] = request.body for { - mergedTracing <- Fox.box2Fox( - skeletonTracingService.merge(tracings.flatten, MergedVolumeStats.empty(), Empty)) + mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracings.flatten)) processedTracing = skeletonTracingService.remapTooLargeTreeIds(mergedTracing) newId <- skeletonTracingService.save(processedTracing, None, processedTracing.version, toCache = !persist) } yield Ok(Json.toJson(newId)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 6914d4aadf7..ae3ab5dca3f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -3,10 +3,15 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerTypeProto, AnnotationProto} import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} +import com.scalableminds.webknossos.tracingstore.tracings.{ + KeyValueStoreImplicits, + TracingDataStore, + TracingId, + TracingSelector +} import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService import com.scalableminds.webknossos.tracingstore.annotation.{ AnnotationTransactionService, @@ -14,6 +19,8 @@ import com.scalableminds.webknossos.tracingstore.annotation.{ UpdateActionGroup } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService +import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -24,6 +31,7 @@ class TSAnnotationController @Inject()( slackNotificationService: TSSlackNotificationService, annotationService: TSAnnotationService, annotationTransactionService: AnnotationTransactionService, + skeletonTracingService: SkeletonTracingService, 
tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { @@ -121,4 +129,42 @@ class TSAnnotationController @Inject()( } } } + + def mergedFromIds(persist: Boolean, newAnnotationId: String): Action[List[String]] = + Action.async(validateJson[List[String]]) { implicit request => + log() { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + annotations: Seq[AnnotationProto] <- annotationService.getMultiple(request.body) ?~> Messages( + "annotation.notFound") + annotationsWithIds = annotations.zip(annotations) + skeletonIds = annotations.flatMap( + _.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId)) + volumeIds = annotations.flatMap( + _.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId)) + /*mergedVolumeStats <- volumeTracingService.mergeVolumeData(request.body.flatten, + tracingsWithIds.map(_._1), + newTracingId, + newVersion = 0L, + toCache = !persist) + mergeEditableMappingsResultBox <- skeletonTracingService + .mergeEditableMappings(newTracingId, tracingsWithIds) + .futureBox + newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { + case Full(()) => Fox.successful(Some(newTracingId)) + case Empty => Fox.successful(None) + case f: Failure => f.toFox + }*/ + skeletons <- annotationService.findMultipleSkeletons(skeletonIds.map { s => + Some(TracingSelector(s)) + }, applyUpdates = true) + // TODO handle zero-skeletons / zero-volumes case + newSkeletonId = TracingId.generate + newVolumeId = TracingId.generate + mergedSkeleton <- skeletonTracingService.merge(skeletons.flatten).toFox + _ <- skeletonTracingService.save(mergedSkeleton, Some(newSkeletonId), version = 0, toCache = !persist) + } yield Ok + } + } + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index c458ec28d17..9c5dcae0db1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -69,8 +69,6 @@ trait TracingService[T <: GeneratedMessage] } } - def merge(tracings: Seq[T], mergedVolumeStats: MergedVolumeStats, newEditableMappingIdOpt: Option[String]): Box[T] - def remapTooLargeTreeIds(tracing: T): T = tracing def mergeVolumeData(tracingSelectors: Seq[TracingSelector], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 700c13c41c5..9c49183a7c8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -61,9 +61,7 @@ class SkeletonTracingService @Inject()( if (fromTask) newTracing.clearBoundingBox else newTracing } - def merge(tracings: Seq[SkeletonTracing], - mergedVolumeStats: MergedVolumeStats, - newEditableMappingIdOpt: Option[String]): Box[SkeletonTracing] = + def merge(tracings: Seq[SkeletonTracing]): Box[SkeletonTracing] = for { tracing <- tracings.map(Full(_)).reduceLeft(mergeTwo) } yield diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 612798c2a1e..d1bb507338b 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -12,6 +12,7 @@ GET 
/annotation/:annotationId/updateActionLog GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, datasetBoundingBox: Option[String]) +POST /annotation/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.mergedFromIds(persist: Boolean, newAnnotationId: String) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() From e55768d88ab5dd1538c5a861fb27705deea50bbd Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 30 Oct 2024 11:14:05 +0100 Subject: [PATCH 138/361] merge skeletons --- app/models/annotation/AnnotationMerger.scala | 64 +++---------------- .../WKRemoteTracingStoreClient.scala | 11 ++++ .../annotation/TSAnnotationService.scala | 1 + .../controllers/TSAnnotationController.scala | 51 +++++++++++---- 4 files changed, 61 insertions(+), 66 deletions(-) diff --git a/app/models/annotation/AnnotationMerger.scala b/app/models/annotation/AnnotationMerger.scala index 2c251f01462..ec538208b15 100644 --- a/app/models/annotation/AnnotationMerger.scala +++ b/app/models/annotation/AnnotationMerger.scala @@ -2,11 +2,7 @@ package models.annotation import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.annotation.{ - AnnotationLayer, - AnnotationLayerStatistics, - AnnotationLayerType -} +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer 
import com.typesafe.scalalogging.LazyLogging import javax.inject.Inject @@ -51,7 +47,7 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr Fox.empty else { for { - mergedAnnotationLayers <- mergeTracingsOfAnnotations(annotations, datasetId, persistTracing) + mergedAnnotationLayers <- mergeAnnotationsInTracingstore(annotations, datasetId, newId, persistTracing) ?~> "Failed to merge annotations in tracingstore." } yield { Annotation( newId, @@ -65,56 +61,16 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr } } - private def mergeTracingsOfAnnotations(annotations: List[Annotation], datasetId: ObjectId, persistTracing: Boolean)( - implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] = + private def mergeAnnotationsInTracingstore( + annotations: List[Annotation], + datasetId: ObjectId, + newAnnotationId: ObjectId, + persist: Boolean)(implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] = for { dataset <- datasetDAO.findOne(datasetId) tracingStoreClient: WKRemoteTracingStoreClient <- tracingStoreService.clientFor(dataset) - skeletonLayers = annotations.flatMap(_.annotationLayers.find(_.typ == AnnotationLayerType.Skeleton)) - volumeLayers = annotations.flatMap(_.annotationLayers.find(_.typ == AnnotationLayerType.Volume)) - mergedSkeletonTracingId <- mergeSkeletonTracings(tracingStoreClient, - skeletonLayers.map(_.tracingId), - persistTracing) - mergedVolumeTracingId <- mergeVolumeTracings(tracingStoreClient, volumeLayers.map(_.tracingId), persistTracing) - mergedSkeletonName = allEqual(skeletonLayers.map(_.name)) - mergedVolumeName = allEqual(volumeLayers.map(_.name)) - mergedSkeletonLayer = mergedSkeletonTracingId.map( - id => - AnnotationLayer(id, - AnnotationLayerType.Skeleton, - mergedSkeletonName.getOrElse(AnnotationLayer.defaultSkeletonLayerName), - AnnotationLayerStatistics.unknown)) - mergedVolumeLayer = mergedVolumeTracingId.map( - id => - AnnotationLayer(id, - 
AnnotationLayerType.Volume, - mergedVolumeName.getOrElse(AnnotationLayer.defaultVolumeLayerName), - AnnotationLayerStatistics.unknown)) - } yield List(mergedSkeletonLayer, mergedVolumeLayer).flatten - - private def allEqual(str: List[String]): Option[String] = - // returns the str if all names are equal, None otherwise - str.headOption.map(name => str.forall(_ == name)).flatMap { _ => - str.headOption - } - - private def mergeSkeletonTracings(tracingStoreClient: WKRemoteTracingStoreClient, - skeletonTracingIds: List[String], - persistTracing: Boolean) = - if (skeletonTracingIds.isEmpty) - Fox.successful(None) - else - tracingStoreClient - .mergeSkeletonTracingsByIds(skeletonTracingIds, persistTracing) - .map(Some(_)) ?~> "Failed to merge skeleton tracings." + mergedAnnotationProto <- tracingStoreClient.mergeAnnotationsByIds(annotations.map(_.id), newAnnotationId, persist) + layers = mergedAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto) + } yield layers.toList - private def mergeVolumeTracings(tracingStoreClient: WKRemoteTracingStoreClient, - volumeTracingIds: List[String], - persistTracing: Boolean) = - if (volumeTracingIds.isEmpty) - Fox.successful(None) - else - tracingStoreClient - .mergeVolumeTracingsByIds(volumeTracingIds, persistTracing) - .map(Some(_)) ?~> "Failed to merge volume tracings." } diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index a5bb17dd70e..f7b42910bb6 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -136,6 +136,17 @@ class WKRemoteTracingStoreClient( .addQueryStringOptional("maxMag", magRestrictions.maxStr) .postWithJsonResponse[String]() + def mergeAnnotationsByIds(annotationIds: List[String], + newAnnotationId: ObjectId, + persist: Boolean): Fox[AnnotationProto] = { + logger.debug(s"Called to merge ${annotationIds.length} annotations by ids." 
+ baseInfo) + rpc(s"${tracingStore.url}/tracings/annotation/mergedFromIds").withLongTimeout + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("persist" -> persist.toString) + .addQueryString("newAnnotationId" -> newAnnotationId.toString) + .postJsonWithProtoResponse[List[String], AnnotationProto](annotationIds)(AnnotationProto) + } + def mergeSkeletonTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = { logger.debug("Called to merge SkeletonTracings by ids." + baseInfo) rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromIds").withLongTimeout diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index ae49dafaec4..8f673d66862 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -606,6 +606,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } + // TODO build variant without TracingSelector and Option? 
def findMultipleSkeletons(selectors: Seq[Option[TracingSelector]], useCache: Boolean = true, applyUpdates: Boolean = false)(implicit tc: TokenContext, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index ae3ab5dca3f..a4ce7ec82a8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -3,8 +3,13 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerTypeProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.Annotation.{ + AnnotationLayerProto, + AnnotationLayerTypeProto, + AnnotationProto +} import com.scalableminds.webknossos.datastore.controllers.Controller +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, @@ -137,11 +142,12 @@ class TSAnnotationController @Inject()( for { annotations: Seq[AnnotationProto] <- annotationService.getMultiple(request.body) ?~> Messages( "annotation.notFound") - annotationsWithIds = annotations.zip(annotations) - skeletonIds = annotations.flatMap( - _.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId)) - volumeIds = annotations.flatMap( - _.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId)) + skeletonLayers = annotations.flatMap( + _.annotationLayers.filter(_.`type` == 
AnnotationLayerTypeProto.Skeleton)) + volumeLayers = annotations.flatMap(_.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume)) + // TODO: Volume + // TODO: Merge updates? if so, iron out reverts? + // TODO: Merge editable mappings /*mergedVolumeStats <- volumeTracingService.mergeVolumeData(request.body.flatten, tracingsWithIds.map(_._1), newTracingId, @@ -155,16 +161,37 @@ class TSAnnotationController @Inject()( case Empty => Fox.successful(None) case f: Failure => f.toFox }*/ - skeletons <- annotationService.findMultipleSkeletons(skeletonIds.map { s => - Some(TracingSelector(s)) + skeletons <- annotationService.findMultipleSkeletons(skeletonLayers.map { l => + Some(TracingSelector(l.tracingId)) }, applyUpdates = true) - // TODO handle zero-skeletons / zero-volumes case newSkeletonId = TracingId.generate newVolumeId = TracingId.generate - mergedSkeleton <- skeletonTracingService.merge(skeletons.flatten).toFox - _ <- skeletonTracingService.save(mergedSkeleton, Some(newSkeletonId), version = 0, toCache = !persist) - } yield Ok + mergedSkeletonName = allEqual(skeletonLayers.map(_.name)) + .getOrElse(AnnotationLayer.defaultSkeletonLayerName) + mergedVolumeName = allEqual(volumeLayers.map(_.name)).getOrElse(AnnotationLayer.defaultVolumeLayerName) + mergedSkeletonOpt <- Fox.runIf(skeletons.flatten.nonEmpty)( + skeletonTracingService.merge(skeletons.flatten).toFox) + mergedSkeletonLayerOpt: Option[AnnotationLayerProto] = mergedSkeletonOpt.map( + _ => + AnnotationLayerProto(name = mergedSkeletonName, + tracingId = newSkeletonId, + `type` = AnnotationLayerTypeProto.Skeleton)) + mergedVolumeLayerOpt: Option[AnnotationLayerProto] = None // TODO + mergedLayers = Seq(mergedSkeletonLayerOpt, mergedVolumeLayerOpt).flatten + firstAnnotation <- annotations.headOption.toFox + mergedAnnotation = firstAnnotation.withAnnotationLayers(mergedLayers) + _ <- Fox.runOptional(mergedSkeletonOpt)( + skeletonTracingService.save(_, Some(newSkeletonId), version = 0L, toCache = 
!persist)) + _ <- tracingDataStore.annotations.put(newAnnotationId, 0L, mergedAnnotation) + } yield Ok(mergedAnnotation.toByteArray).as(protobufMimeType) } } } + + // TODO generalize, mix with assertAllOnSame* + private def allEqual(str: Seq[String]): Option[String] = + // returns the str if all names are equal, None otherwise + str.headOption.map(name => str.forall(_ == name)).flatMap { _ => + str.headOption + } } From 63c8e6f2acdaa34894a58d25b321aa441b3da9b4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 30 Oct 2024 11:35:56 +0100 Subject: [PATCH 139/361] merge volume data --- .../WKRemoteTracingStoreClient.scala | 16 ----- .../annotation/TSAnnotationService.scala | 2 +- .../SkeletonTracingController.scala | 36 +---------- .../controllers/TSAnnotationController.scala | 62 ++++++++++++------- .../controllers/VolumeTracingController.scala | 37 +---------- .../tracings/TracingService.scala | 14 ----- .../skeleton/SkeletonTracingService.scala | 17 +---- .../volume/VolumeTracingService.scala | 38 +++++++----- ...alableminds.webknossos.tracingstore.routes | 2 - 9 files changed, 68 insertions(+), 156 deletions(-) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index f7b42910bb6..b832300245f 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -147,22 +147,6 @@ class WKRemoteTracingStoreClient( .postJsonWithProtoResponse[List[String], AnnotationProto](annotationIds)(AnnotationProto) } - def mergeSkeletonTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = { - logger.debug("Called to merge SkeletonTracings by ids." 
+ baseInfo) - rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromIds").withLongTimeout - .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("persist" -> persistTracing.toString) - .postJsonWithJsonResponse[List[TracingSelector], String](tracingIds.map(TracingSelector(_))) - } - - def mergeVolumeTracingsByIds(tracingIds: List[String], persistTracing: Boolean): Fox[String] = { - logger.debug("Called to merge VolumeTracings by ids." + baseInfo) - rpc(s"${tracingStore.url}/tracings/volume/mergedFromIds").withLongTimeout - .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("persist" -> persistTracing.toString) - .postJsonWithJsonResponse[List[TracingSelector], String](tracingIds.map(TracingSelector(_))) - } - def mergeSkeletonTracingsByContents(tracings: SkeletonTracings, persistTracing: Boolean): Fox[String] = { logger.debug("Called to merge SkeletonTracings by contents." + baseInfo) rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromContents").withLongTimeout diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 8f673d66862..9bf7ff95522 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -590,7 +590,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield tracing } - def findMultipleVolumes(selectors: List[Option[TracingSelector]], + def findMultipleVolumes(selectors: Seq[Option[TracingSelector]], useCache: Boolean = true, applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[List[Option[VolumeTracing]]] = diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 5636b68ae02..17893f1670b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -6,11 +6,9 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingSelector} +import com.scalableminds.webknossos.tracingstore.tracings.TracingSelector import com.scalableminds.webknossos.tracingstore.tracings.skeleton._ -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} -import net.liftweb.common.{Empty, Failure, Full} import play.api.i18n.Messages import play.api.libs.json.Json import com.scalableminds.webknossos.datastore.controllers.Controller @@ -95,38 +93,6 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin } } - def mergedFromIds(persist: Boolean): Action[List[Option[TracingSelector]]] = - Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - for { - tracingOpts <- annotationService.findMultipleSkeletons(request.body, applyUpdates = true) ?~> Messages( - "tracing.notFound") - tracingsWithIds = 
tracingOpts.zip(request.body).flatMap { - case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) - case _ => None - } - newTracingId = TracingId.generate - mergedVolumeStats <- skeletonTracingService.mergeVolumeData(request.body.flatten, - tracingsWithIds.map(_._1), - newTracingId, - newVersion = 0L, - toCache = !persist) - mergeEditableMappingsResultBox <- skeletonTracingService - .mergeEditableMappings(newTracingId, tracingsWithIds) - .futureBox - newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { - case Full(()) => Fox.successful(Some(newTracingId)) - case Empty => Fox.successful(None) - case f: Failure => f.toFox - } - mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracingsWithIds.map(_._1))) - _ <- skeletonTracingService.save(mergedTracing, Some(newTracingId), version = 0, toCache = !persist) - } yield Ok(Json.toJson(newTracingId)) - } - } - } - def mergedFromContents(persist: Boolean): Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => log() { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index a4ce7ec82a8..c0c1caf54f3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -8,6 +8,7 @@ import com.scalableminds.webknossos.datastore.Annotation.{ AnnotationLayerTypeProto, AnnotationProto } +import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.services.UserAccessRequest @@ -25,6 +26,8 
@@ import com.scalableminds.webknossos.tracingstore.annotation.{ } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService +import net.liftweb.common.{Empty, Failure, Full} import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -37,6 +40,7 @@ class TSAnnotationController @Inject()( annotationService: TSAnnotationService, annotationTransactionService: AnnotationTransactionService, skeletonTracingService: SkeletonTracingService, + volumeTracingService: VolumeTracingService, tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { @@ -145,38 +149,52 @@ class TSAnnotationController @Inject()( skeletonLayers = annotations.flatMap( _.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton)) volumeLayers = annotations.flatMap(_.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume)) - // TODO: Volume + newSkeletonId = TracingId.generate + newVolumeId = TracingId.generate + mergedSkeletonName = allEqual(skeletonLayers.map(_.name)) + .getOrElse(AnnotationLayer.defaultSkeletonLayerName) + mergedVolumeName = allEqual(volumeLayers.map(_.name)).getOrElse(AnnotationLayer.defaultVolumeLayerName) // TODO: Merge updates? if so, iron out reverts? 
// TODO: Merge editable mappings - /*mergedVolumeStats <- volumeTracingService.mergeVolumeData(request.body.flatten, - tracingsWithIds.map(_._1), - newTracingId, - newVersion = 0L, - toCache = !persist) - mergeEditableMappingsResultBox <- skeletonTracingService - .mergeEditableMappings(newTracingId, tracingsWithIds) + volumeTracings <- annotationService + .findMultipleVolumes(volumeLayers.map { l => + Some(TracingSelector(l.tracingId)) + }, applyUpdates = true) + .map(_.flatten) + mergedVolumeStats <- volumeTracingService.mergeVolumeData(volumeLayers.map(_.tracingId), + volumeTracings, + newVolumeId, + newVersion = 0L, + persist = !persist) + mergeEditableMappingsResultBox <- volumeTracingService + .mergeEditableMappings(newVolumeId, volumeTracings.zip(volumeLayers.map(_.tracingId))) .futureBox newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { - case Full(()) => Fox.successful(Some(newTracingId)) + case Full(()) => Fox.successful(Some(newVolumeId)) case Empty => Fox.successful(None) case f: Failure => f.toFox - }*/ - skeletons <- annotationService.findMultipleSkeletons(skeletonLayers.map { l => - Some(TracingSelector(l.tracingId)) - }, applyUpdates = true) - newSkeletonId = TracingId.generate - newVolumeId = TracingId.generate - mergedSkeletonName = allEqual(skeletonLayers.map(_.name)) - .getOrElse(AnnotationLayer.defaultSkeletonLayerName) - mergedVolumeName = allEqual(volumeLayers.map(_.name)).getOrElse(AnnotationLayer.defaultVolumeLayerName) - mergedSkeletonOpt <- Fox.runIf(skeletons.flatten.nonEmpty)( - skeletonTracingService.merge(skeletons.flatten).toFox) - mergedSkeletonLayerOpt: Option[AnnotationLayerProto] = mergedSkeletonOpt.map( + } + mergedVolumeOpt <- Fox.runIf(volumeTracings.nonEmpty)( + volumeTracingService.merge(volumeTracings, mergedVolumeStats, newEditableMappingIdOpt)) + _ <- Fox.runOptional(mergedVolumeOpt)( + volumeTracingService.save(_, Some(newVolumeId), version = 0, toCache = !persist)) + skeletonTracings <- annotationService 
+ .findMultipleSkeletons(skeletonLayers.map { l => + Some(TracingSelector(l.tracingId)) + }, applyUpdates = true) + .map(_.flatten) + mergedSkeletonOpt <- Fox.runIf(skeletonTracings.nonEmpty)( + skeletonTracingService.merge(skeletonTracings).toFox) + mergedSkeletonLayerOpt = mergedSkeletonOpt.map( _ => AnnotationLayerProto(name = mergedSkeletonName, tracingId = newSkeletonId, `type` = AnnotationLayerTypeProto.Skeleton)) - mergedVolumeLayerOpt: Option[AnnotationLayerProto] = None // TODO + mergedVolumeLayerOpt = mergedVolumeOpt.map( + _ => + AnnotationLayerProto(name = mergedVolumeName, + tracingId = newVolumeId, + `type` = AnnotationLayerTypeProto.Volume)) mergedLayers = Seq(mergedSkeletonLayerOpt, mergedVolumeLayerOpt).flatten firstAnnotation <- annotations.headOption.toFox mergedAnnotation = firstAnnotation.withAnnotationLayers(mergedLayers) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index f3458ab24c9..ea67c15b89c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -34,14 +34,14 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeSegmentStatisticsService, VolumeTracingService } -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingId, TracingSelector} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingSelector} import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig } -import net.liftweb.common.{Empty, Failure, Full} +import net.liftweb.common.Empty import play.api.i18n.Messages import 
play.api.libs.Files.TemporaryFile import play.api.libs.json.Json @@ -135,39 +135,6 @@ class VolumeTracingController @Inject()( } } - def mergedFromIds(persist: Boolean): Action[List[Option[TracingSelector]]] = - Action.async(validateJson[List[Option[TracingSelector]]]) { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - for { - tracingOpts <- annotationService.findMultipleVolumes(request.body, applyUpdates = true) ?~> Messages( - "tracing.notFound") - tracingsWithIds = tracingOpts.zip(request.body).flatMap { - case (Some(tracing), Some(selector)) => Some((tracing, selector.tracingId)) - case _ => None - } - newTracingId = TracingId.generate - mergedVolumeStats <- volumeTracingService.mergeVolumeData(request.body.flatten, - tracingsWithIds.map(_._1), - newTracingId, - newVersion = 0L, - toCache = !persist) - mergeEditableMappingsResultBox <- volumeTracingService - .mergeEditableMappings(newTracingId, tracingsWithIds) - .futureBox - newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { - case Full(()) => Fox.successful(Some(newTracingId)) - case Empty => Fox.successful(None) - case f: Failure => f.toFox - } - mergedTracing <- Fox.box2Fox( - volumeTracingService.merge(tracingsWithIds.map(_._1), mergedVolumeStats, newEditableMappingIdOpt)) - _ <- volumeTracingService.save(mergedTracing, Some(newTracingId), version = 0, toCache = !persist) - } yield Ok(Json.toJson(newTracingId)) - } - } - } - def initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]): Action[AnyContent] = Action.async { implicit request => log() { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 9c5dcae0db1..7ac23374aeb 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -1,13 +1,9 @@ package com.scalableminds.webknossos.tracingstore.tracings -import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import com.typesafe.scalalogging.LazyLogging -import net.liftweb.common.Box -import play.api.i18n.MessagesProvider import scalapb.{GeneratedMessage, GeneratedMessageCompanion} import scala.concurrent.ExecutionContext @@ -69,14 +65,4 @@ trait TracingService[T <: GeneratedMessage] } } - def remapTooLargeTreeIds(tracing: T): T = tracing - - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[T], - newId: String, - newVersion: Long, - toCache: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] - - def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(T, String)])( - implicit tc: TokenContext): Fox[Unit] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 9c49183a7c8..ad6c21d13d7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -1,18 +1,15 @@ package com.scalableminds.webknossos.tracingstore.tracings.skeleton import com.google.inject.Inject -import com.scalableminds.util.accesscontext.TokenContext import 
com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} -import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.util.tools.FoxImplicits import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats import net.liftweb.common.{Box, Full} -import play.api.i18n.MessagesProvider import scala.concurrent.ExecutionContext @@ -96,22 +93,12 @@ class SkeletonTracingService @Inject()( ) // Can be removed again when https://github.com/scalableminds/webknossos/issues/5009 is fixed - override def remapTooLargeTreeIds(skeletonTracing: SkeletonTracing): SkeletonTracing = + def remapTooLargeTreeIds(skeletonTracing: SkeletonTracing): SkeletonTracing = if (skeletonTracing.trees.exists(_.treeId > 1048576)) { val newTrees = for ((tree, index) <- skeletonTracing.trees.zipWithIndex) yield tree.withTreeId(index + 1) skeletonTracing.withTrees(newTrees) } else skeletonTracing - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[SkeletonTracing], - newId: String, - newVersion: Long, - toCache: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = - Fox.successful(MergedVolumeStats.empty()) - def dummyTracing: SkeletonTracing = SkeletonTracingDefaults.createInstance - def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(SkeletonTracing, String)])( - implicit tc: TokenContext): Fox[Unit] = - Fox.empty } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index be29e9fa621..b4e8aa105b3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -727,24 +727,24 @@ class VolumeTracingService @Inject()( case (None, None) => None } - private def bucketStreamFromSelector(selector: TracingSelector, tracing: VolumeTracing)( + private def bucketStreamFor(tracingId: String, tracing: VolumeTracing)( implicit tc: TokenContext): Iterator[(BucketPosition, Array[Byte])] = { - val dataLayer = volumeTracingLayer(selector.tracingId, tracing) + val dataLayer = volumeTracingLayer(tracingId, tracing) dataLayer.bucketProvider.bucketStream(Some(tracing.version)) } - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], + def mergeVolumeData(tracingIds: Seq[String], tracings: Seq[VolumeTracing], newId: String, newVersion: Long, - toCache: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = { + persist: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = { val elementClass = tracings.headOption.map(_.elementClass).getOrElse(elementClassToProto(ElementClass.uint8)) val magSets = new mutable.HashSet[Set[Vec3Int]]() - tracingSelectors.zip(tracings).foreach { - case (selector, tracing) => + tracingIds.zip(tracings).foreach { + case (tracingId, tracing) => val magSet = new mutable.HashSet[Vec3Int]() - bucketStreamFromSelector(selector, tracing).foreach { + bucketStreamFor(tracingId, tracing).foreach { case (bucketPosition, _) => magSet.add(bucketPosition.mag) } @@ -770,15 +770,15 @@ class VolumeTracingService @Inject()( val mergedVolume = new 
MergedVolume(elementClass) - tracingSelectors.zip(tracings).foreach { - case (selector, tracing) => - val bucketStream = bucketStreamFromSelector(selector, tracing) + tracingIds.zip(tracings).foreach { + case (tracingId, tracing) => + val bucketStream = bucketStreamFor(tracingId, tracing) mergedVolume.addLabelSetFromBucketStream(bucketStream, magsIntersection) } - tracingSelectors.zip(tracings).zipWithIndex.foreach { - case ((selector, tracing), sourceVolumeIndex) => - val bucketStream = bucketStreamFromSelector(selector, tracing) + tracingIds.zip(tracings).zipWithIndex.foreach { + case ((tracingIds, tracing), sourceVolumeIndex) => + val bucketStream = bucketStreamFor(tracingIds, tracing) mergedVolume.addFromBucketStream(sourceVolumeIndex, bucketStream, Some(magsIntersection)) } for { @@ -788,9 +788,9 @@ class VolumeTracingService @Inject()( elementClass) mergedAdditionalAxes <- Fox.box2Fox(AdditionalAxis.mergeAndAssertSameAdditionalAxes(tracings.map(t => AdditionalAxis.fromProtosAsOpt(t.additionalAxes)))) - firstTracingSelector <- tracingSelectors.headOption ?~> "merge.noTracings" + firstTracingId <- tracingIds.headOption ?~> "merge.noTracings" firstTracing <- tracings.headOption ?~> "merge.noTracings" - fallbackLayer <- getFallbackLayer(firstTracingSelector.tracingId, firstTracing) + fallbackLayer <- getFallbackLayer(firstTracingId, firstTracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer(newId, volumeSegmentIndexClient, newVersion, @@ -800,7 +800,13 @@ class VolumeTracingService @Inject()( tc) _ <- mergedVolume.withMergedBuckets { (bucketPosition, bucketBytes) => for { - _ <- saveBucket(newId, elementClass, bucketPosition, bucketBytes, newVersion, toCache, mergedAdditionalAxes) + _ <- saveBucket(newId, + elementClass, + bucketPosition, + bucketBytes, + newVersion, + toTemporaryStore = !persist, // TODO unify boolean direction + naming + mergedAdditionalAxes) _ <- Fox.runIf(shouldCreateSegmentIndex)( updateSegmentIndex(segmentIndexBuffer, bucketPosition, 
diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index d1bb507338b..82274261f1f 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -30,7 +30,6 @@ GET /volume/:tracingId/findData POST /volume/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(tracingId: String) POST /volume/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(tracingId: String) POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple -POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(persist: Boolean) POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(persist: Boolean) # Editable Mappings @@ -72,7 +71,6 @@ GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(persist: Boolean) -POST /skeleton/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromIds(persist: Boolean) GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) POST /skeleton/:tracingId/duplicate 
@com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple From 74c679b47c5fc3490aa636e5437c097c41f608c0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 30 Oct 2024 11:44:44 +0100 Subject: [PATCH 140/361] small renamings --- .../controllers/SkeletonTracingController.scala | 2 +- .../controllers/TSAnnotationController.scala | 9 ++++----- .../controllers/VolumeTracingController.scala | 2 +- .../tracingstore/tracings/TracingService.scala | 4 ++-- .../tracings/volume/VolumeTracingDownsampling.scala | 2 +- .../tracings/volume/VolumeTracingService.scala | 10 ++++++---- 6 files changed, 15 insertions(+), 14 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 17893f1670b..0c3bfbc1b1d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -101,7 +101,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin for { mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracings.flatten)) processedTracing = skeletonTracingService.remapTooLargeTreeIds(mergedTracing) - newId <- skeletonTracingService.save(processedTracing, None, processedTracing.version, toCache = !persist) + newId <- skeletonTracingService.save(processedTracing, None, processedTracing.version, toTemporaryStore = !persist) } yield Ok(Json.toJson(newId)) } } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index c0c1caf54f3..8c14836fd2d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -8,7 +8,6 @@ import com.scalableminds.webknossos.datastore.Annotation.{ AnnotationLayerTypeProto, AnnotationProto } -import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.services.UserAccessRequest @@ -165,9 +164,9 @@ class TSAnnotationController @Inject()( volumeTracings, newVolumeId, newVersion = 0L, - persist = !persist) + persist = persist) mergeEditableMappingsResultBox <- volumeTracingService - .mergeEditableMappings(newVolumeId, volumeTracings.zip(volumeLayers.map(_.tracingId))) + .mergeEditableMappings(newVolumeId, volumeTracings.zip(volumeLayers.map(_.tracingId)), persist) .futureBox newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { case Full(()) => Fox.successful(Some(newVolumeId)) @@ -177,7 +176,7 @@ class TSAnnotationController @Inject()( mergedVolumeOpt <- Fox.runIf(volumeTracings.nonEmpty)( volumeTracingService.merge(volumeTracings, mergedVolumeStats, newEditableMappingIdOpt)) _ <- Fox.runOptional(mergedVolumeOpt)( - volumeTracingService.save(_, Some(newVolumeId), version = 0, toCache = !persist)) + volumeTracingService.save(_, Some(newVolumeId), version = 0, toTemporaryStore = !persist)) skeletonTracings <- annotationService .findMultipleSkeletons(skeletonLayers.map { l => Some(TracingSelector(l.tracingId)) @@ -199,7 +198,7 @@ 
class TSAnnotationController @Inject()( firstAnnotation <- annotations.headOption.toFox mergedAnnotation = firstAnnotation.withAnnotationLayers(mergedLayers) _ <- Fox.runOptional(mergedSkeletonOpt)( - skeletonTracingService.save(_, Some(newSkeletonId), version = 0L, toCache = !persist)) + skeletonTracingService.save(_, Some(newSkeletonId), version = 0L, toTemporaryStore = !persist)) _ <- tracingDataStore.annotations.put(newAnnotationId, 0L, mergedAnnotation) } yield Ok(mergedAnnotation.toByteArray).as(protobufMimeType) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index ea67c15b89c..665c185a55f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -168,7 +168,7 @@ class VolumeTracingController @Inject()( // segment lists for multi-volume uploads are not supported yet, compare https://github.com/scalableminds/webknossos/issues/6887 mergedTracing = mt.copy(segments = List.empty) - newId <- volumeTracingService.save(mergedTracing, None, mergedTracing.version, toCache = !persist) + newId <- volumeTracingService.save(mergedTracing, None, mergedTracing.version, toTemporaryStore = !persist) } yield Ok(Json.toJson(newId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 7ac23374aeb..c8ac3837bd4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -54,9 
+54,9 @@ trait TracingService[T <: GeneratedMessage] } */ - def save(tracing: T, tracingId: Option[String], version: Long, toCache: Boolean = false): Fox[String] = { + def save(tracing: T, tracingId: Option[String], version: Long, toTemporaryStore: Boolean = false): Fox[String] = { val id = tracingId.getOrElse(TracingId.generate) - if (toCache) { + if (toTemporaryStore) { temporaryTracingStore.insert(id, tracing, Some(temporaryStoreTimeout)) temporaryTracingIdStore.insert(temporaryIdKey(id), "", Some(temporaryIdStoreTimeout)) Fox.successful(id) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 6e8e80eb1a8..0210da35f84 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -62,7 +62,7 @@ trait VolumeTracingDownsampling bucket: BucketPosition, data: Array[Byte], version: Long, - toCache: Boolean = false): Fox[Unit] + toTemporaryStore: Boolean = false): Fox[Unit] protected def updateSegmentIndex(segmentIndexBuffer: VolumeSegmentIndexBuffer, bucketPosition: BucketPosition, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index b4e8aa105b3..cf11d610d39 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -598,14 +598,14 @@ class VolumeTracingService @Inject()( def updateMagList(tracingId: 
String, tracing: VolumeTracing, mags: Set[Vec3Int], - toCache: Boolean = false): Fox[String] = + toTemporaryStore: Boolean = false): Fox[String] = for { _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." _ <- bool2Fox(mags.nonEmpty) ?~> "Mag restrictions result in zero mags" id <- save(tracing.copy(mags = mags.toList.sortBy(_.maxDim).map(vec3IntToProto)), Some(tracingId), tracing.version, - toCache) + toTemporaryStore) } yield id // TODO use or remove @@ -894,10 +894,12 @@ class VolumeTracingService @Inject()( } } - def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(VolumeTracing, String)])( - implicit tc: TokenContext): Fox[Unit] = + def mergeEditableMappings(newTracingId: String, + tracingsWithIds: List[(VolumeTracing, String)], + persist: Boolean): Fox[Unit] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { + _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (used by compound annotations)" remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox From 1a59e62786210a1986207256b8dbe615252e9dfc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 30 Oct 2024 17:11:53 +0100 Subject: [PATCH 141/361] always use only data layers sent by tracing store and disregard layers set from core backend --- .../oxalis/model_initialization.ts | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index baa43bccb2d..7a022cc4933 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -138,24 +138,24 @@ export async function initialize( maybeOutdatedAnnotation.tracingStore.url, 
maybeOutdatedAnnotation.id, ); + const layersWithStats = annotationFromTracingStore.annotationLayers.map((layer) => { + const matchingLayer = maybeOutdatedAnnotation.annotationLayers.find( + (l) => l.tracingId === layer.tracingId, + ); + + return { + tracingId: layer.tracingId, + name: layer.name, + typ: layer.type, + stats: matchingLayer?.stats || {}, + }; + }); const completeAnnotation = { ...maybeOutdatedAnnotation, name: annotationFromTracingStore.name, description: annotationFromTracingStore.description, + annotationLayers: layersWithStats, }; - annotationFromTracingStore.annotationLayers.forEach((layer) => { - if ( - maybeOutdatedAnnotation.annotationLayers.find((l) => l.tracingId === layer.tracingId) == - null - ) { - completeAnnotation.annotationLayers.push({ - tracingId: layer.tracingId, - name: layer.name, - typ: layer.type, - stats: {}, - }); - } - }); annotation = completeAnnotation; } datasetId = { From 0e9fffc428af85b5caa5c3a1570cc33b06d68b98 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 5 Nov 2024 10:55:27 +0100 Subject: [PATCH 142/361] earliestAccessibleVersion; always fetch all layers; wip resetToBase --- app/controllers/AnnotationController.scala | 1 - .../WKRemoteTracingStoreController.scala | 6 +- app/models/annotation/AnnotationService.scala | 14 +- webknossos-datastore/proto/Annotation.proto | 1 + .../TSRemoteWebknossosClient.scala | 20 +- .../annotation/AnnotationReversion.scala | 15 +- .../AnnotationTransactionService.scala | 46 +++- .../annotation/AnnotationUpdateActions.scala | 17 ++ .../annotation/TSAnnotationService.scala | 239 +++++++----------- .../annotation/UpdateActions.scala | 3 + .../EditableMappingController.scala | 23 +- .../controllers/TSAnnotationController.scala | 17 ++ .../controllers/VolumeTracingController.scala | 11 +- .../EditableMappingUpdater.scala | 31 +-- .../volume/VolumeTracingService.scala | 16 +- ...alableminds.webknossos.tracingstore.routes | 1 + 16 files changed, 230 insertions(+), 231 deletions(-) diff 
--git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 62b64efea09..b0fbcedabbe 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -11,7 +11,6 @@ import com.scalableminds.webknossos.datastore.models.annotation.{ AnnotationLayerType } import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters -import com.scalableminds.webknossos.tracingstore.tracings.volume.MagRestrictions import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingType} import mail.{MailchimpClient, MailchimpTag} import models.analytics.{AnalyticsService, CreateAnnotationEvent, OpenAnnotationEvent} diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index b659700a52f..4dbef690f7b 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId -import com.scalableminds.webknossos.tracingstore.TracingUpdatesReport +import com.scalableminds.webknossos.tracingstore.AnnotationUpdatesReport import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.TracingId @@ -88,8 +88,8 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } yield Ok } - def handleTracingUpdateReport(name: String, key: String): Action[TracingUpdatesReport] = - Action.async(validateJson[TracingUpdatesReport]) { implicit request => + def handleTracingUpdateReport(name: String, key: String): Action[AnnotationUpdatesReport] = + 
Action.async(validateJson[AnnotationUpdatesReport]) { implicit request => implicit val ctx: DBAccessContext = GlobalAccessContext tracingStoreService.validateAccess(name, key) { _ => val report = request.body diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 759b8efe180..9f7a48c0aed 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -278,10 +278,13 @@ class AnnotationService @Inject()( AnnotationLayerType.toProto(l.typ) ) } - annotationProto = AnnotationProto(name = Some(AnnotationDefaults.defaultName), - description = Some(AnnotationDefaults.defaultDescription), - version = 0L, - annotationLayers = layersProto) + annotationProto = AnnotationProto( + name = Some(AnnotationDefaults.defaultName), + description = Some(AnnotationDefaults.defaultDescription), + version = 0L, + annotationLayers = layersProto, + earliestAccessibleVersion = 0L + ) _ <- tracingStoreClient.saveAnnotationProto(annotationId, annotationProto) } yield newAnnotationLayers @@ -485,7 +488,8 @@ class AnnotationService @Inject()( name = Some(AnnotationDefaults.defaultName), description = Some(AnnotationDefaults.defaultDescription), version = 0L, - annotationLayers = annotationLayers.map(_.toProto) + annotationLayers = annotationLayers.map(_.toProto), + earliestAccessibleVersion = 0L ) _ <- tracingStoreClient.saveAnnotationProto(annotationBase._id, annotationBaseProto) _ = logger.info(s"inserting base annotation ${annotationBase._id} for task ${task._id}") diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 5eaab3982f7..4aea0922056 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -12,6 +12,7 @@ message AnnotationProto { optional string description = 2; required int64 version = 3; repeated AnnotationLayerProto annotationLayers = 4; + required int64 
earliestAccessibleVersion = 5; } message AnnotationLayerProto { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 523daee164b..f500843ebbc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -26,15 +26,15 @@ import play.api.libs.ws.WSResponse import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt -case class TracingUpdatesReport(annotationId: String, - // TODO stats per tracing id? coordinate with frontend - timestamps: List[Instant], - statistics: Option[JsObject], - significantChangesCount: Int, - viewChangesCount: Int, - userToken: Option[String]) -object TracingUpdatesReport { - implicit val jsonFormat: OFormat[TracingUpdatesReport] = Json.format[TracingUpdatesReport] +case class AnnotationUpdatesReport(annotationId: String, + // TODO stats per tracing id? 
coordinate with frontend + timestamps: List[Instant], + statistics: Option[JsObject], + significantChangesCount: Int, + viewChangesCount: Int, + userToken: Option[String]) +object AnnotationUpdatesReport { + implicit val jsonFormat: OFormat[AnnotationUpdatesReport] = Json.format[AnnotationUpdatesReport] } class TSRemoteWebknossosClient @Inject()( @@ -53,7 +53,7 @@ class TSRemoteWebknossosClient @Inject()( private lazy val annotationIdByTracingIdCache: AlfuCache[String, String] = AlfuCache(maxCapacity = 10000, timeToLive = 5 minutes) - def reportTracingUpdates(tracingUpdatesReport: TracingUpdatesReport): Fox[WSResponse] = + def reportAnnotationUpdates(tracingUpdatesReport: AnnotationUpdatesReport): Fox[WSResponse] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") .addQueryString("key" -> tracingStoreKey) .silent diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala index a251d545fee..6fd848fc7b7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationReversion.scala @@ -13,7 +13,7 @@ trait AnnotationReversion { def revertDistributedElements(currentAnnotationWithTracings: AnnotationWithTracings, sourceAnnotationWithTracings: AnnotationWithTracings, - revertAction: RevertToVersionAnnotationAction, + sourceVersion: Long, newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = for { _ <- Fox.serialCombined(sourceAnnotationWithTracings.getVolumes) { @@ -22,23 +22,20 @@ trait AnnotationReversion { for { tracingBeforeRevert <- currentAnnotationWithTracings.getVolume(tracingId).toFox _ <- Fox.runIf(!sourceTracing.getHasEditableMapping)( - 
volumeTracingService.revertVolumeData(tracingId, - revertAction.sourceVersion, - sourceTracing, - newVersion: Long, - tracingBeforeRevert)) + volumeTracingService + .revertVolumeData(tracingId, sourceVersion, sourceTracing, newVersion: Long, tracingBeforeRevert)) _ <- Fox.runIf(sourceTracing.getHasEditableMapping)( - revertEditableMappingFields(currentAnnotationWithTracings, revertAction, tracingId)) + revertEditableMappingFields(currentAnnotationWithTracings, sourceVersion, tracingId)) } yield () } } yield () private def revertEditableMappingFields(currentAnnotationWithTracings: AnnotationWithTracings, - revertAction: RevertToVersionAnnotationAction, + sourceVersion: Long, tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = for { updater <- currentAnnotationWithTracings.getEditableMappingUpdater(tracingId).toFox - _ <- updater.revertToVersion(revertAction) + _ <- updater.revertToVersion(sourceVersion) _ <- updater.flushBuffersToFossil() } yield () } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 2bad1a3e045..6bd9a1e7294 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -1,9 +1,14 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.util.tools.Fox.bool2Fox -import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import com.scalableminds.webknossos.tracingstore.{ + TSRemoteWebknossosClient, + TracingStoreRedisStore, + AnnotationUpdatesReport +} 
import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, TracingId} import com.scalableminds.webknossos.tracingstore.tracings.volume.{ BucketMutatingVolumeUpdateAction, @@ -22,6 +27,7 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe uncommittedUpdatesStore: TracingStoreRedisStore, volumeTracingService: VolumeTracingService, tracingDataStore: TracingDataStore, + remoteWebknossosClient: TSRemoteWebknossosClient, annotationService: TSAnnotationService) extends KeyValueStoreImplicits with LazyLogging { @@ -146,6 +152,24 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe ) } + def handleSingleUpdateAction(annotationId: String, currentVersion: Long, updateAction: UpdateAction)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Long] = { + val wrapped = List( + UpdateActionGroup( + currentVersion + 1, + System.currentTimeMillis(), + None, + List(updateAction), + None, + None, + "dummyTransactionId", + 1, + 0 + )) + handleUpdateGroups(annotationId, wrapped) + } + def handleUpdateGroups(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = if (updateGroups.forall(_.transactionGroupCount == 1)) { @@ -161,7 +185,7 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe private def commitUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = for { - _ <- annotationService.reportUpdates(annotationId, updateGroups) + _ <- reportUpdates(annotationId, updateGroups) currentCommittedVersion: Fox[Long] = annotationService.currentMaterializableVersion(annotationId) _ = logger.info(s"trying to commit ${updateGroups .map(_.actions.length) @@ -228,9 +252,9 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe case first :: rest => first.addInfo(updateActionGroup.info) :: 
rest } actionsWithInfo.map { - case a: UpdateBucketVolumeAction => a.withoutBase64Data + case a: UpdateBucketVolumeAction => a.withoutBase64Data case a: AddLayerAnnotationAction => a.copy(tracingId = Some(TracingId.generate)) - case a => a + case a => a } } @@ -250,4 +274,18 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe } yield updateGroup.version } + private def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])( + implicit tc: TokenContext): Fox[Unit] = + for { + _ <- remoteWebknossosClient.reportAnnotationUpdates( + AnnotationUpdatesReport( + annotationId, + timestamps = updateGroups.map(g => Instant(g.timestamp)), + statistics = updateGroups.flatMap(_.stats).lastOption, // TODO statistics per tracing/layer + significantChangesCount = updateGroups.map(_.significantChangesCount).sum, + viewChangesCount = updateGroups.map(_.viewChangesCount).sum, + tc.userTokenOpt + )) + } yield () + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index 95a35cd4c95..5dc0eb5e273 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -93,6 +93,19 @@ case class RevertToVersionAnnotationAction(sourceVersion: Long, this.copy(actionAuthorId = authorId) } +// Used only in tasks by admin to undo the work done of the annotator +case class ResetToBaseAnnotationAction(actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) + extends AnnotationUpdateAction + with ApplyImmediatelyUpdateAction { + override def addTimestamp(timestamp: Long): UpdateAction = + this.copy(actionTimestamp = Some(timestamp)) + 
override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def addAuthorId(authorId: Option[String]): UpdateAction = + this.copy(actionAuthorId = authorId) +} + case class UpdateTdCameraAnnotationAction(actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) @@ -125,6 +138,10 @@ object RevertToVersionAnnotationAction { implicit val jsonFormat: OFormat[RevertToVersionAnnotationAction] = Json.format[RevertToVersionAnnotationAction] } +object ResetToBaseAnnotationAction { + implicit val jsonFormat: OFormat[ResetToBaseAnnotationAction] = + Json.format[ResetToBaseAnnotationAction] +} object UpdateTdCameraAnnotationAction { implicit val jsonFormat: OFormat[UpdateTdCameraAnnotationAction] = Json.format[UpdateTdCameraAnnotationAction] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 9bf7ff95522..2b020c5abdc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -3,7 +3,6 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} -import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.Annotation.{ @@ -35,7 +34,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ MagRestrictions, UpdateMappingNameVolumeAction, VolumeTracingService, - VolumeUpdateAction } import 
com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, @@ -45,11 +43,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.{ TracingSelector, VersionedKeyValuePair } -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteDatastoreClient, - TSRemoteWebknossosClient, - TracingUpdatesReport -} +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Full} import play.api.libs.json.{JsObject, JsValue, Json} @@ -71,22 +65,46 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss with LazyLogging { private lazy val materializedAnnotationWithTracingCache = - // annotation id, version, requestedSkeletons, requestedVolumes, requestAll - // TODO instead of requested, use list of tracings determined from requests + updates? - AlfuCache[(String, Long, List[String], List[String], Boolean), AnnotationWithTracings](maxCapacity = 1000) + // annotation id, version + AlfuCache[(String, Long), AnnotationWithTracings](maxCapacity = 1000) - def reportUpdates(annotationId: String, updateGroups: List[UpdateActionGroup])(implicit tc: TokenContext): Fox[Unit] = + def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationProto] = for { - _ <- remoteWebknossosClient.reportTracingUpdates( - TracingUpdatesReport( - annotationId, - timestamps = updateGroups.map(g => Instant(g.timestamp)), - statistics = updateGroups.flatMap(_.stats).lastOption, // TODO statistics per tracing/layer - significantChangesCount = updateGroups.map(_.significantChangesCount).sum, - viewChangesCount = updateGroups.map(_.viewChangesCount).sum, - tc.userTokenOpt - )) - } yield () + withTracings <- getWithTracings(annotationId, version) + } yield withTracings.annotation + + def getMultiple(annotationIds: Seq[String])(implicit ec: ExecutionContext, + tc: TokenContext): 
Fox[Seq[AnnotationProto]] = + Fox.serialCombined(annotationIds) { annotationId => + get(annotationId, None) + } + + private def getWithTracings(annotationId: String, version: Option[Long])( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = + for { + newestMaterialized <- getNewestMaterialized(annotationId) + targetVersion <- determineTargetVersion(annotationId, newestMaterialized, version) ?~> "determineTargetVersion.failed" + // When requesting any other than the newest version, do not consider the changes final + reportChangesToWk = version.isEmpty || version.contains(targetVersion) + updatedAnnotation <- materializedAnnotationWithTracingCache.getOrLoad( + (annotationId, targetVersion), + _ => getWithTracingsVersioned(annotationId, targetVersion, reportChangesToWk = reportChangesToWk) + ) + } yield updatedAnnotation + + private def getWithTracingsVersioned(annotationId: String, version: Long, reportChangesToWk: Boolean)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = + for { + annotationWithVersion <- tracingDataStore.annotations.get(annotationId, Some(version))( + fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + _ = logger.info( + s"cache miss for $annotationId v$version, applying updates from ${annotationWithVersion.version} to $version...") + annotation = annotationWithVersion.value + updated <- applyPendingUpdates(annotation, annotationId, version, reportChangesToWk) ?~> "applyUpdates.failed" + } yield updated def currentMaterializableVersion(annotationId: String): Fox[Long] = tracingDataStore.annotationUpdates.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) @@ -94,6 +112,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def currentMaterializedVersion(annotationId: String): Fox[Long] = tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + private def 
getNewestMaterialized(annotationId: String): Fox[AnnotationProto] = + for { + keyValuePair <- tracingDataStore.annotations.get[AnnotationProto](annotationId, mayBeEmpty = Some(true))( + fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + } yield keyValuePair.value + private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = if (desiredVersion == existingVersion) Fox.successful(List()) @@ -133,7 +157,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) case a: RevertToVersionAnnotationAction => - revertToVersion(annotationId, annotationWithTracings, a, targetVersion) // TODO if the revert action is not isolated, we need not the target version of all but the target version of this update + revertToVersion(annotationId, annotationWithTracings, a, targetVersion) // TODO: if the revert action is not isolated, we need not the target version of all but the target version of this update + case _: ResetToBaseAnnotationAction => + resetToBase(annotationId, annotationWithTracings, targetVersion) case _: BucketMutatingVolumeUpdateAction => Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. 
case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") @@ -163,19 +189,27 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationWithTracings: AnnotationWithTracings, revertAction: RevertToVersionAnnotationAction, newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = - // Note: works only after “ironing out” the update action groups + // Note: works only if revert actions are in separate update groups for { - sourceAnnotation: AnnotationWithTracings <- getWithTracings( - annotationId, - Some(revertAction.sourceVersion), - List.empty, - List.empty, - requestAll = true) // TODO do we need to request the others? + _ <- bool2Fox(revertAction.sourceVersion >= annotationWithTracings.annotation.earliestAccessibleVersion) ?~> f"Trying to revert to ${revertAction.sourceVersion}, but earliest accessible is ${annotationWithTracings.annotation.earliestAccessibleVersion}" + sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(revertAction.sourceVersion)) _ = logger.info( s"reverting to suorceVersion ${revertAction.sourceVersion}. 
got sourceAnnotation with version ${sourceAnnotation.version} with ${sourceAnnotation.skeletonStats}") - _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, revertAction, newVersion) + _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, revertAction.sourceVersion, newVersion) } yield sourceAnnotation + private def resetToBase(annotationId: String, annotationWithTracings: AnnotationWithTracings, newVersion: Long)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = { + // Note: works only if reset actions are in separate update groups + val sourceVersion = 0L // Tasks are always created as v0 currently + logger.info(s"Resetting annotation $annotationId to base (v$sourceVersion)") + for { + sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(sourceVersion)) + _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, sourceVersion, newVersion) + } yield sourceAnnotation + } + def updateActionLog(annotationId: String, newestVersion: Long, oldestVersion: Long)( implicit ec: ExecutionContext): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject =
List[String], - requestedVolumeTracingIds: List[String], - requestAll: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = - for { - targetVersion <- determineTargetVersion(annotationId, version) ?~> "determineTargetVersion.failed" - // When requesting any other than the newest version, do not consider the changes final - reportChangesToWk = version.isEmpty || version.contains(targetVersion) - updatedAnnotation <- materializedAnnotationWithTracingCache.getOrLoad( - (annotationId, targetVersion, requestedSkeletonTracingIds, requestedVolumeTracingIds, requestAll), - _ => - getWithTracingsVersioned( - annotationId, - targetVersion, - requestedSkeletonTracingIds, - requestedVolumeTracingIds, - requestAll = true, - reportChangesToWk = reportChangesToWk) // TODO can we request fewer to save perf? still need to avoid duplicate apply - ) - } yield updatedAnnotation - - private def getWithTracingsVersioned( - annotationId: String, - version: Long, - requestedSkeletonTracingIds: List[String], - requestedVolumeTracingIds: List[String], - requestAll: Boolean, - reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = - for { - annotationWithVersion <- tracingDataStore.annotations.get(annotationId, Some(version))( - fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" - _ = logger.info( - s"cache miss for ${annotationId} v$version, requested ${requestedSkeletonTracingIds.mkString(",")} + ${requestedVolumeTracingIds - .mkString(",")} (requestAll=$requestAll). 
Applying updates from ${annotationWithVersion.version} to $version...") - annotation = annotationWithVersion.value - updated <- applyPendingUpdates(annotation, - annotationId, - version, - requestedSkeletonTracingIds, - requestedVolumeTracingIds, - requestAll, - reportChangesToWk) ?~> "applyUpdates.failed" - } yield updated - def findEditableMappingInfo(annotationId: String, tracingId: String, version: Option[Long] = None)( implicit ec: ExecutionContext, tc: TokenContext): Fox[EditableMappingInfo] = for { - annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId), requestAll = false) ?~> "getWithTracings.failed" + annotation <- getWithTracings(annotationId, version) ?~> "getWithTracings.failed" tracing <- annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" } yield tracing @@ -284,23 +260,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotation: AnnotationProto, annotationId: String, targetVersion: Long, - requestedSkeletonTracingIds: List[String], - requestedVolumeTracingIds: List[String], - requestAll: Boolean, reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { updateGroupsAsSaved <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" updatesGroupsRegrouped = regroupByIsolationSensitiveActions(updateGroupsAsSaved) - updatesFlat = updatesGroupsRegrouped.flatMap(_._2) - annotationWithTracings <- findTracingsForUpdates(annotation, - updatesFlat, - requestedSkeletonTracingIds, - requestedVolumeTracingIds, - requestAll) ?~> "findTracingsForUpdates.failed" - annotationWithTracingsAndMappings <- findEditableMappingsForUpdates( + annotationWithTracings <- findTracingsForAnnotation(annotation) ?~> "findTracingsForUpdates.failed" + annotationWithTracingsAndMappings <- findEditableMappingsForAnnotation( annotationId, annotationWithTracings, - updatesFlat, annotation.version, 
targetVersion) // TODO: targetVersion must be set per update group, as reverts may come between these updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, @@ -309,15 +276,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss reportChangesToWk) ?~> "applyUpdates.inner.failed" } yield updated - private def findEditableMappingsForUpdates( // TODO integrate with findTracings? - annotationId: String, - annotationWithTracings: AnnotationWithTracings, - updates: List[UpdateAction], - currentMaterializedVersion: Long, - targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { + private def findEditableMappingsForAnnotation( + annotationId: String, + annotationWithTracings: AnnotationWithTracings, + currentMaterializedVersion: Long, + targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { val volumeWithEditableMapping = annotationWithTracings.volumesThatHaveEditableMapping logger.info(s"fetching editable mappings ${volumeWithEditableMapping.map(_._2).mkString(",")}") - // TODO perf optimization: intersect with editable mapping updates? unless requested for { idInfoUpdaterTuples <- Fox.serialCombined(volumeWithEditableMapping) { case (volumeTracing, volumeTracingId) => @@ -364,30 +329,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss relyOnAgglomerateIds = false // TODO should we? 
) - private def findTracingsForUpdates( - annotation: AnnotationProto, - updates: List[UpdateAction], - requestedSkeletonTracingIds: List[String], - requestedVolumeTracingIds: List[String], - requestAll: Boolean)(implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { + private def findTracingsForAnnotation(annotation: AnnotationProto)( + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { val skeletonTracingIds = - if (requestAll) - annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) - else { - (updates.flatMap { - case u: SkeletonUpdateAction => Some(u.actionTracingId) - case _ => None - } ++ requestedSkeletonTracingIds).distinct - } + annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) + val volumeTracingIds = - if (requestAll) - annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) - else { - (updates.flatMap { - case u: VolumeUpdateAction => Some(u.actionTracingId) - case _ => None - } ++ requestedVolumeTracingIds).distinct - } + annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) logger.info(s"fetching volumes $volumeTracingIds and skeletons $skeletonTracingIds") for { @@ -494,22 +442,26 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def flushAnnotationInfo(annotationId: String, annotationWithTracings: AnnotationWithTracings) = tracingDataStore.annotations.put(annotationId, annotationWithTracings.version, annotationWithTracings.annotation) - private def determineTargetVersion(annotationId: String, targetVersionOpt: Option[Long]): Fox[Long] = + private def determineTargetVersion(annotationId: String, + newestMaterializedAnnotation: AnnotationProto, + requestedVersionOpt: Option[Long]): Fox[Long] = /* * Determines the newest saved version from the updates column. 
* if there are no updates at all, assume annotation is brand new (possibly created from NML, - * hence the emptyFallbck annotation.version) + * hence the emptyFallback newestMaterializedAnnotation.version) */ for { newestUpdateVersion <- tracingDataStore.annotationUpdates.getVersion(annotationId, mayBeEmpty = Some(true), - emptyFallback = Some(0L)) - } yield { - targetVersionOpt match { - case None => newestUpdateVersion - case Some(desiredSome) => math.min(desiredSome, newestUpdateVersion) + emptyFallback = + Some(newestMaterializedAnnotation.version)) + targetVersion = requestedVersionOpt match { + case None => newestUpdateVersion + case Some(requestedVersion) => + math.max(newestMaterializedAnnotation.earliestAccessibleVersion, + math.min(requestedVersion, newestUpdateVersion)) } - } + } yield targetVersion def updateActionStatistics(tracingId: String): Fox[JsObject] = for { @@ -568,10 +520,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def findVolume(annotationId: String, tracingId: String, version: Option[Long] = None, - useCache: Boolean = true, + useCache: Boolean = true, // TODO applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = for { - annotation <- getWithTracings(annotationId, version, List.empty, List(tracingId), requestAll = false) // TODO is applyUpdates still needed? 
tracing <- annotation.getVolume(tracingId) } yield tracing @@ -579,13 +531,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId: String, tracingId: String, version: Option[Long] = None, - useCache: Boolean = true, + useCache: Boolean = true, // TODO applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] = if (tracingId == TracingId.dummy) Fox.successful(skeletonTracingService.dummyTracing) else { for { - annotation <- getWithTracings(annotationId, version, List(tracingId), List.empty, requestAll = false) // TODO is applyUpdates still needed? + annotation <- getWithTracings(annotationId, version) // TODO is applyUpdates still needed? tracing <- annotation.getSkeleton(tracingId) } yield tracing } @@ -635,13 +587,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)(layer => duplicateLayer(annotationId, layer, currentAnnotation.version, isFromTask, datasetBoundingBox)) _ <- duplicateUpdates(annotationId, newAnnotationId) - duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers) + duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, + earliestAccessibleVersion = currentAnnotation.version) _ <- tracingDataStore.annotations.put(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) } yield duplicatedAnnotation private def duplicateUpdates(annotationId: String, newAnnotationId: String)( implicit ec: ExecutionContext): Fox[Unit] = - // TODO perf: batch or use fossildb duplicate api + // TODO memory: batch for { updatesAsBytes: Seq[(Long, Array[Byte])] <- tracingDataStore.annotationUpdates .getMultipleVersionsAsVersionValueTuple(annotationId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index 091ae7f327e..c81be809cb7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -110,6 +110,7 @@ object UpdateAction { case "updateLayerMetadata" => deserialize[UpdateLayerMetadataAnnotationAction](jsonValue) case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationAction](jsonValue) case "revertToVersion" => deserialize[RevertToVersionAnnotationAction](jsonValue) + case "resetToBase" => deserialize[ResetToBaseAnnotationAction](jsonValue) case "updateTdCamera" => deserialize[UpdateTdCameraAnnotationAction](jsonValue) case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") @@ -215,6 +216,8 @@ object UpdateAction { "value" -> Json.toJson(s)(UpdateMetadataAnnotationAction.jsonFormat)) case s: RevertToVersionAnnotationAction => Json.obj("name" -> "revertToVersion", "value" -> Json.toJson(s)(RevertToVersionAnnotationAction.jsonFormat)) + case s: ResetToBaseAnnotationAction => + Json.obj("name" -> "resetToBase", "value" -> Json.toJson(s)(ResetToBaseAnnotationAction.jsonFormat)) case s: UpdateTdCameraAnnotationAction => Json.obj("name" -> "updateTdCamera", "value" -> Json.toJson(s)(UpdateTdCameraAnnotationAction.jsonFormat)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 5abf9014d24..a1e6fc9a3b7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -8,11 +8,7 @@ 
import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.{EditableMappingSegmentListResult, UserAccessRequest} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} -import com.scalableminds.webknossos.tracingstore.annotation.{ - AnnotationTransactionService, - TSAnnotationService, - UpdateActionGroup -} +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingService, MinCutParameters, @@ -52,19 +48,10 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer actionTracingId = tracingId, actionTimestamp = Some(System.currentTimeMillis())) _ <- annotationTransactionService - .handleUpdateGroups( // TODO replace this route by the update action only? address editable mappings by volume tracing id? - annotationId, - List( - UpdateActionGroup(tracing.version + 1, - System.currentTimeMillis(), - None, - List(volumeUpdate), - None, - None, - "dummyTransactionId", - 1, - 0)) - ) + .handleSingleUpdateAction( // TODO replace this route by the update action only? 
+ annotationId, + tracing.version, + volumeUpdate) infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) } yield Ok(infoJson) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 8c14836fd2d..2f543b04548 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -20,6 +20,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.{ import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService import com.scalableminds.webknossos.tracingstore.annotation.{ AnnotationTransactionService, + ResetToBaseAnnotationAction, TSAnnotationService, UpdateActionGroup } @@ -138,6 +139,22 @@ class TSAnnotationController @Inject()( } } + def resetToBase(annotationId: String): Action[AnyContent] = + Action.async { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + for { + currentVersion <- annotationService.currentMaterializableVersion(annotationId) + _ <- annotationTransactionService.handleSingleUpdateAction(annotationId, + currentVersion, + ResetToBaseAnnotationAction()) + } yield Ok + } + } + } + } + def mergedFromIds(persist: Boolean, newAnnotationId: String): Action[List[String]] = Action.async(validateJson[List[String]]) { implicit request => log() { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 665c185a55f..c7444ae982d 
100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -26,6 +26,7 @@ import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransacti import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ImportVolumeDataVolumeAction, MagRestrictions, MergedVolumeStats, TSFullMeshService, @@ -249,11 +250,11 @@ class VolumeTracingController @Inject()( tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") currentVersion <- request.body.dataParts("currentVersion").headOption.flatMap(_.toIntOpt).toFox zipFile <- request.body.files.headOption.map(f => new File(f.ref.path.toString)).toFox - (updateGroup, largestSegmentId) <- volumeTracingService.importVolumeData(tracingId, - tracing, - zipFile, - currentVersion) - _ <- annotationTransactionService.handleUpdateGroups(annotationId, List(updateGroup)) + largestSegmentId <- volumeTracingService.importVolumeData(tracingId, tracing, zipFile, currentVersion) + _ <- annotationTransactionService.handleSingleUpdateAction( + annotationId, + tracing.version, + ImportVolumeDataVolumeAction(tracingId, Some(largestSegmentId))) } yield Ok(Json.toJson(largestSegmentId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index d7231195a67..fc545c9e8d7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -10,11 +10,7 @@ import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentToAgglomerateChunkProto } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient -import com.scalableminds.webknossos.tracingstore.annotation.{ - RevertToVersionAnnotationAction, - TSAnnotationService, - UpdateAction -} +import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateAction} import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, @@ -418,9 +414,9 @@ class EditableMappingUpdater( ) } - def revertToVersion(revertAction: RevertToVersionAnnotationAction)(implicit ec: ExecutionContext): Fox[Unit] = + def revertToVersion(sourceVersion: Long)(implicit ec: ExecutionContext): Fox[Unit] = for { - _ <- bool2Fox(revertAction.sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" + _ <- bool2Fox(sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" _ = segmentToAgglomerateBuffer.clear() _ = agglomerateToGraphBuffer.clear() segmentToAgglomerateChunkNewestStream = new VersionedSegmentToAgglomerateChunkIterator( @@ -428,16 +424,13 @@ class EditableMappingUpdater( tracingDataStore.editableMappingsSegmentToAgglomerate) _ <- Fox.serialCombined(segmentToAgglomerateChunkNewestStream) { case (chunkKey, _, version) => - if (version > revertAction.sourceVersion) { - editableMappingService - .getSegmentToAgglomerateChunk(chunkKey, Some(revertAction.sourceVersion)) - .futureBox - .map { - case Full(chunkData) => segmentToAgglomerateBuffer.put(chunkKey, (chunkData.toMap, false)) - case Empty => segmentToAgglomerateBuffer.put(chunkKey, (Map[Long, Long](), true)) - case Failure(msg, _, 
chain) => - Fox.failure(msg, Empty, chain) - } + if (version > sourceVersion) { + editableMappingService.getSegmentToAgglomerateChunk(chunkKey, Some(sourceVersion)).futureBox.map { + case Full(chunkData) => segmentToAgglomerateBuffer.put(chunkKey, (chunkData.toMap, false)) + case Empty => segmentToAgglomerateBuffer.put(chunkKey, (Map[Long, Long](), true)) + case Failure(msg, _, chain) => + Fox.failure(msg, Empty, chain) + } } else Fox.successful(()) } agglomerateToGraphNewestStream = new VersionedAgglomerateToGraphIterator( @@ -445,11 +438,11 @@ class EditableMappingUpdater( tracingDataStore.editableMappingsAgglomerateToGraph) _ <- Fox.serialCombined(agglomerateToGraphNewestStream) { case (graphKey, _, version) => - if (version > revertAction.sourceVersion) { + if (version > sourceVersion) { for { agglomerateId <- agglomerateIdFromAgglomerateGraphKey(graphKey) _ <- editableMappingService - .getAgglomerateGraphForId(tracingId, revertAction.sourceVersion, agglomerateId) + .getAgglomerateGraphForId(tracingId, sourceVersion, agglomerateId) .futureBox .map { case Full(graphData) => agglomerateToGraphBuffer.put(graphKey, (graphData, false)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index cf11d610d39..bd8812f36a5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -23,7 +23,6 @@ import com.scalableminds.webknossos.datastore.models.{ WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.services._ -import com.scalableminds.webknossos.tracingstore.annotation.UpdateActionGroup import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import 
com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat @@ -824,7 +823,7 @@ class VolumeTracingService @Inject()( def importVolumeData(tracingId: String, tracing: VolumeTracing, zipFile: File, currentVersion: Int)( implicit mp: MessagesProvider, - tc: TokenContext): Fox[(UpdateActionGroup, Long)] = + tc: TokenContext): Fox[Long] = if (currentVersion != tracing.version) Fox.failure("version.mismatch") else { @@ -879,18 +878,7 @@ class VolumeTracingService @Inject()( } yield () } _ <- segmentIndexBuffer.flush() - updateGroup = UpdateActionGroup( - tracing.version + 1, - System.currentTimeMillis(), - None, - List(ImportVolumeDataVolumeAction(tracingId, Some(mergedVolume.largestSegmentId.toPositiveLong))), - None, - None, - "dummyTransactionId", - 1, - 0 - ) - } yield (updateGroup, mergedVolume.largestSegmentId.toPositiveLong) + } yield mergedVolume.largestSegmentId.toPositiveLong } } diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 82274261f1f..5bb6a936629 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -12,6 +12,7 @@ GET /annotation/:annotationId/updateActionLog GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, 
datasetBoundingBox: Option[String]) +POST /annotation/:annotationId/resetToBase @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.resetToBase(annotationId: String) POST /annotation/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.mergedFromIds(persist: Boolean, newAnnotationId: String) # Volume tracings From 02f78b766341f5b59103de52e3fbaf7a1b153470 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 5 Nov 2024 11:11:46 +0100 Subject: [PATCH 143/361] call resetToBase --- app/controllers/AnnotationIOController.scala | 4 +- app/models/annotation/AnnotationService.scala | 56 +++---------------- .../WKRemoteTracingStoreClient.scala | 7 +++ ...sampling.scala => VolumeTracingMags.scala} | 6 +- .../volume/VolumeTracingService.scala | 2 +- 5 files changed, 20 insertions(+), 55 deletions(-) rename webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/{VolumeTracingDownsampling.scala => VolumeTracingMags.scala} (98%) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index cf5bbaaea0f..42a17b90a74 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -32,7 +32,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFo import com.scalableminds.webknossos.tracingstore.tracings.volume.{ VolumeDataZipFormat, VolumeTracingDefaults, - VolumeTracingDownsampling + VolumeTracingMags } import com.typesafe.scalalogging.LazyLogging @@ -336,7 +336,7 @@ class AnnotationIOController @Inject()( fallbackLayer = fallbackLayerOpt.map(_.name), largestSegmentId = combineLargestSegmentIdsByPrecedence(volumeTracing.largestSegmentId, fallbackLayerOpt.map(_.largestSegmentId)), - mags = VolumeTracingDownsampling.magsForVolumeTracing(dataSource, fallbackLayerOpt).map(vec3IntToProto), + mags = VolumeTracingMags.magsForVolumeTracing(dataSource, 
fallbackLayerOpt).map(vec3IntToProto), hasSegmentIndex = Some(tracingCanHaveSegmentIndex) ) } diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 9f7a48c0aed..8cee2a6f915 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -32,7 +32,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFo import com.scalableminds.webknossos.tracingstore.tracings.volume.{ MagRestrictions, VolumeTracingDefaults, - VolumeTracingDownsampling + VolumeTracingMags } import com.typesafe.scalalogging.LazyLogging import models.annotation.AnnotationState._ @@ -72,7 +72,6 @@ class AnnotationService @Inject()( annotationInformationProvider: AnnotationInformationProvider, savedTracingInformationHandler: SavedTracingInformationHandler, annotationDAO: AnnotationDAO, - annotationLayersDAO: AnnotationLayerDAO, userDAO: UserDAO, taskTypeDAO: TaskTypeDAO, taskService: TaskService, @@ -131,7 +130,7 @@ class AnnotationService @Inject()( magRestrictions: MagRestrictions, mappingName: Option[String] ): Fox[VolumeTracing] = { - val mags = VolumeTracingDownsampling.magsForVolumeTracing(dataSource, fallbackLayer) + val mags = VolumeTracingMags.magsForVolumeTracing(dataSource, fallbackLayer) val magsRestricted = magRestrictions.filterAllowed(mags) val additionalAxes = fallbackLayer.map(_.additionalAxes).getOrElse(dataSource.additionalAxesUnion) @@ -306,10 +305,6 @@ class AnnotationService @Inject()( _ <- annotationDAO.insertOne(annotation) } yield annotation - def downsampleAnnotation(annotation: Annotation, volumeAnnotationLayer: AnnotationLayer)( - implicit ctx: DBAccessContext): Fox[Unit] = - ??? 
// TODO: remove feature or implement as update action - // WARNING: needs to be repeatable, might be called multiple times for an annotation def finish(annotation: Annotation, user: User, restrictions: AnnotationRestrictions)( implicit ctx: DBAccessContext): Fox[String] = { @@ -349,11 +344,6 @@ class AnnotationService @Inject()( }).flatten } - private def baseForTask(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[Annotation] = - (for { - list <- annotationDAO.findAllByTaskIdAndType(taskId, AnnotationType.TracingBase) - } yield list.headOption.toFox).flatten - def annotationsFor(taskId: ObjectId)(implicit ctx: DBAccessContext): Fox[List[Annotation]] = annotationDAO.findAllByTaskIdAndType(taskId, AnnotationType.Task) @@ -709,45 +699,13 @@ class AnnotationService @Inject()( updated <- annotationInformationProvider.provideAnnotation(typ, id, issuingUser) } yield updated - def resetToBase(annotation: Annotation)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[Unit] = // TODO: implement as update action? 
- annotation.typ match { - case AnnotationType.Explorational => - Fox.failure("annotation.revert.tasksOnly") - case AnnotationType.Task => - for { - task <- taskFor(annotation) - oldSkeletonTracingIdOpt <- annotation.skeletonTracingId // This also asserts that the annotation does not have multiple volume/skeleton layers - oldVolumeTracingIdOpt <- annotation.volumeTracingId - _ = logger.warn( - s"Resetting annotation ${annotation._id} to base, discarding skeleton tracing $oldSkeletonTracingIdOpt and/or volume tracing $oldVolumeTracingIdOpt") - annotationBase <- baseForTask(task._id) - dataset <- datasetDAO.findOne(annotationBase._dataset)(GlobalAccessContext) ?~> "dataset.notFoundForAnnotation" - (newSkeletonIdOpt, newVolumeIdOpt) <- tracingsFromBase(annotationBase, dataset) - _ <- Fox.bool2Fox(newSkeletonIdOpt.isDefined || newVolumeIdOpt.isDefined) ?~> "annotation.needsEitherSkeletonOrVolume" - _ <- Fox.runOptional(newSkeletonIdOpt)(newSkeletonId => - oldSkeletonTracingIdOpt.toFox.map { oldSkeletonId => - annotationLayersDAO.replaceTracingId(annotation._id, oldSkeletonId, newSkeletonId) - }) - _ <- Fox.runOptional(newVolumeIdOpt)(newVolumeId => - oldVolumeTracingIdOpt.toFox.map { oldVolumeId => - annotationLayersDAO.replaceTracingId(annotation._id, oldVolumeId, newVolumeId) - }) - } yield () - } - - private def tracingsFromBase(annotationBase: Annotation, dataset: Dataset)( - implicit ctx: DBAccessContext, - m: MessagesProvider): Fox[(Option[String], Option[String])] = + def resetToBase(annotation: Annotation)(implicit ctx: DBAccessContext): Fox[Unit] = for { - _ <- bool2Fox(dataset.isUsable) ?~> Messages("dataset.notImported", dataset.name) + _ <- bool2Fox(annotation.typ == AnnotationType.Task) ?~> "annotation.revert.tasksOnly" + dataset <- datasetDAO.findOne(annotation._dataset) tracingStoreClient <- tracingStoreService.clientFor(dataset) - baseSkeletonIdOpt <- annotationBase.skeletonTracingId - baseVolumeIdOpt <- annotationBase.volumeTracingId - newSkeletonId: 
Option[String] <- Fox.runOptional(baseSkeletonIdOpt)(skeletonId => - tracingStoreClient.duplicateSkeletonTracing(skeletonId)) - newVolumeId: Option[String] <- Fox.runOptional(baseVolumeIdOpt)(volumeId => - tracingStoreClient.duplicateVolumeTracing(volumeId)) - } yield (newSkeletonId, newVolumeId) + _ <- tracingStoreClient.resetToBase(annotation._id) ?~> "annotation.revert.failed" + } yield () private def settingsFor(annotation: Annotation)(implicit ctx: DBAccessContext) = if (annotation.typ == AnnotationType.Task || annotation.typ == AnnotationType.TracingBase) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index b832300245f..58a5a9f8bff 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -241,4 +241,11 @@ class WKRemoteTracingStoreClient( } yield data } + def resetToBase(annotationId: ObjectId): Fox[Unit] = + for { + _ <- rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/resetToBase").withLongTimeout + .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .post() + } yield () + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala similarity index 98% rename from webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala rename to webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala index 0210da35f84..c616069a119 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala @@ -24,7 +24,7 @@ import 
scala.collection.mutable import scala.concurrent.ExecutionContext import scala.reflect.ClassTag -object VolumeTracingDownsampling { +object VolumeTracingMags { private def magsForVolumeTracingByLayerName(dataSource: DataSourceLike, fallbackLayerName: Option[String]): List[Vec3Int] = { val fallbackLayer: Option[DataLayerLike] = @@ -48,7 +48,7 @@ object VolumeTracingDownsampling { } } -trait VolumeTracingDownsampling +trait VolumeTracingMags extends BucketKeys with ProtoGeometryImplicits with VolumeBucketCompression @@ -270,7 +270,7 @@ trait VolumeTracingDownsampling implicit tc: TokenContext): Fox[List[Vec3Int]] = for { dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSourceForTracing(oldTracingId) - magsForTracing = VolumeTracingDownsampling.magsForVolumeTracingByLayerName(dataSource, tracing.fallbackLayer) + magsForTracing = VolumeTracingMags.magsForVolumeTracingByLayerName(dataSource, tracing.fallbackLayer) } yield magsForTracing.sortBy(_.maxDim) protected def restrictMagList(tracing: VolumeTracing, magRestrictions: MagRestrictions): VolumeTracing = { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index bd8812f36a5..6ecf396ec85 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -61,7 +61,7 @@ class VolumeTracingService @Inject()( volumeSegmentIndexService: VolumeSegmentIndexService ) extends TracingService[VolumeTracing] with VolumeTracingBucketHelper - with VolumeTracingDownsampling + with VolumeTracingMags with WKWDataFormatHelper with FallbackDataHelper with DataFinder From bc51deb16b6282c4347e8dea7c168812fdee445b Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 
5 Nov 2024 11:23:46 +0100 Subject: [PATCH 144/361] remove volume tracing downsampling feature (backend) --- .../tracings/volume/VolumeTracingMags.scala | 257 +----------------- .../volume/VolumeTracingService.scala | 24 +- 2 files changed, 9 insertions(+), 272 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala index c616069a119..1c7f316bb39 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingMags.scala @@ -1,36 +1,13 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume -import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int -import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike, ElementClass} -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto -import com.scalableminds.webknossos.tracingstore.TSRemoteWebknossosClient +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike} import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.tracingstore.tracings.{ - FossilDBClient, - KeyValueStoreImplicits, - TracingDataStore, - VersionedKeyValuePair -} -import net.liftweb.common.Empty import com.scalableminds.webknossos.datastore.geometry.{Vec3IntProto => ProtoPoint3D} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import net.liftweb.common.Box import play.api.libs.json.{Format, Json} -import 
scala.collection.mutable -import scala.concurrent.ExecutionContext -import scala.reflect.ClassTag - -object VolumeTracingMags { - private def magsForVolumeTracingByLayerName(dataSource: DataSourceLike, - fallbackLayerName: Option[String]): List[Vec3Int] = { - val fallbackLayer: Option[DataLayerLike] = - fallbackLayerName.flatMap(name => dataSource.dataLayers.find(_.name == name)) - magsForVolumeTracing(dataSource, fallbackLayer) - } +object VolumeTracingMags extends ProtoGeometryImplicits { def magsForVolumeTracing(dataSource: DataSourceLike, fallbackLayer: Option[DataLayerLike]): List[Vec3Int] = { val fallbackLayerMags = fallbackLayer.map(_.resolutions) @@ -46,241 +23,15 @@ object VolumeTracingMags { } }.sortBy(_.maxDim) } -} - -trait VolumeTracingMags - extends BucketKeys - with ProtoGeometryImplicits - with VolumeBucketCompression - with KeyValueStoreImplicits - with ReversionHelper - with FoxImplicits { - - val tracingDataStore: TracingDataStore - val tracingStoreWkRpcClient: TSRemoteWebknossosClient - protected def saveBucket(dataLayer: VolumeTracingLayer, - bucket: BucketPosition, - data: Array[Byte], - version: Long, - toTemporaryStore: Boolean = false): Fox[Unit] - - protected def updateSegmentIndex(segmentIndexBuffer: VolumeSegmentIndexBuffer, - bucketPosition: BucketPosition, - bucketBytes: Array[Byte], - previousBucketBytesBox: Box[Array[Byte]], - elementClass: ElementClassProto, - mappingName: Option[String], - editableMappingTracingId: Option[String]): Fox[Unit] - - protected def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] - - protected def selectMappingName(tracing: VolumeTracing): Fox[Option[String]] - - protected def volumeSegmentIndexClient: FossilDBClient - - protected def downsampleWithLayer( - annotationId: String, // TODO required? 
- tracingId: String, - oldTracingId: String, - newTracing: VolumeTracing, - dataLayer: VolumeTracingLayer, - tracingService: VolumeTracingService)(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Vec3Int]] = { - val bucketVolume = 32 * 32 * 32 - for { - _ <- bool2Fox(newTracing.version == 0L) ?~> "Tracing has already been edited." - _ <- bool2Fox(newTracing.mags.nonEmpty) ?~> "Cannot downsample tracing with no mag list" - sourceMag = getSourceMag(newTracing) - magsToCreate <- getMagsToCreate(newTracing, oldTracingId) - elementClass = elementClassFromProto(newTracing.elementClass) - bucketDataMapMutable = new mutable.HashMap[BucketPosition, Array[Byte]]().withDefault(_ => revertedValue) - _ = fillMapWithSourceBucketsInplace(bucketDataMapMutable, tracingId, dataLayer, sourceMag) - originalBucketPositions = bucketDataMapMutable.keys.toList - updatedBucketsMutable = new mutable.ListBuffer[BucketPosition]() - _ = magsToCreate.foldLeft(sourceMag) { (previousMag, requiredMag) => - downsampleMagFromMag(previousMag, - requiredMag, - originalBucketPositions, - bucketDataMapMutable, - updatedBucketsMutable, - bucketVolume, - elementClass, - dataLayer) - requiredMag - } - fallbackLayer <- tracingService.getFallbackLayer(oldTracingId, newTracing) // remote wk does not know the new id yet - segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, - volumeSegmentIndexClient, - newTracing.version, - tracingService.remoteDatastoreClient, - fallbackLayer, - dataLayer.additionalAxes, - tc) - _ <- Fox.serialCombined(updatedBucketsMutable.toList) { bucketPosition: BucketPosition => - for { - _ <- saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), newTracing.version) - mappingName <- selectMappingName(newTracing) - _ <- Fox.runIfOptionTrue(newTracing.hasSegmentIndex)( - updateSegmentIndex( - segmentIndexBuffer, - bucketPosition, - bucketDataMapMutable(bucketPosition), - Empty, - newTracing.elementClass, - mappingName, - 
editableMappingTracingId(newTracing, tracingId) - )) - } yield () - } - _ <- segmentIndexBuffer.flush() - _ = logger.debug(s"Downsampled mags $magsToCreate from $sourceMag for volume tracing $tracingId.") - } yield sourceMag :: magsToCreate - } - - private def fillMapWithSourceBucketsInplace(bucketDataMap: mutable.Map[BucketPosition, Array[Byte]], - tracingId: String, - dataLayer: VolumeTracingLayer, - sourceMag: Vec3Int): Unit = { - val data: List[VersionedKeyValuePair[Array[Byte]]] = - tracingDataStore.volumeData.getMultipleKeys(None, Some(tracingId)) - data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => - val bucketPositionOpt = parseBucketKey(keyValuePair.key, dataLayer.additionalAxes).map(_._2) - bucketPositionOpt.foreach { bucketPosition => - if (bucketPosition.mag == sourceMag) { - bucketDataMap(bucketPosition) = decompressIfNeeded(keyValuePair.value, - expectedUncompressedBucketSizeFor(dataLayer), - s"bucket $bucketPosition during downsampling") - } - } - } - } - - private def downsampleMagFromMag(previousMag: Vec3Int, - requiredMag: Vec3Int, - originalBucketPositions: List[BucketPosition], - bucketDataMapMutable: mutable.Map[BucketPosition, Array[Byte]], - updatedBucketsMutable: mutable.ListBuffer[BucketPosition], - bucketVolume: Int, - elementClass: ElementClass.Value, - dataLayer: VolumeTracingLayer): Unit = { - val downScaleFactor = - Vec3Int(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) - downsampledBucketPositions(originalBucketPositions, requiredMag).foreach { downsampledBucketPosition => - val sourceBuckets: Seq[BucketPosition] = - sourceBucketPositionsFor(downsampledBucketPosition, downScaleFactor, previousMag) - val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMapMutable(_)) - val downsampledData: Array[Byte] = - if (sourceData.forall(_.sameElements(revertedValue))) - revertedValue - else { - val sourceDataFilled = fillZeroedIfNeeded(sourceData, bucketVolume, 
dataLayer.bytesPerElement) - val sourceDataTyped = UnsignedIntegerArray.fromByteArray(sourceDataFilled.toArray.flatten, elementClass) - val dataDownscaledTyped = - downsampleData(sourceDataTyped.grouped(bucketVolume).toArray, downScaleFactor, bucketVolume) - UnsignedIntegerArray.toByteArray(dataDownscaledTyped, elementClass) - } - bucketDataMapMutable(downsampledBucketPosition) = downsampledData - updatedBucketsMutable += downsampledBucketPosition - } - } - - private def downsampledBucketPositions(originalBucketPositions: List[BucketPosition], - requiredMag: Vec3Int): Set[BucketPosition] = - originalBucketPositions.map { bucketPosition: BucketPosition => - BucketPosition( - (bucketPosition.voxelMag1X / requiredMag.x / 32) * requiredMag.x * 32, - (bucketPosition.voxelMag1Y / requiredMag.y / 32) * requiredMag.y * 32, - (bucketPosition.voxelMag1Z / requiredMag.z / 32) * requiredMag.z * 32, - requiredMag, - bucketPosition.additionalCoordinates - ) - }.toSet - - private def sourceBucketPositionsFor(bucketPosition: BucketPosition, - downScaleFactor: Vec3Int, - previousMag: Vec3Int): Seq[BucketPosition] = - for { - z <- 0 until downScaleFactor.z - y <- 0 until downScaleFactor.y - x <- 0 until downScaleFactor.x - } yield { - BucketPosition( - bucketPosition.voxelMag1X + x * bucketPosition.bucketLength * previousMag.x, - bucketPosition.voxelMag1Y + y * bucketPosition.bucketLength * previousMag.y, - bucketPosition.voxelMag1Z + z * bucketPosition.bucketLength * previousMag.z, - previousMag, - bucketPosition.additionalCoordinates - ) - } - - private def fillZeroedIfNeeded(sourceData: Seq[Array[Byte]], - bucketVolume: Int, - bytesPerElement: Int): Seq[Array[Byte]] = - // Reverted buckets and missing buckets are represented by a single zero-byte. - // For downsampling, those need to be replaced with the full bucket volume of zero-bytes. 
- sourceData.map { sourceBucketData => - if (isRevertedElement(sourceBucketData)) { - Array.fill[Byte](bucketVolume * bytesPerElement)(0) - } else sourceBucketData - } - - private def downsampleData[T: ClassTag](data: Array[Array[T]], - downScaleFactor: Vec3Int, - bucketVolume: Int): Array[T] = { - val result = new Array[T](bucketVolume) - for { - z <- 0 until 32 - y <- 0 until 32 - x <- 0 until 32 - } { - val voxelSourceData: IndexedSeq[T] = for { - z_offset <- 0 until downScaleFactor.z - y_offset <- 0 until downScaleFactor.y - x_offset <- 0 until downScaleFactor.x - } yield { - val sourceVoxelPosition = - Vec3Int(x * downScaleFactor.x + x_offset, y * downScaleFactor.y + y_offset, z * downScaleFactor.z + z_offset) - val sourceBucketPosition = - Vec3Int(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) - val sourceVoxelPositionInSourceBucket = - Vec3Int(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) - val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z - val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 - data(sourceBucketIndex)(sourceVoxelIndex) - } - result(x + y * 32 + z * 32 * 32) = mode(voxelSourceData) - } - result - } - - private def mode[T](items: Seq[T]): T = - items.groupBy(i => i).view.mapValues(_.size).maxBy(_._2)._1 - - private def getSourceMag(tracing: VolumeTracing): Vec3Int = - tracing.mags.minBy(_.maxDim) - - private def getMagsToCreate(tracing: VolumeTracing, oldTracingId: String)( - implicit tc: TokenContext): Fox[List[Vec3Int]] = - for { - requiredMags <- getRequiredMags(tracing, oldTracingId) - sourceMag = getSourceMag(tracing) - magsToCreate = requiredMags.filter(_.maxDim > sourceMag.maxDim) - } yield magsToCreate - - private def getRequiredMags(tracing: VolumeTracing, 
oldTracingId: String)( - implicit tc: TokenContext): Fox[List[Vec3Int]] = - for { - dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSourceForTracing(oldTracingId) - magsForTracing = VolumeTracingMags.magsForVolumeTracingByLayerName(dataSource, tracing.fallbackLayer) - } yield magsForTracing.sortBy(_.maxDim) - protected def restrictMagList(tracing: VolumeTracing, magRestrictions: MagRestrictions): VolumeTracing = { + def restrictMagList(tracing: VolumeTracing, magRestrictions: MagRestrictions): VolumeTracing = { val tracingMags = resolveLegacyMagList(tracing.mags) val allowedMags = magRestrictions.filterAllowed(tracingMags.map(vec3IntFromProto)) tracing.withMags(allowedMags.map(vec3IntToProto)) } - protected def resolveLegacyMagList(mags: Seq[ProtoPoint3D]): Seq[ProtoPoint3D] = + def resolveLegacyMagList(mags: Seq[ProtoPoint3D]): Seq[ProtoPoint3D] = if (mags.isEmpty) Seq(ProtoPoint3D(1, 1, 1)) else mags } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 6ecf396ec85..3aea6db57be 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -61,7 +61,6 @@ class VolumeTracingService @Inject()( volumeSegmentIndexService: VolumeSegmentIndexService ) extends TracingService[VolumeTracing] with VolumeTracingBucketHelper - with VolumeTracingMags with WKWDataFormatHelper with FallbackDataHelper with DataFinder @@ -90,7 +89,7 @@ class VolumeTracingService @Inject()( private val fallbackLayerCache: AlfuCache[(String, Option[String], Option[String]), Option[RemoteFallbackLayer]] = AlfuCache(maxCapacity = 100) - override protected def updateSegmentIndex( + private def updateSegmentIndex( 
segmentIndexBuffer: VolumeSegmentIndexBuffer, bucketPosition: BucketPosition, bucketBytes: Array[Byte], @@ -173,10 +172,10 @@ class VolumeTracingService @Inject()( } } yield volumeTracing - override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = + def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = if (tracing.getHasEditableMapping) Some(tracingId) else None - def selectMappingName(tracing: VolumeTracing): Fox[Option[String]] = + private def selectMappingName(tracing: VolumeTracing): Fox[Option[String]] = if (tracing.getHasEditableMapping) Fox.failure("mappingName called on volumeTracing with editableMapping!") else Fox.successful(tracing.mappingName) @@ -495,7 +494,7 @@ class VolumeTracingService @Inject()( editRotation: Option[Vec3Double], newVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext): Fox[VolumeTracing] = { val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, isFromTask, datasetBoundingBox) - val tracingWithMagRestrictions = restrictMagList(tracingWithBB, magRestrictions) + val tracingWithMagRestrictions = VolumeTracingMags.restrictMagList(tracingWithBB, magRestrictions) for { fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer) @@ -607,19 +606,6 @@ class VolumeTracingService @Inject()( toTemporaryStore) } yield id - // TODO use or remove - def downsample(annotationId: String, tracingId: String, oldTracingId: String, newTracing: VolumeTracing)( - implicit tc: TokenContext): Fox[Unit] = - for { - resultingMags <- downsampleWithLayer(annotationId, - tracingId, - oldTracingId, - newTracing, - volumeTracingLayer(tracingId, newTracing), - this) - _ <- updateMagList(tracingId, newTracing, resultingMags.toSet) - } yield () - def volumeBucketsAreEmpty(tracingId: String): Boolean = volumeDataStore.getMultipleKeys(None, Some(tracingId), limit 
= Some(1))(toBox).isEmpty @@ -829,7 +815,7 @@ class VolumeTracingService @Inject()( else { val magSet = magSetFromZipfile(zipFile) val magsDoMatch = - magSet.isEmpty || magSet == resolveLegacyMagList(tracing.mags).map(vec3IntFromProto).toSet + magSet.isEmpty || magSet == VolumeTracingMags.resolveLegacyMagList(tracing.mags).map(vec3IntFromProto).toSet if (!magsDoMatch) Fox.failure("annotation.volume.magssDoNotMatch") From 7150c2562640295304e0ddc4eba114d2ab3d81cd Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 5 Nov 2024 11:56:00 +0100 Subject: [PATCH 145/361] also duplicate annotation v0 --- .../annotation/TSAnnotationService.scala | 16 ++++++++++++++-- .../tracings/volume/VolumeTracingService.scala | 2 +- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 2b020c5abdc..ed50ed09882 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -575,7 +575,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } - // TODO duplicate v0 as well? 
(if current version is not v0) def duplicate( annotationId: String, newAnnotationId: String, @@ -583,13 +582,26 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss isFromTask: Boolean, datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { + // Duplicate v0 + v0Annotation <- get(annotationId, Some(0L)) + v0NewLayers <- Fox.serialCombined(v0Annotation.annotationLayers)(layer => + duplicateLayer(annotationId, layer, v0Annotation.version, isFromTask, datasetBoundingBox)) + v0DuplicatedAnnotation = v0Annotation.copy(annotationLayers = v0NewLayers, + earliestAccessibleVersion = v0Annotation.version) + + _ <- tracingDataStore.annotations.put(newAnnotationId, v0Annotation.version, v0DuplicatedAnnotation) + + // Duplicate current currentAnnotation <- get(annotationId, version) newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)(layer => duplicateLayer(annotationId, layer, currentAnnotation.version, isFromTask, datasetBoundingBox)) - _ <- duplicateUpdates(annotationId, newAnnotationId) duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, earliestAccessibleVersion = currentAnnotation.version) _ <- tracingDataStore.annotations.put(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) + + // Duplicate updates + _ <- duplicateUpdates(annotationId, newAnnotationId) + } yield duplicatedAnnotation private def duplicateUpdates(annotationId: String, newAnnotationId: String)( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 3aea6db57be..3ee4de5b733 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -873,7 +873,7 @@ class VolumeTracingService @Inject()( persist: Boolean): Fox[Unit] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { - _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (used by compound annotations)" + _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox From 3ffdf3b97df1a61cdd84c9a05c95e66050ff066e Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 5 Nov 2024 12:13:13 +0100 Subject: [PATCH 146/361] use SequenceUtils --- .../controllers/TSAnnotationController.scala | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 2f543b04548..3ca55233653 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.controllers +import collections.SequenceUtils import com.google.inject.Inject import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.tools.Fox @@ -167,9 +168,12 @@ class TSAnnotationController @Inject()( volumeLayers = annotations.flatMap(_.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume)) newSkeletonId = TracingId.generate newVolumeId = 
TracingId.generate - mergedSkeletonName = allEqual(skeletonLayers.map(_.name)) + mergedSkeletonName = SequenceUtils + .findUniqueElement(skeletonLayers.map(_.name)) .getOrElse(AnnotationLayer.defaultSkeletonLayerName) - mergedVolumeName = allEqual(volumeLayers.map(_.name)).getOrElse(AnnotationLayer.defaultVolumeLayerName) + mergedVolumeName = SequenceUtils + .findUniqueElement(volumeLayers.map(_.name)) + .getOrElse(AnnotationLayer.defaultVolumeLayerName) // TODO: Merge updates? if so, iron out reverts? // TODO: Merge editable mappings volumeTracings <- annotationService @@ -222,10 +226,4 @@ class TSAnnotationController @Inject()( } } - // TODO generalize, mix with assertAllOnSame* - private def allEqual(str: Seq[String]): Option[String] = - // returns the str if all names are equal, None otherwise - str.headOption.map(name => str.forall(_ == name)).flatMap { _ => - str.headOption - } } From fb1b595b323439358fd80a421ca7ea911173584f Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 5 Nov 2024 12:17:52 +0100 Subject: [PATCH 147/361] cleanup todo comments --- app/models/annotation/WKRemoteTracingStoreClient.scala | 4 +--- .../datastore/services/DSRemoteTracingstoreClient.scala | 2 -- .../tracingstore/annotation/TSAnnotationService.scala | 4 ++-- .../tracingstore/controllers/TSAnnotationController.scala | 4 ++-- .../tracings/editablemapping/EditableMappingStreams.scala | 4 ++-- .../tracingstore/tracings/volume/VolumeTracingService.scala | 3 +-- 6 files changed, 8 insertions(+), 13 deletions(-) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 58a5a9f8bff..eb516b9ed5d 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -108,8 +108,7 @@ class WKRemoteTracingStoreClient( } // Used in task creation. History is dropped, new version will be zero. - // TODO: currently also used in resetToBase. Fix that. 
- def duplicateSkeletonTracing(skeletonTracingId: String, // TODO: might also need annotation id + def duplicateSkeletonTracing(skeletonTracingId: String, editPosition: Option[Vec3Int] = None, editRotation: Option[Vec3Double] = None, boundingBox: Option[BoundingBox] = None): Fox[String] = @@ -121,7 +120,6 @@ class WKRemoteTracingStoreClient( .postWithJsonResponse[String]() // Used in task creation. History is dropped, new version will be zero. - // TODO: currently also used in resetToBase. Fix that. def duplicateVolumeTracing(volumeTracingId: String, magRestrictions: MagRestrictions = MagRestrictions.empty, editPosition: Option[Vec3Int] = None, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala index 42473d29a21..5bd69d4d7c9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteTracingstoreClient.scala @@ -70,11 +70,9 @@ class DSRemoteTracingstoreClient @Inject()( rpc(s"$tracingStoreUri/tracings/volume/${getZarrVersionDependantSubPath(zarrVersion)}/json/$tracingId").withTokenFromContext .getWithJsonResponse[List[String]] - // TODO annotation id def getZGroup(tracingId: String, tracingStoreUri: String)(implicit tc: TokenContext): Fox[JsObject] = rpc(s"$tracingStoreUri/tracings/volume/zarr/$tracingId/.zgroup").withTokenFromContext.getWithJsonResponse[JsObject] - // TODO annotation id def getEditableMappingSegmentIdsForAgglomerate(tracingStoreUri: String, tracingId: String, agglomerateId: Long)( implicit tc: TokenContext): Fox[EditableMappingSegmentListResult] = rpc(s"$tracingStoreUri/tracings/mapping/$tracingId/segmentsForAgglomerate") diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index ed50ed09882..82c44609e27 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -157,7 +157,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) case a: RevertToVersionAnnotationAction => - revertToVersion(annotationId, annotationWithTracings, a, targetVersion) // TODO: if the revert action is not isolated, we need not the target version of all but the target version of this update + revertToVersion(annotationId, annotationWithTracings, a, targetVersion) // TODO double check that the revert action is isolated and targetVersion is that of group case _: ResetToBaseAnnotationAction => resetToBase(annotationId, annotationWithTracings, targetVersion) case _: BucketMutatingVolumeUpdateAction => @@ -269,7 +269,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId, annotationWithTracings, annotation.version, - targetVersion) // TODO: targetVersion must be set per update group, as reverts may come between these + targetVersion) // TODO double-check that targetVersion is set per update group, as reverts may come between these updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesGroupsRegrouped, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 3ca55233653..0a8fb5081fe 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -174,8 +174,8 @@ class TSAnnotationController @Inject()( mergedVolumeName = SequenceUtils .findUniqueElement(volumeLayers.map(_.name)) .getOrElse(AnnotationLayer.defaultVolumeLayerName) - // TODO: Merge updates? if so, iron out reverts? - // TODO: Merge editable mappings + // TODO Merge updates? if so, iron out reverts? + // TODO Merge editable mappings volumeTracings <- annotationService .findMultipleVolumes(volumeLayers.map { l => Some(TracingSelector(l.tracingId)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala index cae82ad526c..e9c7422ecd3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala @@ -61,7 +61,7 @@ class VersionedAgglomerateToGraphIterator(prefix: String, case None => getNextNonRevertedGraph.get } nextGraph = None - // TODO: parse graph key? (=agglomerate id) + // TODO parse graph key? (=agglomerate id) (nextRes.key, nextRes.value, nextRes.version) } @@ -116,7 +116,7 @@ class VersionedSegmentToAgglomerateChunkIterator(prefix: String, case None => getNextNonRevertedChunk.get } nextChunk = None - // TODO: parse chunk key? + // TODO parse chunk key? 
(nextRes.key, nextRes.value, nextRes.version) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 3ee4de5b733..02902f222ce 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -46,7 +46,6 @@ import scala.concurrent.duration._ class VolumeTracingService @Inject()( val tracingDataStore: TracingDataStore, - val tracingStoreWkRpcClient: TSRemoteWebknossosClient, val adHocMeshServiceHolder: AdHocMeshServiceHolder, implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing], implicit val temporaryVolumeDataStore: TemporaryVolumeDataStore, @@ -878,7 +877,7 @@ class VolumeTracingService @Inject()( remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" - // TODO: _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) + // TODO _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) } yield () } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty From 89c4ec28103118b16937c93139cf1ef83e85dc6c Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 5 Nov 2024 13:28:09 +0100 Subject: [PATCH 148/361] remap action tracing ids in duplicate --- .../annotation/TSAnnotationService.scala | 76 +++++++++++++------ .../annotation/UpdateActions.scala | 1 + .../SkeletonTracingController.scala | 8 +- .../controllers/VolumeTracingController.scala | 34 
++------- .../EditableMappingUpdateActions.scala | 8 +- .../updating/SkeletonUpdateActions.scala | 66 ++++++++-------- .../tracings/volume/VolumeUpdateActions.scala | 28 +++++++ 7 files changed, 134 insertions(+), 87 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 82c44609e27..af980386d86 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -582,10 +582,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss isFromTask: Boolean, datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { - // Duplicate v0 v0Annotation <- get(annotationId, Some(0L)) + + // Duplicate updates + tracingIdMap <- duplicateUpdates(annotationId, newAnnotationId, v0Annotation.annotationLayers.map(_.tracingId)) + + // Duplicate v0 v0NewLayers <- Fox.serialCombined(v0Annotation.annotationLayers)(layer => - duplicateLayer(annotationId, layer, v0Annotation.version, isFromTask, datasetBoundingBox)) + duplicateLayer(annotationId, layer, tracingIdMap, v0Annotation.version, isFromTask, datasetBoundingBox)) v0DuplicatedAnnotation = v0Annotation.copy(annotationLayers = v0NewLayers, earliestAccessibleVersion = v0Annotation.version) @@ -594,40 +598,62 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss // Duplicate current currentAnnotation <- get(annotationId, version) newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)(layer => - duplicateLayer(annotationId, layer, currentAnnotation.version, isFromTask, datasetBoundingBox)) + duplicateLayer(annotationId, layer, tracingIdMap, 
currentAnnotation.version, isFromTask, datasetBoundingBox)) duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, earliestAccessibleVersion = currentAnnotation.version) _ <- tracingDataStore.annotations.put(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) - // Duplicate updates - _ <- duplicateUpdates(annotationId, newAnnotationId) - } yield duplicatedAnnotation - private def duplicateUpdates(annotationId: String, newAnnotationId: String)( - implicit ec: ExecutionContext): Fox[Unit] = + private def duplicateUpdates(annotationId: String, newAnnotationId: String, v0TracingIds: Seq[String])( + implicit ec: ExecutionContext): Fox[Map[String, String]] = { + val tracingIdMapMutable = scala.collection.mutable.Map[String, String]() + v0TracingIds.foreach { v0TracingId => + tracingIdMapMutable.put(v0TracingId, TracingId.generate) + } // TODO memory: batch + for { - updatesAsBytes: Seq[(Long, Array[Byte])] <- tracingDataStore.annotationUpdates - .getMultipleVersionsAsVersionValueTuple(annotationId) - _ <- Fox.serialCombined(updatesAsBytes) { - case (version, updateBytes) => - tracingDataStore.annotationUpdates.put(newAnnotationId, version, updateBytes) + updateLists: Seq[(Long, List[UpdateAction])] <- tracingDataStore.annotationUpdates + .getMultipleVersionsAsVersionValueTuple(annotationId)(fromJsonBytes[List[UpdateAction]]) + _ <- Fox.serialCombined(updateLists) { + case (version, updateList) => + for { + updateListAdapted <- Fox.serialCombined(updateList) { + case a: AddLayerAnnotationAction => + for { + actionTracingId <- a.tracingId ?~> "duplicating addLayer without tracingId" + _ = if (!tracingIdMapMutable.contains(actionTracingId)) { + a.tracingId.foreach(actionTracingId => tracingIdMapMutable.put(actionTracingId, TracingId.generate)) + } + mappedTracingId <- tracingIdMapMutable.get(actionTracingId) ?~> "duplicating action for unknown layer" + } yield a.copy(tracingId = Some(mappedTracingId)) + case a: LayerUpdateAction => + for { 
+ mappedTracingId <- tracingIdMapMutable.get(a.actionTracingId) ?~> "duplicating action for unknown layer" + } yield a.withActionTracingId(mappedTracingId) + } + _ <- tracingDataStore.annotationUpdates.put(newAnnotationId, version, Json.toJson(updateListAdapted)) + } yield () } - } yield () + } yield tracingIdMapMutable.toMap + } private def duplicateLayer(annotationId: String, layer: AnnotationLayerProto, + tracingIdMap: Map[String, String], version: Long, isFromTask: Boolean, datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationLayerProto] = for { - newTracingId <- layer.`type` match { + newTracingId <- tracingIdMap.get(layer.tracingId) ?~> "duplicate unknown layer" + _ <- layer.`type` match { case AnnotationLayerTypeProto.Volume => duplicateVolumeTracing(annotationId, layer.tracingId, version, + newTracingId, version, isFromTask, None, @@ -636,7 +662,15 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss None, None) case AnnotationLayerTypeProto.Skeleton => - duplicateSkeletonTracing(annotationId, layer.tracingId, version, version, isFromTask, None, None, None) + duplicateSkeletonTracing(annotationId, + layer.tracingId, + version, + newTracingId, + version, + isFromTask, + None, + None, + None) case AnnotationLayerTypeProto.Unrecognized(num) => Fox.failure(f"unrecognized annotation layer type: $num") } } yield layer.copy(tracingId = newTracingId) @@ -645,14 +679,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss sourceAnnotationId: String, sourceTracingId: String, sourceVersion: Long, + newTracingId: String, newVersion: Long, isFromTask: Boolean, boundingBox: Option[BoundingBox], datasetBoundingBox: Option[BoundingBox], magRestrictions: MagRestrictions, editPosition: Option[Vec3Int], - editRotation: Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { - val newTracingId = TracingId.generate + editRotation: 
Option[Vec3Double])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = for { sourceTracing <- findVolume(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) newTracing <- volumeTracingService.adaptVolumeForDuplicate(sourceTracingId, @@ -671,7 +705,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- Fox.runIf(newTracing.getHasEditableMapping)( duplicateEditableMapping(sourceAnnotationId, sourceTracingId, newTracingId, sourceVersion, newVersion)) } yield newTracingId - } private def duplicateEditableMapping(sourceAnnotationId: String, sourceTracingId: String, @@ -692,12 +725,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss sourceAnnotationId: String, sourceTracingId: String, sourceVersion: Long, + newTracingId: String, newVersion: Long, isFromTask: Boolean, editPosition: Option[Vec3Int], editRotation: Option[Vec3Double], - boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = { - val newTracingId = TracingId.generate + boundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[String] = for { skeleton <- findSkeleton(sourceAnnotationId, sourceTracingId, Some(sourceVersion)) adaptedSkeleton = skeletonTracingService.adaptSkeletonForDuplicate(skeleton, @@ -708,6 +741,5 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss newVersion) _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) } yield newTracingId - } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index c81be809cb7..e10789e0210 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -56,6 +56,7 @@ trait ApplyImmediatelyUpdateAction extends UpdateAction trait LayerUpdateAction extends UpdateAction { def actionTracingId: String + def withActionTracingId(newTracingId: String): LayerUpdateAction } object UpdateAction { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 0c3bfbc1b1d..431bc0ead7e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.TracingSelector +import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingSelector} import com.scalableminds.webknossos.tracingstore.tracings.skeleton._ import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} import play.api.i18n.Messages @@ -101,7 +101,10 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin for { mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracings.flatten)) processedTracing = skeletonTracingService.remapTooLargeTreeIds(mergedTracing) - newId <- skeletonTracingService.save(processedTracing, None, processedTracing.version, toTemporaryStore = !persist) + newId <- 
skeletonTracingService.save(processedTracing, + None, + processedTracing.version, + toTemporaryStore = !persist) } yield Ok(Json.toJson(newId)) } } @@ -126,6 +129,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin annotationId, sourceTracingId = tracingId, sourceVersion = newestSourceVersion, + newTracingId = TracingId.generate, newVersion = 0, editPosition = editPositionParsed, editRotation = editRotationParsed, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index c7444ae982d..b3683464bff 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -8,40 +8,17 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, Volu import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} -import com.scalableminds.webknossos.datastore.helpers.{ - GetSegmentIndexParameters, - ProtoGeometryImplicits, - SegmentStatisticsParameters -} +import com.scalableminds.webknossos.datastore.helpers.{GetSegmentIndexParameters, ProtoGeometryImplicits, SegmentStatisticsParameters} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} -import com.scalableminds.webknossos.datastore.models.{ - LengthUnit, - VoxelSize, - WebknossosAdHocMeshRequest, - WebknossosDataRequest -} +import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize, WebknossosAdHocMeshRequest, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.rpc.RPC import 
com.scalableminds.webknossos.datastore.services.{FullMeshRequest, UserAccessRequest} import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - ImportVolumeDataVolumeAction, - MagRestrictions, - MergedVolumeStats, - TSFullMeshService, - VolumeDataZipFormat, - VolumeSegmentIndexService, - VolumeSegmentStatisticsService, - VolumeTracingService -} -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingSelector} -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteDatastoreClient, - TSRemoteWebknossosClient, - TracingStoreAccessTokenService, - TracingStoreConfig -} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ImportVolumeDataVolumeAction, MagRestrictions, MergedVolumeStats, TSFullMeshService, VolumeDataZipFormat, VolumeSegmentIndexService, VolumeSegmentStatisticsService, VolumeTracingService} +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingId, TracingSelector} +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig} import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile @@ -400,6 +377,7 @@ class VolumeTracingController @Inject()( annotationId, sourceTracingId = tracingId, sourceVersion = newestSourceVersion, + newTracingId = TracingId.generate, newVersion = 0, editPosition = editPositionParsed, editRotation = editRotationParsed, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 2bbbda3643a..9cd4ae51d32 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -21,11 +21,11 @@ case class SplitAgglomerateUpdateAction(agglomerateId: Long, info: Option[String] = None) extends EditableMappingUpdateAction { override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } object SplitAgglomerateUpdateAction { @@ -47,11 +47,11 @@ case class MergeAgglomerateUpdateAction(agglomerateId1: Long, info: Option[String] = None) extends EditableMappingUpdateAction { override def addTimestamp(timestamp: Long): EditableMappingUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } object MergeAgglomerateUpdateAction { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index 4b7a79a4be8..1ca66e9f5cd 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -54,6 +54,8 @@ case class CreateTreeSkeletonAction(id: Int, override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteTreeSkeletonAction(id: Int, @@ -67,11 +69,11 @@ case class DeleteTreeSkeletonAction(id: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTreeSkeletonAction(id: Int, @@ -110,6 +112,8 @@ case class UpdateTreeSkeletonAction(id: Int, override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class MergeTreeSkeletonAction(sourceId: Int, @@ -137,11 +141,11 @@ case class MergeTreeSkeletonAction(sourceId: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + 
override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], @@ -179,11 +183,11 @@ case class MoveTreeComponentSkeletonAction(nodeIds: List[Int], override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class CreateEdgeSkeletonAction(source: Int, @@ -202,11 +206,11 @@ case class CreateEdgeSkeletonAction(source: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteEdgeSkeletonAction(source: Int, @@ -225,11 +229,12 @@ case class DeleteEdgeSkeletonAction(source: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) + } case class CreateNodeSkeletonAction(id: Int, @@ -272,11 +277,11 @@ case class CreateNodeSkeletonAction(id: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def 
addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateNodeSkeletonAction(id: Int, @@ -321,12 +326,11 @@ case class UpdateNodeSkeletonAction(id: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class DeleteNodeSkeletonAction(nodeId: Int, @@ -347,11 +351,11 @@ case class DeleteNodeSkeletonAction(nodeId: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTreeGroupsSkeletonAction(treeGroups: List[UpdateActionTreeGroup], @@ -366,11 +370,11 @@ case class UpdateTreeGroupsSkeletonAction(treeGroups: List[UpdateActionTreeGroup override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class 
UpdateTracingSkeletonAction(activeNode: Option[Int], @@ -397,11 +401,11 @@ case class UpdateTracingSkeletonAction(activeNode: Option[Int], override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def isViewOnlyChange: Boolean = true } @@ -422,11 +426,11 @@ case class UpdateTreeVisibilitySkeletonAction(treeId: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def isViewOnlyChange: Boolean = true } @@ -463,11 +467,11 @@ case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, @@ -487,11 +491,11 @@ case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = 
info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundingBox], @@ -505,11 +509,11 @@ case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundi override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[Int], @@ -533,11 +537,11 @@ case class UpdateUserBoundingBoxVisibilitySkeletonAction(boundingBoxId: Option[I override def addTimestamp(timestamp: Long): UpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def isViewOnlyChange: Boolean = true } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index a3e7ff236b2..20cb63206ed 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -48,6 +48,8 @@ case class UpdateBucketVolumeAction(position: Vec3Int, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) def withoutBase64Data: UpdateBucketVolumeAction = this.copy(base64Data = None) @@ -70,6 +72,8 @@ case class UpdateTracingVolumeAction( override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def isViewOnlyChange: Boolean = true @@ -95,6 +99,8 @@ case class UpdateUserBoundingBoxesVolumeAction(boundingBoxes: List[NamedBounding override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.withUserBoundingBoxes(boundingBoxes.map(_.toProto)) @@ -111,6 +117,8 @@ case class UpdateUserBoundingBoxVisibilityVolumeAction(boundingBoxId: Option[Int override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): 
VolumeTracing = { @@ -137,6 +145,8 @@ case class RemoveFallbackLayerVolumeAction(actionTracingId: String, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.clearFallbackLayer @@ -152,6 +162,8 @@ case class ImportVolumeDataVolumeAction(actionTracingId: String, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.copy(largestSegmentId = largestSegmentId) @@ -166,6 +178,8 @@ case class AddSegmentIndexVolumeAction(actionTracingId: String, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.copy(hasSegmentIndex = Some(true)) @@ -192,6 +206,8 @@ case class CreateSegmentVolumeAction(id: Long, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = { val newSegment = @@ -230,6 
+246,8 @@ case class UpdateSegmentVolumeAction(id: Long, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = { def segmentTransform(segment: Segment): Segment = @@ -258,6 +276,8 @@ case class DeleteSegmentVolumeAction(id: Long, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = tracing.withSegments(tracing.segments.filter(_.segmentId != id)) @@ -274,6 +294,8 @@ case class DeleteSegmentDataVolumeAction(id: Long, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } case class UpdateMappingNameVolumeAction(mappingName: Option[String], @@ -288,6 +310,8 @@ case class UpdateMappingNameVolumeAction(mappingName: Option[String], override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) override def applyOn(tracing: VolumeTracing): VolumeTracing = if (tracing.mappingIsLocked.getOrElse(false)) tracing // cannot change mapping name 
if it is locked @@ -311,6 +335,8 @@ case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegme override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } // TODO this now exists only for UpdateBucket. Make it a slimmed down version of that rather than generic? @@ -325,6 +351,8 @@ case class CompactVolumeUpdateAction(name: String, override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = this.copy(actionAuthorId = authorId) override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) } object CompactVolumeUpdateAction { From b89a7f6a088e558d913244935e4deedf01cbe271 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 5 Nov 2024 16:22:25 +0100 Subject: [PATCH 149/361] re-add accidentally deleted function --- frontend/javascripts/admin/admin_rest_api.ts | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index b20e7978b7e..faa53e48e81 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -913,6 +913,17 @@ export function getUpdateActionLog( }); } +export function getNewestVersionForTracing( + tracingStoreUrl: string, + annotationId: string, +): Promise { + return doWithToken((token) => + Request.receiveJSON( + `${tracingStoreUrl}/tracings/annotation/${annotationId}/newestVersion?token=${token}`, + ).then((obj) => obj.version), + ); +} + export async function getNewestVersionOfTracing( tracingStoreUrl: string, annotationId: string, From 
5089c69c62593b3c578256fac8c51668c70d2a51 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 7 Nov 2024 14:21:06 +0100 Subject: [PATCH 150/361] update todo comments --- .../tracingstore/annotation/TSAnnotationService.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index af980386d86..26fa3731a30 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -157,7 +157,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case a: EditableMappingUpdateAction => annotationWithTracings.applyEditableMappingAction(a) case a: RevertToVersionAnnotationAction => - revertToVersion(annotationId, annotationWithTracings, a, targetVersion) // TODO double check that the revert action is isolated and targetVersion is that of group + revertToVersion(annotationId, annotationWithTracings, a, targetVersion) case _: ResetToBaseAnnotationAction => resetToBase(annotationId, annotationWithTracings, targetVersion) case _: BucketMutatingVolumeUpdateAction => @@ -269,7 +269,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId, annotationWithTracings, annotation.version, - targetVersion) // TODO double-check that targetVersion is set per update group, as reverts may come between these + targetVersion) // Note: this targetVersion is overwritten for each update group, see annotation.withNewUpdaters updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesGroupsRegrouped, @@ -421,7 +421,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } 
private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings)(implicit ec: ExecutionContext) = - // TODO skip some flushes to save disk space (e.g. skeletons only nth version, or only if requested?) + // TODO skip some flushes to save disk space (for non-updated layers) for { _ <- Fox.serialCombined(annotationWithTracings.getVolumes) { case (volumeTracingId, volumeTracing) => From 6dfc1ba7ecca5e564e7afda0ebeefa2d90f7264a Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 7 Nov 2024 15:01:50 +0100 Subject: [PATCH 151/361] adapt e2e tests --- docker-compose.yml | 2 +- .../test/backend-snapshot-tests/annotations.e2e.ts | 7 +++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 76f802426bb..26f1250872d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -273,7 +273,7 @@ services: command: - fossildb - -c - - skeletons,skeletonUpdates,volumes,volumeData,volumeUpdates,volumeSegmentIndex,editableMappings,editableMappingUpdates,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate + - skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates user: ${USER_UID:-fossildb}:${USER_GID:-fossildb} fossildb-persisted: diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index ffb9b0243c7..75a08e42fad 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -151,10 +151,9 @@ test.serial("getTracingsForAnnotation() for hybrid", async (t) => { }); async function sendUpdateActionsForSkeleton(explorational: APIAnnotation, queue: SaveQueueEntry[]) { - const skeletonTracing = getSkeletonDescriptor(explorational); - if (skeletonTracing == null) throw new 
Error("No skeleton annotation present."); + console.log("explorational.annotationId:", explorational.annotationId); return sendRequestWithToken( - `${explorational.tracingStore.url}/tracings/skeleton/${skeletonTracing.tracingId}/update?token=`, + `${explorational.tracingStore.url}/tracings/annotation/${explorational.id}/update?token=`, { method: "POST", data: queue, @@ -244,7 +243,7 @@ test("Update Metadata for Skeleton Tracing", async (t) => { }; const updateTreeAction = UpdateActions.updateTree(trees[1]); const [saveQueue] = addVersionNumbers( - createSaveQueueFromUpdateActions([createTreesUpdateActions, [updateTreeAction]], 123456789), + createSaveQueueFromUpdateActions([createTreesUpdateActions, [updateTreeAction]], 123456789, createdExplorational.annotationLayers[0].tracingId), 0, ); From cb46fcae1e4dbd4fbf6053e1aeb7e9e9c06151f6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 7 Nov 2024 15:02:32 +0100 Subject: [PATCH 152/361] rename sendUpdateActions in e2e test --- .../test/backend-snapshot-tests/annotations.e2e.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 75a08e42fad..a1dc56c8bb0 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -150,7 +150,7 @@ test.serial("getTracingsForAnnotation() for hybrid", async (t) => { }); }); -async function sendUpdateActionsForSkeleton(explorational: APIAnnotation, queue: SaveQueueEntry[]) { +async function sendUpdateActions(explorational: APIAnnotation, queue: SaveQueueEntry[]) { console.log("explorational.annotationId:", explorational.annotationId); return sendRequestWithToken( `${explorational.tracingStore.url}/tracings/annotation/${explorational.id}/update?token=`, @@ -179,7 +179,7 @@ test.serial("Send update actions and compare resulting 
tracing", async (t) => { ), 0, ); - await sendUpdateActionsForSkeleton(createdExplorational, saveQueue); + await sendUpdateActions(createdExplorational, saveQueue); const tracings = await api.getTracingsForAnnotation(createdExplorational); t.snapshot(replaceVolatileValues(tracings[0])); }); @@ -214,7 +214,7 @@ test("Send complex update actions and compare resulting tracing", async (t) => { ), 0, ); - await sendUpdateActionsForSkeleton(createdExplorational, saveQueue); + await sendUpdateActions(createdExplorational, saveQueue); const tracings = await api.getTracingsForAnnotation(createdExplorational); t.snapshot(replaceVolatileValues(tracings[0])); }); @@ -247,7 +247,7 @@ test("Update Metadata for Skeleton Tracing", async (t) => { 0, ); - await sendUpdateActionsForSkeleton(createdExplorational, saveQueue); + await sendUpdateActions(createdExplorational, saveQueue); const tracings = await api.getTracingsForAnnotation(createdExplorational); t.snapshot(replaceVolatileValues(tracings[0])); }); From c81f28b8afa5c982a8081a728fbcde109c4bc6d4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 7 Nov 2024 15:26:14 +0100 Subject: [PATCH 153/361] skip flushing tracings with no changes --- .../annotation/TSAnnotationService.scala | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 26fa3731a30..119f8ef7721 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -411,7 +411,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss updatedWithNewVerson = updated.withVersion(targetVersion) _ = logger.info(s"flushing 
v$targetVersion, with ${updated.skeletonStats}") _ <- updatedWithNewVerson.flushBufferedUpdates() - _ <- flushUpdatedTracings(updatedWithNewVerson) + _ <- flushUpdatedTracings(updatedWithNewVerson, updates) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) _ <- Fox.runIf(reportChangesToWk)(remoteWebknossosClient.updateAnnotation( annotationId, @@ -420,24 +420,36 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } - private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings)(implicit ec: ExecutionContext) = - // TODO skip some flushes to save disk space (for non-updated layers) + private def flushUpdatedTracings(annotationWithTracings: AnnotationWithTracings, updates: List[UpdateAction])( + implicit ec: ExecutionContext) = { + // Flush updated tracing objects, but only if they were updated. + // If they weren’t updated, the older versions that will automatically be fetched are guaranteed identical + val tracingIdsWithUpdates = updates.flatMap { + case a: LayerUpdateAction => Some(a.actionTracingId) + case a: AddLayerAnnotationAction => Some(a.tracingId) + case _ => None + }.toSet for { _ <- Fox.serialCombined(annotationWithTracings.getVolumes) { - case (volumeTracingId, volumeTracing) => + case (volumeTracingId, volumeTracing) if tracingIdsWithUpdates.contains(volumeTracingId) => tracingDataStore.volumes.put(volumeTracingId, volumeTracing.version, volumeTracing) + case _ => Fox.successful(()) } _ <- Fox.serialCombined(annotationWithTracings.getSkeletons) { - case (skeletonTracingId, skeletonTracing: SkeletonTracing) => + case (skeletonTracingId, skeletonTracing: SkeletonTracing) + if tracingIdsWithUpdates.contains(skeletonTracingId) => tracingDataStore.skeletons.put(skeletonTracingId, skeletonTracing.version, skeletonTracing) + case _ => Fox.successful(()) } _ <- Fox.serialCombined(annotationWithTracings.getEditableMappingsInfo) { - case (volumeTracingId, editableMappingInfo) => + case 
(volumeTracingId, editableMappingInfo) if tracingIdsWithUpdates.contains(volumeTracingId) => tracingDataStore.editableMappingsInfo.put(volumeTracingId, annotationWithTracings.version, editableMappingInfo) + case _ => Fox.successful(()) } } yield () + } private def flushAnnotationInfo(annotationId: String, annotationWithTracings: AnnotationWithTracings) = tracingDataStore.annotations.put(annotationId, annotationWithTracings.version, annotationWithTracings.annotation) From b1ae06cd66a4147c4eec19851c0def8fe3e4823f Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 7 Nov 2024 15:37:19 +0100 Subject: [PATCH 154/361] remove (broken) morton index from volume bucket keys --- .../dataformats/wkw/WKWDataFormatHelper.scala | 2 +- .../tracings/volume/VolumeTracingBucketHelper.scala | 12 +++++------- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala index 98e10cd876d..750b3733283 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala @@ -6,7 +6,7 @@ import com.scalableminds.webknossos.datastore.models.BucketPosition trait WKWDataFormatHelper { - val dataFileExtension: String = "wkw" + private val dataFileExtension: String = "wkw" val FILENAME_HEADER_WKW: String = s"header.$dataFileExtension" // Assumes single-bucket wkw files, as for volume tracings diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index fee555b9788..2272915921b 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -2,7 +2,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.dataformats.wkw.{MortonEncoding, WKWDataFormatHelper} +import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer, ElementClass} import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.services.DataConverter @@ -80,20 +80,18 @@ trait AdditionalCoordinateKey { } } -trait BucketKeys extends MortonEncoding with WKWDataFormatHelper with LazyLogging with AdditionalCoordinateKey { +trait BucketKeys extends WKWDataFormatHelper with AdditionalCoordinateKey { protected def buildBucketKey(dataLayerName: String, bucket: BucketPosition, - additionalAxes: Option[Seq[AdditionalAxis]]): String = { - val mortonIndex = mortonEncode(bucket.bucketX, bucket.bucketY, bucket.bucketZ) + additionalAxes: Option[Seq[AdditionalAxis]]): String = (bucket.additionalCoordinates, additionalAxes, bucket.hasAdditionalCoordinates) match { case (Some(additionalCoordinates), Some(axes), true) => - s"$dataLayerName/${bucket.mag.toMagLiteral(allowScalar = true)}/$mortonIndex-[${additionalCoordinatesKeyPart( + s"$dataLayerName/${bucket.mag.toMagLiteral(allowScalar = true)}/[${additionalCoordinatesKeyPart( additionalCoordinates, axes)}][${bucket.bucketX},${bucket.bucketY},${bucket.bucketZ}]" case _ => - s"$dataLayerName/${bucket.mag.toMagLiteral(allowScalar = 
true)}/$mortonIndex-[${bucket.bucketX},${bucket.bucketY},${bucket.bucketZ}]" + s"$dataLayerName/${bucket.mag.toMagLiteral(allowScalar = true)}/[${bucket.bucketX},${bucket.bucketY},${bucket.bucketZ}]" } - } protected def buildKeyPrefix(dataLayerName: String): String = s"$dataLayerName/" From 8b5e2db939e2ace3a24ef0062bd176bfc2ca743a Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 7 Nov 2024 15:49:56 +0100 Subject: [PATCH 155/361] flush all tracings if a revert or resetToBase action is present --- .../annotation/TSAnnotationService.scala | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 119f8ef7721..8b95e172385 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -424,6 +424,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss implicit ec: ExecutionContext) = { // Flush updated tracing objects, but only if they were updated. 
// If they weren’t updated, the older versions that will automatically be fetched are guaranteed identical + val allMayHaveUpdates = updates.exists { update: UpdateAction => + update match { + case _: RevertToVersionAnnotationAction => true + case _: ResetToBaseAnnotationAction => true + case _ => false + } + } val tracingIdsWithUpdates = updates.flatMap { case a: LayerUpdateAction => Some(a.actionTracingId) case a: AddLayerAnnotationAction => Some(a.tracingId) @@ -431,18 +438,19 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss }.toSet for { _ <- Fox.serialCombined(annotationWithTracings.getVolumes) { - case (volumeTracingId, volumeTracing) if tracingIdsWithUpdates.contains(volumeTracingId) => + case (volumeTracingId, volumeTracing) if allMayHaveUpdates || tracingIdsWithUpdates.contains(volumeTracingId) => tracingDataStore.volumes.put(volumeTracingId, volumeTracing.version, volumeTracing) case _ => Fox.successful(()) } _ <- Fox.serialCombined(annotationWithTracings.getSkeletons) { case (skeletonTracingId, skeletonTracing: SkeletonTracing) - if tracingIdsWithUpdates.contains(skeletonTracingId) => + if allMayHaveUpdates || tracingIdsWithUpdates.contains(skeletonTracingId) => tracingDataStore.skeletons.put(skeletonTracingId, skeletonTracing.version, skeletonTracing) case _ => Fox.successful(()) } _ <- Fox.serialCombined(annotationWithTracings.getEditableMappingsInfo) { - case (volumeTracingId, editableMappingInfo) if tracingIdsWithUpdates.contains(volumeTracingId) => + case (volumeTracingId, editableMappingInfo) + if allMayHaveUpdates || tracingIdsWithUpdates.contains(volumeTracingId) => tracingDataStore.editableMappingsInfo.put(volumeTracingId, annotationWithTracings.version, editableMappingInfo) From 913d21a80ba5f2713100735810e273c96fca3c54 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 11 Nov 2024 10:22:13 +0100 Subject: [PATCH 156/361] re-connect tracing migration service --- .../annotation/TSAnnotationService.scala | 18 
+++++++---- .../tracings/TracingMigrationService.scala | 32 ++++++++----------- .../tracings/TracingService.scala | 20 +----------- 3 files changed, 27 insertions(+), 43 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 8b95e172385..66675d8d335 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -33,15 +33,17 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ BucketMutatingVolumeUpdateAction, MagRestrictions, UpdateMappingNameVolumeAction, - VolumeTracingService, + VolumeTracingService } import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, KeyValueStoreImplicits, + SkeletonTracingMigrationService, TracingDataStore, TracingId, TracingSelector, - VersionedKeyValuePair + VersionedKeyValuePair, + VolumeTracingMigrationService } import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import com.typesafe.scalalogging.LazyLogging @@ -55,6 +57,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss editableMappingService: EditableMappingService, val volumeTracingService: VolumeTracingService, skeletonTracingService: SkeletonTracingService, + skeletonTracingMigrationService: SkeletonTracingMigrationService, + volumeTracingMigrationService: VolumeTracingMigrationService, val remoteDatastoreClient: TSRemoteDatastoreClient, tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits @@ -544,8 +548,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): 
Fox[VolumeTracing] = for { annotation <- getWithTracings(annotationId, version) // TODO is applyUpdates still needed? - tracing <- annotation.getVolume(tracingId) - } yield tracing + tracing <- annotation.getVolume(tracingId).toFox + migrated <- volumeTracingMigrationService.migrateTracing(tracing) + } yield migrated def findSkeleton( annotationId: String, @@ -558,8 +563,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss else { for { annotation <- getWithTracings(annotationId, version) // TODO is applyUpdates still needed? - tracing <- annotation.getSkeleton(tracingId) - } yield tracing + tracing <- annotation.getSkeleton(tracingId).toFox + migrated <- skeletonTracingMigrationService.migrateTracing(tracing) + } yield migrated } def findMultipleVolumes(selectors: Seq[Option[TracingSelector]], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingMigrationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingMigrationService.scala index 795e5e1ec9f..ae10d7efb7b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingMigrationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingMigrationService.scala @@ -22,52 +22,48 @@ trait TracingMigrationService[T <: GeneratedMessage] extends FoxImplicits { implicit protected def ec: ExecutionContext // Each migration transforms a tracing and additionally returns whether the tracing was modified - protected def migrations: List[T => Fox[(T, Boolean)]] + protected def migrations: List[T => Fox[T]] - def migrateTracing(tracing: Fox[T]): Fox[(T, Boolean)] = { - def migrateIter(tracingAndChanged: Fox[(T, Boolean)], migrations: List[T => Fox[(T, Boolean)]]): Fox[(T, Boolean)] = + def migrateTracing(tracing: T): Fox[T] = { + def migrateIter(tracingFox: Fox[T], migrations: List[T => Fox[T]]): Fox[T] = migrations match 
{ - case List() => tracingAndChanged + case List() => tracingFox case head :: tail => - tracingAndChanged.futureBox.flatMap { - case Full((tracing, hasChangedPrev)) => - migrateIter(head(tracing).map(t => (t._1, hasChangedPrev || t._2)), tail) + tracingFox.futureBox.flatMap { + case Full(tracing) => + migrateIter(head(tracing), tail) case x => box2Fox(x) } } - migrateIter(tracing.map((_, false)), migrations) + migrateIter(Fox.successful(tracing), migrations) } } class SkeletonTracingMigrationService @Inject()()(implicit val ec: ExecutionContext) extends TracingMigrationService[SkeletonTracing] with ColorGenerator { - override protected val migrations: List[SkeletonTracing => Fox[(SkeletonTracing, Boolean)]] = List( - removeSingleUserBoundingBox) + override protected val migrations: List[SkeletonTracing => Fox[SkeletonTracing]] = List(removeSingleUserBoundingBox) - private def removeSingleUserBoundingBox(tracing: SkeletonTracing): Fox[(SkeletonTracing, Boolean)] = { + private def removeSingleUserBoundingBox(tracing: SkeletonTracing): Fox[SkeletonTracing] = { val newUserBoundingBox: Option[ProtoBox] = tracing.userBoundingBox.map { bb => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 ProtoBox(newId, color = Some(getRandomColor), boundingBox = bb) } - Fox.successful( - (tracing.clearUserBoundingBox.addAllUserBoundingBoxes(newUserBoundingBox), tracing.userBoundingBox.isDefined)) + Fox.successful(tracing.clearUserBoundingBox.addAllUserBoundingBoxes(newUserBoundingBox)) } } class VolumeTracingMigrationService @Inject()()(implicit val ec: ExecutionContext) extends TracingMigrationService[VolumeTracing] with ColorGenerator { - override protected val migrations: List[VolumeTracing => Fox[(VolumeTracing, Boolean)]] = List( - removeSingleUserBoundingBox) + override protected val migrations: List[VolumeTracing => Fox[VolumeTracing]] = List(removeSingleUserBoundingBox) - private def removeSingleUserBoundingBox(tracing: 
VolumeTracing): Fox[(VolumeTracing, Boolean)] = { + private def removeSingleUserBoundingBox(tracing: VolumeTracing): Fox[VolumeTracing] = { val newUserBoundingBox: Option[ProtoBox] = tracing.userBoundingBox.map { bb => val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 ProtoBox(newId, color = Some(getRandomColor), boundingBox = bb) } - Fox.successful( - (tracing.clearUserBoundingBox.addAllUserBoundingBoxes(newUserBoundingBox), tracing.userBoundingBox.isDefined)) + Fox.successful(tracing.clearUserBoundingBox.addAllUserBoundingBoxes(newUserBoundingBox)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index c8ac3837bd4..4378e671567 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -1,8 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} -import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType +import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore import com.typesafe.scalalogging.LazyLogging import scalapb.{GeneratedMessage, GeneratedMessageCompanion} @@ -18,18 +17,12 @@ trait TracingService[T <: GeneratedMessage] implicit val ec: ExecutionContext - def tracingType: TracingType - def tracingStore: FossilDBClient def temporaryTracingStore: TemporaryTracingStore[T] def temporaryTracingIdStore: TracingStoreRedisStore - def remoteWebknossosClient: TSRemoteWebknossosClient - - def tracingMigrationService: TracingMigrationService[T] - implicit def tracingCompanion: 
GeneratedMessageCompanion[T] // this should be longer than maxCacheTime in webknossos/AnnotationStore @@ -43,17 +36,6 @@ trait TracingService[T <: GeneratedMessage] protected def temporaryIdKey(tracingId: String) = s"temporaryTracingId___$tracingId" - /* // TODO ? add this to migration? - private def migrateTracing(tracingFox: Fox[T], tracingId: String): Fox[T] = - tracingMigrationService.migrateTracing(tracingFox).flatMap { - case (tracing, hasChanged) => - if (hasChanged) - save(tracing, Some(tracingId), currentVersion(tracing)).map(_ => tracing) - else - Fox.successful(tracing) - } - */ - def save(tracing: T, tracingId: Option[String], version: Long, toTemporaryStore: Boolean = false): Fox[String] = { val id = tracingId.getOrElse(TracingId.generate) if (toTemporaryStore) { From 322ffaaccb6442cca2ef7b5b229aaf1d96fbd0b7 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 11 Nov 2024 11:20:13 +0100 Subject: [PATCH 157/361] wip merge editable mappings --- .../annotation/TSAnnotationService.scala | 18 +++++++++++++++ .../controllers/TSAnnotationController.scala | 11 +++++++--- .../EditableMappingService.scala | 2 +- .../volume/VolumeTracingService.scala | 22 +++---------------- 4 files changed, 30 insertions(+), 23 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 66675d8d335..dd4b18264b4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -768,4 +768,22 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) } yield newTracingId + def mergeEditableMappings(newTracingId: String, + 
tracingsWithIds: List[(VolumeTracing, String)], + linearlizedUpdates: List[UpdateAction], + persist: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { + for { + _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" + remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => + remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) + remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox + _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" + // _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) + } yield () + } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { + Fox.empty + } else { + Fox.failure("Cannot merge annotations with and without editable mappings") + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 0a8fb5081fe..f28d3122d50 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -23,6 +23,7 @@ import com.scalableminds.webknossos.tracingstore.annotation.{ AnnotationTransactionService, ResetToBaseAnnotationAction, TSAnnotationService, + UpdateAction, UpdateActionGroup } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService @@ -175,7 +176,8 @@ class TSAnnotationController @Inject()( .findUniqueElement(volumeLayers.map(_.name)) 
.getOrElse(AnnotationLayer.defaultVolumeLayerName) // TODO Merge updates? if so, iron out reverts? - // TODO Merge editable mappings + linearlizedUpdates: List[UpdateAction] = ??? + // TODO if persist, store the updates volumeTracings <- annotationService .findMultipleVolumes(volumeLayers.map { l => Some(TracingSelector(l.tracingId)) @@ -186,8 +188,11 @@ class TSAnnotationController @Inject()( newVolumeId, newVersion = 0L, persist = persist) - mergeEditableMappingsResultBox <- volumeTracingService - .mergeEditableMappings(newVolumeId, volumeTracings.zip(volumeLayers.map(_.tracingId)), persist) + mergeEditableMappingsResultBox <- annotationService + .mergeEditableMappings(newVolumeId, + volumeTracings.zip(volumeLayers.map(_.tracingId)), + linearlizedUpdates, + persist) .futureBox newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { case Full(()) => Fox.successful(Some(newVolumeId)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 53552cc74b1..85d85383873 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -554,6 +554,6 @@ class EditableMappingService @Inject()( updateActionsWithVersion._2) } } yield () - */ + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 02902f222ce..b83a37d87f0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -25,6 +25,7 @@ import com.scalableminds.webknossos.datastore.models.{ import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, @@ -36,6 +37,7 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files import play.api.libs.Files.TemporaryFileCreator + import java.io._ import java.nio.file.Paths import java.util.Base64 @@ -56,7 +58,7 @@ class VolumeTracingService @Inject()( val remoteDatastoreClient: TSRemoteDatastoreClient, val remoteWebknossosClient: TSRemoteWebknossosClient, val temporaryFileCreator: TemporaryFileCreator, - val tracingMigrationService: VolumeTracingMigrationService, + editableMappingService: EditableMappingService, volumeSegmentIndexService: VolumeSegmentIndexService ) extends TracingService[VolumeTracing] with VolumeTracingBucketHelper @@ -867,24 +869,6 @@ class VolumeTracingService @Inject()( } } - def mergeEditableMappings(newTracingId: String, - tracingsWithIds: List[(VolumeTracing, String)], - persist: Boolean): Fox[Unit] = - if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { - for { - _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" - remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => - remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) - remoteFallbackLayer <- 
remoteFallbackLayers.headOption.toFox - _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" - // TODO _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) - } yield () - } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { - Fox.empty - } else { - Fox.failure("Cannot merge tracings with and without editable mappings") - } - def getFallbackLayer(tracingId: String, tracing: VolumeTracing)( implicit tc: TokenContext): Fox[Option[RemoteFallbackLayer]] = fallbackLayerCache.getOrLoad((tracingId, tracing.fallbackLayer, tc.userTokenOpt), From 1cc077483be9fb73d3084d8cce9e7a9f2e5fc8e6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 11 Nov 2024 11:46:55 +0100 Subject: [PATCH 158/361] wip mergedFromContents --- app/controllers/AnnotationController.scala | 1 + app/controllers/AnnotationIOController.scala | 10 ++++++++++ app/models/annotation/AnnotationService.scala | 1 - 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index b0fbcedabbe..1642eacc3ab 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -448,6 +448,7 @@ class AnnotationController @Inject()( None, annotation.description, newAnnotationId) ?~> Messages("annotation.create.failed") + _ <- annotationDAO.insertOne(annotation) } yield clonedAnnotation def tryAcquiringAnnotationMutex(id: String): Action[AnyContent] = diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 42a17b90a74..10d81c5b896 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -10,6 +10,7 @@ import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} 
import com.scalableminds.util.io.ZipIO import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits @@ -148,6 +149,15 @@ class AnnotationIOController @Inject()( name, description, ObjectId.generate) + annotationProto = AnnotationProto( + name = Some(annotation.name), + description = Some(annotation.description), + version = 0L, + annotationLayers = annotation.annotationLayers.map(_.toProto), + earliestAccessibleVersion = 0L + ) + _ <- tracingStoreClient.saveAnnotationProto(annotation._id, annotationProto) + _ <- annotationDAO.insertOne(annotation) _ = analyticsService.track(UploadAnnotationEvent(request.identity, annotation)) } yield JsonOk( diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 8cee2a6f915..4b100e42132 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -504,7 +504,6 @@ class AnnotationService @Inject()( description, name = name.getOrElse(""), typ = annotationType) - _ <- annotationDAO.insertOne(annotation) } yield annotation def updateTeamsForSharedAnnotation(annotationId: ObjectId, teams: List[ObjectId])( From 4b6ccc056b4c3a45e5ba62badaac4c5e552a97c0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 11 Nov 2024 12:55:01 +0100 Subject: [PATCH 159/361] wip merged from contents --- app/controllers/AnnotationIOController.scala | 1 + .../WKRemoteTracingStoreClient.scala | 4 +- .../controllers/VolumeTracingController.scala | 48 ++++++++++++++----- ...alableminds.webknossos.tracingstore.routes | 2 +- 4 files changed, 41 insertions(+), 14 deletions(-) diff 
--git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 10d81c5b896..bc6d7435bba 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -137,6 +137,7 @@ class AnnotationIOController @Inject()( usableDataSource <- dataSource.toUsable.toFox ?~> Messages("dataset.notImported", dataset.name) volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset, usableDataSource) tracingStoreClient <- tracingStoreService.clientFor(dataset) + // TODO ordering. id is looked up in postgres to implement initialdata. cyclic! mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped, tracingStoreClient, parsedFiles.otherFiles, diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index eb516b9ed5d..1afe4170a1c 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -177,11 +177,13 @@ class WKRemoteTracingStoreClient( def saveVolumeTracing(tracing: VolumeTracing, initialData: Option[File] = None, magRestrictions: MagRestrictions = MagRestrictions.empty, - dataSource: Option[DataSourceLike] = None): Fox[String] = { + dataSource: Option[DataSourceLike] = None, + newTracingId: Option[String] = None): Fox[String] = { logger.debug("Called to create VolumeTracing." 
+ baseInfo) for { tracingId <- rpc(s"${tracingStore.url}/tracings/volume/save") .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryStringOptional("newTracingId", newTracingId) .postProtoWithJsonResponse[VolumeTracing, String](tracing) _ = dataSource.foreach(d => tracingDataSourceTemporaryStore.store(tracingId, d)) _ <- initialData match { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index b3683464bff..14aa241805d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -8,17 +8,40 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, Volu import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} -import com.scalableminds.webknossos.datastore.helpers.{GetSegmentIndexParameters, ProtoGeometryImplicits, SegmentStatisticsParameters} +import com.scalableminds.webknossos.datastore.helpers.{ + GetSegmentIndexParameters, + ProtoGeometryImplicits, + SegmentStatisticsParameters +} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} -import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize, WebknossosAdHocMeshRequest, WebknossosDataRequest} +import com.scalableminds.webknossos.datastore.models.{ + LengthUnit, + VoxelSize, + WebknossosAdHocMeshRequest, + WebknossosDataRequest +} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{FullMeshRequest, UserAccessRequest} import 
com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ImportVolumeDataVolumeAction, MagRestrictions, MergedVolumeStats, TSFullMeshService, VolumeDataZipFormat, VolumeSegmentIndexService, VolumeSegmentStatisticsService, VolumeTracingService} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ImportVolumeDataVolumeAction, + MagRestrictions, + MergedVolumeStats, + TSFullMeshService, + VolumeDataZipFormat, + VolumeSegmentIndexService, + VolumeSegmentStatisticsService, + VolumeTracingService +} import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingId, TracingSelector} -import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig} +import com.scalableminds.webknossos.tracingstore.{ + TSRemoteDatastoreClient, + TSRemoteWebknossosClient, + TracingStoreAccessTokenService, + TracingStoreConfig +} import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile @@ -58,17 +81,18 @@ class VolumeTracingController @Inject()( implicit def unpackMultiple(tracings: VolumeTracings): List[Option[VolumeTracing]] = tracings.tracings.toList.map(_.tracing) - def save(): Action[VolumeTracing] = Action.async(validateProto[VolumeTracing]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { - val tracing = request.body - volumeTracingService.save(tracing, None, 0).map { newId => - Ok(Json.toJson(newId)) + def save(newTracingId: Option[String]): Action[VolumeTracing] = 
Action.async(validateProto[VolumeTracing]) { + implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { + val tracing = request.body + volumeTracingService.save(tracing, newTracingId, 0).map { newId => + Ok(Json.toJson(newId)) + } } } } - } } def saveMultiple(): Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 5bb6a936629..4057054f553 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -16,7 +16,7 @@ POST /annotation/:annotationId/resetToBase POST /annotation/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.mergedFromIds(persist: Boolean, newAnnotationId: String) # Volume tracings -POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save() +POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(newTracingId: Option[String]) POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]) POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, version: Option[Long]) From 69d1a1655b2ff809fa3f4d108a59c5bebd2c2001 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 11 Nov 2024 14:05:38 +0100 Subject: [PATCH 160/361] create editable mapping 
info when applying UpdateMappingName action --- .../annotation/TSAnnotationService.scala | 14 ++-- .../EditableMappingController.scala | 68 +++---------------- .../EditableMappingService.scala | 8 +-- 3 files changed, 22 insertions(+), 68 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index dd4b18264b4..648984fe6c7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -250,15 +250,21 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss action: UpdateMappingNameVolumeAction, targetVersion: Long)(implicit tc: TokenContext, ec: ExecutionContext): Fox[AnnotationWithTracings] = for { - editableMappingInfo <- getEditableMappingInfoFromStore(action.actionTracingId, annotationWithTracings.version) volumeTracing <- annotationWithTracings.getVolume(action.actionTracingId).toFox + _ <- assertMappingIsNotLocked(volumeTracing) + baseMappingName <- volumeTracing.mappingName.toFox ?~> "makeEditable.failed.noBaseMapping" + _ <- bool2Fox(volumeTracingService.volumeBucketsAreEmpty(action.actionTracingId)) ?~> "annotation.volumeBucketsNotEmpty" + editableMappingInfo = editableMappingService.create(baseMappingName) updater <- editableMappingUpdaterFor(annotationId, action.actionTracingId, volumeTracing, - editableMappingInfo.value, + editableMappingInfo, annotationWithTracings.version, targetVersion) - } yield annotationWithTracings.addEditableMapping(action.actionTracingId, editableMappingInfo.value, updater) + } yield annotationWithTracings.addEditableMapping(action.actionTracingId, editableMappingInfo, updater) + + private def assertMappingIsNotLocked(volumeTracing: 
VolumeTracing)(implicit ec: ExecutionContext): Fox[Unit] = + bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" private def applyPendingUpdates( annotation: AnnotationProto, @@ -771,7 +777,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def mergeEditableMappings(newTracingId: String, tracingsWithIds: List[(VolumeTracing, String)], linearlizedUpdates: List[UpdateAction], - persist: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + persist: Boolean)(implicit ec: ExecutionContext): Fox[Unit] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index a1e6fc9a3b7..44f0724c5e6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -4,33 +4,31 @@ import com.google.inject.Inject import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.{EditableMappingSegmentListResult, UserAccessRequest} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} -import 
com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingService, MinCutParameters, NeighborsParameters } -import com.scalableminds.webknossos.tracingstore.tracings.volume.{UpdateMappingNameVolumeAction, VolumeTracingService} +import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scala.concurrent.ExecutionContext -class EditableMappingController @Inject()(volumeTracingService: VolumeTracingService, - annotationService: TSAnnotationService, - remoteWebknossosClient: TSRemoteWebknossosClient, - accessTokenService: TracingStoreAccessTokenService, - editableMappingService: EditableMappingService, - annotationTransactionService: AnnotationTransactionService)( - implicit ec: ExecutionContext, - bodyParsers: PlayBodyParsers) +class EditableMappingController @Inject()( + volumeTracingService: VolumeTracingService, + annotationService: TSAnnotationService, + remoteWebknossosClient: TSRemoteWebknossosClient, + accessTokenService: TracingStoreAccessTokenService, + editableMappingService: EditableMappingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller { + // TODO remove once frontend sends update action def makeMappingEditable(tracingId: String): Action[AnyContent] = Action.async { implicit request => log() { @@ -39,59 +37,13 @@ class EditableMappingController @Inject()(volumeTracingService: VolumeTracingSer annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" - _ <- 
assertMappingIsNotLocked(tracing) - _ <- bool2Fox(volumeTracingService.volumeBucketsAreEmpty(tracingId)) ?~> "annotation.volumeBucketsNotEmpty" - editableMappingInfo <- editableMappingService.create(tracingId, baseMappingName = tracingMappingName) - volumeUpdate = UpdateMappingNameVolumeAction(Some(tracingId), - isEditable = Some(true), - isLocked = Some(true), - actionTracingId = tracingId, - actionTimestamp = Some(System.currentTimeMillis())) - _ <- annotationTransactionService - .handleSingleUpdateAction( // TODO replace this route by the update action only? - annotationId, - tracing.version, - volumeUpdate) + editableMappingInfo = editableMappingService.create(tracingMappingName) infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) } yield Ok(infoJson) } } } - private def assertMappingIsNotLocked(volumeTracing: VolumeTracing): Fox[Unit] = - bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" - - /*// TODO integrate all of this into annotation update - - def updateEditableMapping( - annotationId: String, - tracingId: String): Action[List[UpdateActionGroup]] = - Action.async(validateJson[List[UpdateActionGroup]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeTracing(tracingId)) { - for { - tracing <- annotationService.findVolume(annotationId, tracingId) - mappingName <- tracing.mappingName.toFox - _ <- editableMappingService.assertTracingHasEditableMapping(tracing) - currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) - _ <- bool2Fox(request.body.length == 1) ?~> "Editable mapping update request must contain exactly one update group" - updateGroup <- request.body.headOption.toFox - _ <- bool2Fox(updateGroup.version == currentVersion + 1) ?~> "version mismatch" - report = TracingUpdatesReport( - annotationId, // TODO integrate all of this into annotation update - timestamps = 
List(Instant(updateGroup.timestamp)), - statistics = None, - significantChangesCount = updateGroup.actions.length, - viewChangesCount = 0, - tokenContextForRequest.userTokenOpt - ) - _ <- remoteWebknossosClient.reportTracingUpdates(report) - remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - _ <- editableMappingService.update(mappingName, updateGroup, updateGroup.version, remoteFallbackLayer) - } yield Ok - } - } - */ - def editableMappingInfo(tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 85d85383873..973e811f204 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -124,16 +124,12 @@ class EditableMappingService @Inject()( "createdTimestamp" -> editableMappingInfo.createdTimestamp ) - def create(tracingId: String, baseMappingName: String): Fox[EditableMappingInfo] = { - val newEditableMappingInfo = EditableMappingInfo( + def create(baseMappingName: String): EditableMappingInfo = + EditableMappingInfo( baseMappingName = baseMappingName, createdTimestamp = Instant.now.epochMillis, largestAgglomerateId = 0L ) - for { - _ <- tracingDataStore.editableMappingsInfo.put(tracingId, 0L, toProtoBytes(newEditableMappingInfo)) - } yield newEditableMappingInfo - } def duplicateSegmentToAgglomerate(sourceTracingId: String, newId: String, From 5ec9697b9b6ff16f127b87c5542fe54b996624d4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 11 Nov 2024 14:48:21 +0100 Subject: [PATCH 161/361] batching in 
duplicate updates --- app/controllers/AnnotationController.scala | 2 +- .../scala/collections/SequenceUtils.scala | 9 + .../annotation/TSAnnotationService.scala | 155 +++++++++--------- 3 files changed, 88 insertions(+), 78 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 1642eacc3ab..5cf5a39cc7d 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -448,7 +448,7 @@ class AnnotationController @Inject()( None, annotation.description, newAnnotationId) ?~> Messages("annotation.create.failed") - _ <- annotationDAO.insertOne(annotation) + _ <- annotationDAO.insertOne(clonedAnnotation) } yield clonedAnnotation def tryAcquiringAnnotationMutex(id: String): Action[AnyContent] = diff --git a/util/src/main/scala/collections/SequenceUtils.scala b/util/src/main/scala/collections/SequenceUtils.scala index a584ae2923d..c7b5f1138fb 100644 --- a/util/src/main/scala/collections/SequenceUtils.scala +++ b/util/src/main/scala/collections/SequenceUtils.scala @@ -35,4 +35,13 @@ object SequenceUtils { .reverse // we prepended on the outer list (for perf reasons) .map(_.reverse) // we prepended on the inner lists (for perf reasons) + /* + // TODO: Comment + */ + def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = + (0L to ((to - from) / batchSize)).map { batchIndex => + val batchFrom = batchIndex * batchSize + from + val batchTo = Math.min(to, (batchIndex + 1) * batchSize + from - 1) + (batchFrom, batchTo) + } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 648984fe6c7..ee7ff4a703b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.annotation +import collections.SequenceUtils import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} @@ -122,17 +123,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" } yield keyValuePair.value - private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( - implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = - if (desiredVersion == existingVersion) Fox.successful(List()) - else { - tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( - annotationId, - Some(desiredVersion), - Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) - } - - // TODO option to dry apply? private def applyUpdate( annotationId: String, annotationWithTracings: AnnotationWithTracings, @@ -222,7 +212,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss "value" -> Json.toJson(tuple._2) ) - val batchRanges = batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) + val batchRanges = SequenceUtils.batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) for { updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => val batchFrom = batchRange._1 @@ -243,7 +233,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tracing <- annotation.getEditableMappingInfo(tracingId) ?~> "getEditableMapping.failed" } yield tracing - // TODO move the functions that construct the AnnotationWithTracigns elsewhere to keep this file smaller? 
private def addEditableMapping( annotationId: String, annotationWithTracings: AnnotationWithTracings, @@ -286,6 +275,41 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss reportChangesToWk) ?~> "applyUpdates.inner.failed" } yield updated + private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( + implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = + if (desiredVersion == existingVersion) Fox.successful(List()) + else { + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(desiredVersion), + Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } + + private def findTracingsForAnnotation(annotation: AnnotationProto)( + implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { + val skeletonTracingIds = + annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) + val volumeTracingIds = + annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) + for { + skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)( + id => + tracingDataStore.skeletons.get[SkeletonTracing](id, Some(annotation.version), mayBeEmpty = Some(true))( + fromProtoBytes[SkeletonTracing])) + volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)( + id => + tracingDataStore.volumes + .get[VolumeTracing](id, Some(annotation.version), mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing])) + _ = logger.info(s"fetched ${skeletonTracings.length} skeletons and ${volumeTracings.length} volumes") + skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds + .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) + .toMap + volumeTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = volumeTracingIds + .zip(volumeTracings.map(versioned => Right[SkeletonTracing, 
VolumeTracing](versioned.value))) + .toMap + } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, Map.empty) + } + private def findEditableMappingsForAnnotation( annotationId: String, annotationWithTracings: AnnotationWithTracings, @@ -339,34 +363,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss relyOnAgglomerateIds = false // TODO should we? ) - private def findTracingsForAnnotation(annotation: AnnotationProto)( - implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { - val skeletonTracingIds = - annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) - - val volumeTracingIds = - annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) - - logger.info(s"fetching volumes $volumeTracingIds and skeletons $skeletonTracingIds") - for { - skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)( - id => - tracingDataStore.skeletons.get[SkeletonTracing](id, Some(annotation.version), mayBeEmpty = Some(true))( - fromProtoBytes[SkeletonTracing])) - volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)( - id => - tracingDataStore.volumes - .get[VolumeTracing](id, Some(annotation.version), mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing])) - _ = logger.info(s"fetched ${skeletonTracings.length} skeletons and ${volumeTracings.length} volumes") - skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds - .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) - .toMap - volumeTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = volumeTracingIds - .zip(volumeTracings.map(versioned => Right[SkeletonTracing, VolumeTracing](versioned.value))) - .toMap - } yield AnnotationWithTracings(annotation, skeletonTracingsMap ++ volumeTracingsMap, Map.empty) - } - private def applyUpdatesGrouped( annotation: 
AnnotationWithTracings, annotationId: String, @@ -540,13 +536,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield Some(editableMappingInfo.baseMappingName) else Fox.successful(tracing.mappingName) - private def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = - (0L to ((to - from) / batchSize)).map { batchIndex => - val batchFrom = batchIndex * batchSize + from - val batchTo = Math.min(to, (batchIndex + 1) * batchSize + from - 1) - (batchFrom, batchTo) - } - def findVolume(annotationId: String, tracingId: String, version: Option[Long] = None, @@ -615,9 +604,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss datasetBoundingBox: Option[BoundingBox])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { v0Annotation <- get(annotationId, Some(0L)) + currentAnnotation <- get(annotationId, version) // Duplicate updates - tracingIdMap <- duplicateUpdates(annotationId, newAnnotationId, v0Annotation.annotationLayers.map(_.tracingId)) + tracingIdMap <- duplicateUpdates(annotationId, + newAnnotationId, + v0Annotation.annotationLayers.map(_.tracingId), + currentAnnotation.version) // Duplicate v0 v0NewLayers <- Fox.serialCombined(v0Annotation.annotationLayers)(layer => @@ -628,7 +621,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- tracingDataStore.annotations.put(newAnnotationId, v0Annotation.version, v0DuplicatedAnnotation) // Duplicate current - currentAnnotation <- get(annotationId, version) newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)(layer => duplicateLayer(annotationId, layer, tracingIdMap, currentAnnotation.version, isFromTask, datasetBoundingBox)) duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, @@ -637,38 +629,47 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield duplicatedAnnotation - private def 
duplicateUpdates(annotationId: String, newAnnotationId: String, v0TracingIds: Seq[String])( - implicit ec: ExecutionContext): Fox[Map[String, String]] = { + private def duplicateUpdates(annotationId: String, + newAnnotationId: String, + v0TracingIds: Seq[String], + newestVersion: Long)(implicit ec: ExecutionContext): Fox[Map[String, String]] = { val tracingIdMapMutable = scala.collection.mutable.Map[String, String]() v0TracingIds.foreach { v0TracingId => tracingIdMapMutable.put(v0TracingId, TracingId.generate) } - // TODO memory: batch - - for { - updateLists: Seq[(Long, List[UpdateAction])] <- tracingDataStore.annotationUpdates - .getMultipleVersionsAsVersionValueTuple(annotationId)(fromJsonBytes[List[UpdateAction]]) - _ <- Fox.serialCombined(updateLists) { - case (version, updateList) => - for { - updateListAdapted <- Fox.serialCombined(updateList) { - case a: AddLayerAnnotationAction => - for { - actionTracingId <- a.tracingId ?~> "duplicating addLayer without tracingId" - _ = if (!tracingIdMapMutable.contains(actionTracingId)) { - a.tracingId.foreach(actionTracingId => tracingIdMapMutable.put(actionTracingId, TracingId.generate)) - } - mappedTracingId <- tracingIdMapMutable.get(actionTracingId) ?~> "duplicating action for unknown layer" - } yield a.copy(tracingId = Some(mappedTracingId)) - case a: LayerUpdateAction => - for { - mappedTracingId <- tracingIdMapMutable.get(a.actionTracingId) ?~> "duplicating action for unknown layer" - } yield a.withActionTracingId(mappedTracingId) - } - _ <- tracingDataStore.annotationUpdates.put(newAnnotationId, version, Json.toJson(updateListAdapted)) - } yield () + val updateBatchRanges = SequenceUtils.batchRangeInclusive(0L, newestVersion, batchSize = 100) + Fox + .serialCombined(updateBatchRanges.toList) { batchRange => + for { + updateLists: Seq[(Long, List[UpdateAction])] <- tracingDataStore.annotationUpdates + .getMultipleVersionsAsVersionValueTuple( + annotationId, + oldestVersion = Some(batchRange._1), + newestVersion = 
Some(batchRange._2))(fromJsonBytes[List[UpdateAction]]) + _ <- Fox.serialCombined(updateLists) { + case (version, updateList) => + for { + updateListAdapted <- Fox.serialCombined(updateList) { + case a: AddLayerAnnotationAction => + for { + actionTracingId <- a.tracingId ?~> "duplicating addLayer without tracingId" + _ = if (!tracingIdMapMutable.contains(actionTracingId)) { + a.tracingId.foreach(actionTracingId => + tracingIdMapMutable.put(actionTracingId, TracingId.generate)) + } + mappedTracingId <- tracingIdMapMutable.get(actionTracingId) ?~> "duplicating action for unknown layer" + } yield a.copy(tracingId = Some(mappedTracingId)) + case a: LayerUpdateAction => + for { + mappedTracingId <- tracingIdMapMutable.get(a.actionTracingId) ?~> "duplicating action for unknown layer" + } yield a.withActionTracingId(mappedTracingId) + } + _ <- tracingDataStore.annotationUpdates.put(newAnnotationId, version, Json.toJson(updateListAdapted)) + } yield () + } + } yield () } - } yield tracingIdMapMutable.toMap + .map(_ => tracingIdMapMutable.toMap) } private def duplicateLayer(annotationId: String, From f6f83b871755ebeb4efcfa80e0bd35a6b06124bc Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 11 Nov 2024 15:41:48 +0100 Subject: [PATCH 162/361] resolve some small todo comments --- util/src/main/scala/collections/SequenceUtils.scala | 8 +++++++- .../datastore/models/annotation/AnnotationLayer.scala | 1 - 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/util/src/main/scala/collections/SequenceUtils.scala b/util/src/main/scala/collections/SequenceUtils.scala index c7b5f1138fb..93978cbd9ba 100644 --- a/util/src/main/scala/collections/SequenceUtils.scala +++ b/util/src/main/scala/collections/SequenceUtils.scala @@ -36,7 +36,13 @@ object SequenceUtils { .map(_.reverse) // we prepended on the inner lists (for perf reasons) /* - // TODO: Comment + Create a Seq of Long range tuples, covering a given inclusive Long range. 
+ The individual ranges should be treated as inclusive as well. + Example: + batchRangeInclusive(0,5,3) + → Seq((0,2), (3,5)) + batchRangeInclusive(0,6,2) + → Seq((0,1), (2,3), (4,5), (6,6)) */ def batchRangeInclusive(from: Long, to: Long, batchSize: Long): Seq[(Long, Long)] = (0L to ((to - from) / batchSize)).map { batchIndex => diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala index b9552941679..4eacd99fe59 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala @@ -11,7 +11,6 @@ import scalapb.GeneratedMessage import scala.concurrent.ExecutionContext -// TODO can this be moved back to wk-core backend? case class AnnotationLayer( tracingId: String, typ: AnnotationLayerType, From fbadb937fe77d5710bd5b73e49c706ac3a1c61dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 11 Nov 2024 18:36:56 +0100 Subject: [PATCH 163/361] adjust frontend editable mapping initialization --- .../oxalis/model/sagas/proofread_saga.ts | 32 ++++++++++--------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index c10b35693f0..9421e2222ad 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -33,10 +33,7 @@ import { getTreeNameForAgglomerateSkeleton, isSkeletonLayerTransformed, } from "oxalis/model/accessors/skeletontracing_accessor"; -import { - pushSaveQueueTransaction, - setVersionNumberAction, -} from "oxalis/model/actions/save_actions"; +import { pushSaveQueueTransaction } from 
"oxalis/model/actions/save_actions"; import { splitAgglomerate, mergeAgglomerate, @@ -62,7 +59,6 @@ import { getEdgesForAgglomerateMinCut, getNeighborsForAgglomerateNode, getPositionForSegmentInAgglomerate, - makeMappingEditable, } from "admin/admin_rest_api"; import { setMappingAction, setMappingNameAction } from "oxalis/model/actions/settings_actions"; import { getSegmentIdForPositionAsync } from "oxalis/controller/combinations/volume_handlers"; @@ -78,7 +74,7 @@ import { } from "oxalis/model/actions/annotation_actions"; import type { ActiveMappingInfo, Mapping, NumberLikeMap, Tree, VolumeTracing } from "oxalis/store"; import _ from "lodash"; -import type { AdditionalCoordinate } from "types/api_flow_types"; +import type { AdditionalCoordinate, ServerEditableMapping } from "types/api_flow_types"; import { takeEveryUnlessBusy } from "./saga_helpers"; import type { Action } from "../actions/actions"; import { isBigInt, isNumberMap, SoftError } from "libs/utils"; @@ -264,24 +260,30 @@ function* createEditableMapping(): Saga { * Returns the name of the editable mapping. This is not identical to the * name of the HDF5 mapping for which the editable mapping is about to be created. */ - const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); - // Save before making the mapping editable to make sure the correct mapping is activated in the backend - yield* call([Model, Model.ensureSavedState]); // Get volume tracing again to make sure the version is up to date - const tracing = yield* select((state) => state.tracing); + const volumeTracing = yield* select((state) => getActiveSegmentationTracing(state)); + if (!volumeTracing || !volumeTracing.mappingName) { + // This should never occur, because the proofreading tool is only available when a volume tracing layer is active. + throw new Error("No active segmentation tracing layer. 
Cannot create editable mapping."); + } const upToDateVolumeTracing = yield* select((state) => getActiveSegmentationTracing(state)); if (upToDateVolumeTracing == null) { - throw new Error("No active segmentation tracing layer. Cannot create editble mapping."); + throw new Error("No active segmentation tracing layer. Cannot create editable mapping."); } const volumeTracingId = upToDateVolumeTracing.tracingId; const layerName = volumeTracingId; - const serverEditableMapping = yield* call(makeMappingEditable, tracingStoreUrl, volumeTracingId); - // The server increments the volume tracing's version by 1 when switching the mapping to an editable one - yield* put(setVersionNumberAction(tracing.version + 1)); + const baseMappingName = volumeTracing.mappingName; yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); + // Ensure the backend receives the correct mapping nam, which makes the mapping editable before doing the first proofreading operation. + yield* call([Model, Model.ensureSavedState]); yield* put(setHasEditableMappingAction()); - yield* put(initializeEditableMappingAction(serverEditableMapping)); + const editableMapping: ServerEditableMapping = { + baseMappingName: baseMappingName, + tracingId: volumeTracingId, + createdTimestamp: Date.now(), + }; + yield* put(initializeEditableMappingAction(editableMapping)); return volumeTracingId; } From 04dccff1a98b27bf9c764eac30bc3bbdb70907f5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 12 Nov 2024 09:22:22 +0100 Subject: [PATCH 164/361] first set mapping to editable before forcing sending all updates ton the backend --- frontend/javascripts/oxalis/model/sagas/proofread_saga.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 9421e2222ad..7aa295aa711 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ 
b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -275,9 +275,9 @@ function* createEditableMapping(): Saga { const layerName = volumeTracingId; const baseMappingName = volumeTracing.mappingName; yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); - // Ensure the backend receives the correct mapping nam, which makes the mapping editable before doing the first proofreading operation. - yield* call([Model, Model.ensureSavedState]); yield* put(setHasEditableMappingAction()); + // Ensure the backend receives the correct mapping name, the fact that the mapping is locked and editable before doing the first proofreading operation. + yield* call([Model, Model.ensureSavedState]); const editableMapping: ServerEditableMapping = { baseMappingName: baseMappingName, tracingId: volumeTracingId, From 2b3c841a4c18e3da806d974908eecac452b09f2d Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 12 Nov 2024 09:40:58 +0100 Subject: [PATCH 165/361] re-connect initializeWithData --- app/controllers/AnnotationIOController.scala | 1 - .../annotation/TSAnnotationService.scala | 19 +++++++++++-------- .../controllers/VolumeTracingController.scala | 18 ++++++++++-------- .../volume/VolumeTracingService.scala | 2 +- 4 files changed, 22 insertions(+), 18 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index bc6d7435bba..10d81c5b896 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -137,7 +137,6 @@ class AnnotationIOController @Inject()( usableDataSource <- dataSource.toUsable.toFox ?~> Messages("dataset.notImported", dataset.name) volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset, usableDataSource) tracingStoreClient <- tracingStoreService.clientFor(dataset) - // TODO ordering. id is looked up in postgres to implement initialdata. cyclic! 
mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped, tracingStoreClient, parsedFiles.otherFiles, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index ee7ff4a703b..a56e8464a06 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -292,14 +292,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss val volumeTracingIds = annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) for { - skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)( - id => - tracingDataStore.skeletons.get[SkeletonTracing](id, Some(annotation.version), mayBeEmpty = Some(true))( - fromProtoBytes[SkeletonTracing])) - volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)( - id => - tracingDataStore.volumes - .get[VolumeTracing](id, Some(annotation.version), mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing])) + skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)(id => + findSkeletonRaw(id, Some(annotation.version))) + volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)(id => findVolumeRaw(id, Some(annotation.version))) _ = logger.info(s"fetched ${skeletonTracings.length} skeletons and ${volumeTracings.length} volumes") skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) @@ -536,6 +531,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield Some(editableMappingInfo.baseMappingName) else Fox.successful(tracing.mappingName) + def 
findVolumeRaw(tracingId: String, version: Option[Long] = None): Fox[VersionedKeyValuePair[VolumeTracing]] = + tracingDataStore.volumes + .get[VolumeTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing]) + + private def findSkeletonRaw(tracingId: String, version: Option[Long]): Fox[VersionedKeyValuePair[SkeletonTracing]] = + tracingDataStore.skeletons + .get[SkeletonTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[SkeletonTracing]) + + def findVolume(annotationId: String, tracingId: String, version: Option[Long] = None, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 14aa241805d..377e5563fb2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -143,12 +143,14 @@ class VolumeTracingController @Inject()( logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") + // The annotation object may not yet exist here. Caller is responsible to save that too. 
+ tracing <- annotationService.findVolumeRaw(tracingId) ?~> Messages("tracing.notFound") magRestrictions = MagRestrictions(minMag, maxMag) - mags <- volumeTracingService.initializeWithData(tracingId, tracing, initialData, magRestrictions).toFox - _ <- volumeTracingService.updateMagList(tracingId, tracing, mags) + mags <- volumeTracingService + .initializeWithData(tracingId, tracing.value, initialData, magRestrictions) + .toFox + _ <- volumeTracingService.updateMagList(tracingId, tracing.value, mags) } yield Ok(Json.toJson(tracingId)) } } @@ -182,11 +184,11 @@ class VolumeTracingController @Inject()( logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") - mags <- volumeTracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox - _ <- volumeTracingService.updateMagList(tracingId, tracing, mags) + // The annotation object may not yet exist here. Caller is responsible to save that too. 
+ tracing <- annotationService.findVolumeRaw(tracingId) ?~> Messages("tracing.notFound") + mags <- volumeTracingService.initializeWithDataMultiple(tracingId, tracing.value, initialData).toFox + _ <- volumeTracingService.updateMagList(tracingId, tracing.value, mags) } yield Ok(Json.toJson(tracingId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index b83a37d87f0..b2c27133e86 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -600,7 +600,7 @@ class VolumeTracingService @Inject()( toTemporaryStore: Boolean = false): Fox[String] = for { _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." - _ <- bool2Fox(mags.nonEmpty) ?~> "Mag restrictions result in zero mags" + _ <- bool2Fox(mags.nonEmpty) ?~> "Initializing without any mags. No data or mag restrictions too tight?" 
id <- save(tracing.copy(mags = mags.toList.sortBy(_.maxDim).map(vec3IntToProto)), Some(tracingId), tracing.version, From 184eff48aae85d2deab48698ea0e53fab28de3bd Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 12 Nov 2024 09:53:16 +0100 Subject: [PATCH 166/361] loglevel for pekko.event --- conf/logback-dev.xml | 2 +- .../tracingstore/annotation/TSAnnotationService.scala | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/conf/logback-dev.xml b/conf/logback-dev.xml index e8aa9cb9f8a..9b7641e737d 100644 --- a/conf/logback-dev.xml +++ b/conf/logback-dev.xml @@ -17,7 +17,7 @@ - + diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index a56e8464a06..8d78b468e0a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -263,7 +263,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { updateGroupsAsSaved <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" updatesGroupsRegrouped = regroupByIsolationSensitiveActions(updateGroupsAsSaved) - annotationWithTracings <- findTracingsForAnnotation(annotation) ?~> "findTracingsForUpdates.failed" + annotationWithTracings <- findTracingsForAnnotation(annotation) ?~> "findTracingsForAnnotation.failed" annotationWithTracingsAndMappings <- findEditableMappingsForAnnotation( annotationId, annotationWithTracings, @@ -293,8 +293,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) for { skeletonTracings <- 
Fox.serialCombined(skeletonTracingIds.toList)(id => - findSkeletonRaw(id, Some(annotation.version))) - volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)(id => findVolumeRaw(id, Some(annotation.version))) + findSkeletonRaw(id, Some(annotation.version))) ?~> "findSkeletonRaw.failed" + volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)(id => findVolumeRaw(id, Some(annotation.version))) ?~> "findVolumeRaw.failed" _ = logger.info(s"fetched ${skeletonTracings.length} skeletons and ${volumeTracings.length} volumes") skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) From a07404bb77b99c3d0e0959e61d337c7f220eb717 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 12 Nov 2024 09:58:58 +0100 Subject: [PATCH 167/361] fix skeleton lookup --- .../tracingstore/annotation/TSAnnotationService.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 8d78b468e0a..d6bc080930e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -295,7 +295,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)(id => findSkeletonRaw(id, Some(annotation.version))) ?~> "findSkeletonRaw.failed" volumeTracings <- Fox.serialCombined(volumeTracingIds.toList)(id => findVolumeRaw(id, Some(annotation.version))) ?~> "findVolumeRaw.failed" - _ = logger.info(s"fetched ${skeletonTracings.length} skeletons and ${volumeTracings.length} volumes") 
skeletonTracingsMap: Map[String, Either[SkeletonTracing, VolumeTracing]] = skeletonTracingIds .zip(skeletonTracings.map(versioned => Left[SkeletonTracing, VolumeTracing](versioned.value))) .toMap @@ -536,7 +535,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss .get[VolumeTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[VolumeTracing]) private def findSkeletonRaw(tracingId: String, version: Option[Long]): Fox[VersionedKeyValuePair[SkeletonTracing]] = - tracingDataStore.volumes + tracingDataStore.skeletons .get[SkeletonTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[SkeletonTracing]) def findVolume(annotationId: String, From ab056ce3fdbfd85cf08b44b8b763536814d19d14 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 12 Nov 2024 10:20:37 +0100 Subject: [PATCH 168/361] wip merge editable mappings --- .../annotation/TSAnnotationService.scala | 26 ++++++++++++++++--- .../controllers/TSAnnotationController.scala | 8 ++---- .../EditableMappingUpdateActions.scala | 8 +++--- 3 files changed, 30 insertions(+), 12 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index d6bc080930e..f2afd7c97ae 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -390,7 +390,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss logger.info(s"applying ${updates.length} to go from v${annotation.version} to v$targetVersion") - // TODO can we make this tail recursive? 
def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = annotationWithTracingsFox.futureBox.flatMap { @@ -777,9 +776,27 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) } yield newTracingId - def mergeEditableMappings(newTracingId: String, + private def ironUpdates(updateGroups: Seq[(Long, List[UpdateAction])]): Seq[UpdateAction] = ??? + + def mergeEditableMappingUpdates(annotationIds: List[String], newTracingId: String)( + implicit ec: ExecutionContext): Fox[List[EditableMappingUpdateAction]] = + for { + updatesByAnnotation <- Fox.serialCombined(annotationIds) { annotationId => + for { + updateGroups <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple(annotationId)( + fromJsonBytes[List[UpdateAction]]) + updatesIroned: Seq[UpdateAction] = ironUpdates(updateGroups) + editableMappingUpdates = updatesIroned.flatMap { + case a: EditableMappingUpdateAction => Some(a.withActionTracingId(newTracingId)) + case _ => None + } + } yield editableMappingUpdates + } + } yield updatesByAnnotation.flatten + + def mergeEditableMappings(annotationIds: List[String], + newVolumeTracingId: String, tracingsWithIds: List[(VolumeTracing, String)], - linearlizedUpdates: List[UpdateAction], persist: Boolean)(implicit ec: ExecutionContext): Fox[Unit] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { @@ -788,6 +805,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" + linearizedEditableMappingUpdates: List[UpdateAction] <- 
mergeEditableMappingUpdates(annotationIds, + newVolumeTracingId) + // TODO if persist, store the linearized updates // _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) } yield () } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index f28d3122d50..f68d518f812 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -23,7 +23,6 @@ import com.scalableminds.webknossos.tracingstore.annotation.{ AnnotationTransactionService, ResetToBaseAnnotationAction, TSAnnotationService, - UpdateAction, UpdateActionGroup } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService @@ -175,9 +174,6 @@ class TSAnnotationController @Inject()( mergedVolumeName = SequenceUtils .findUniqueElement(volumeLayers.map(_.name)) .getOrElse(AnnotationLayer.defaultVolumeLayerName) - // TODO Merge updates? if so, iron out reverts? - linearlizedUpdates: List[UpdateAction] = ??? 
- // TODO if persist, store the updates volumeTracings <- annotationService .findMultipleVolumes(volumeLayers.map { l => Some(TracingSelector(l.tracingId)) @@ -189,9 +185,9 @@ class TSAnnotationController @Inject()( newVersion = 0L, persist = persist) mergeEditableMappingsResultBox <- annotationService - .mergeEditableMappings(newVolumeId, + .mergeEditableMappings(request.body, + newVolumeId, volumeTracings.zip(volumeLayers.map(_.tracingId)), - linearlizedUpdates, persist) .futureBox newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 9cd4ae51d32..1e843d24bce 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -5,7 +5,9 @@ import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, import play.api.libs.json.Format.GenericFormat import play.api.libs.json._ -trait EditableMappingUpdateAction extends LayerUpdateAction +trait EditableMappingUpdateAction extends LayerUpdateAction { + override def withActionTracingId(newTracingId: String): EditableMappingUpdateAction +} // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. 
@@ -24,7 +26,7 @@ case class SplitAgglomerateUpdateAction(agglomerateId: Long, override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def withActionTracingId(newTracingId: String): LayerUpdateAction = + override def withActionTracingId(newTracingId: String): EditableMappingUpdateAction = this.copy(actionTracingId = newTracingId) } @@ -50,7 +52,7 @@ case class MergeAgglomerateUpdateAction(agglomerateId1: Long, override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) override def addAuthorId(authorId: Option[String]): UpdateAction = this.copy(actionAuthorId = authorId) - override def withActionTracingId(newTracingId: String): LayerUpdateAction = + override def withActionTracingId(newTracingId: String): EditableMappingUpdateAction = this.copy(actionTracingId = newTracingId) } From 689e728c9db0329a0fdf82aed0a618cbe2d40911 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 12 Nov 2024 10:58:17 +0100 Subject: [PATCH 169/361] instantiate updater --- .../annotation/TSAnnotationService.scala | 67 +++++++++++++------ .../controllers/TSAnnotationController.scala | 1 + .../EditableMappingService.scala | 41 ------------ 3 files changed, 49 insertions(+), 60 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index f2afd7c97ae..ef1182d66a8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -4,6 +4,7 @@ import collections.SequenceUtils import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import 
com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.Annotation.{ @@ -39,6 +40,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ import com.scalableminds.webknossos.tracingstore.tracings.{ FallbackDataHelper, KeyValueStoreImplicits, + RemoteFallbackLayer, SkeletonTracingMigrationService, TracingDataStore, TracingId, @@ -332,6 +334,27 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tracingDataStore.editableMappingsInfo.get(volumeTracingId, version = Some(version))( fromProtoBytes[EditableMappingInfo]) + private def editableMappingUpdaterFor(annotationId: String, + tracingId: String, + remoteFallbackLayer: RemoteFallbackLayer, + editableMappingInfo: EditableMappingInfo, + currentMaterializedVersion: Long, + targetVersion: Long)(implicit tc: TokenContext): EditableMappingUpdater = + new EditableMappingUpdater( + annotationId, + tracingId, + editableMappingInfo.baseMappingName, + currentMaterializedVersion, + targetVersion, + remoteFallbackLayer, + tc, + remoteDatastoreClient, + editableMappingService, + this, + tracingDataStore, + relyOnAgglomerateIds = false // TODO should we? + ) + private def editableMappingUpdaterFor( annotationId: String, tracingId: String, @@ -342,20 +365,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(volumeTracing, tracingId) } yield - new EditableMappingUpdater( - annotationId, - tracingId, - editableMappingInfo.baseMappingName, - currentMaterializedVersion, - targetVersion, - remoteFallbackLayer, - tc, - remoteDatastoreClient, - editableMappingService, - this, - tracingDataStore, - relyOnAgglomerateIds = false // TODO should we? 
- ) + editableMappingUpdaterFor(annotationId, + tracingId, + remoteFallbackLayer, + editableMappingInfo, + currentMaterializedVersion, + targetVersion) private def applyUpdatesGrouped( annotation: AnnotationWithTracings, @@ -776,16 +791,18 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) } yield newTracingId - private def ironUpdates(updateGroups: Seq[(Long, List[UpdateAction])]): Seq[UpdateAction] = ??? + private def ironOutReversions(updateGroups: Seq[(Long, List[UpdateAction])]): Seq[UpdateAction] = + updateGroups.flatMap(_._2) // TODO iron out reversions. + // TODO: note: in each group the updates might have to be reversed? - def mergeEditableMappingUpdates(annotationIds: List[String], newTracingId: String)( + private def mergeEditableMappingUpdates(annotationIds: List[String], newTracingId: String)( implicit ec: ExecutionContext): Fox[List[EditableMappingUpdateAction]] = for { updatesByAnnotation <- Fox.serialCombined(annotationIds) { annotationId => for { updateGroups <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple(annotationId)( fromJsonBytes[List[UpdateAction]]) - updatesIroned: Seq[UpdateAction] = ironUpdates(updateGroups) + updatesIroned: Seq[UpdateAction] = ironOutReversions(updateGroups) editableMappingUpdates = updatesIroned.flatMap { case a: EditableMappingUpdateAction => Some(a.withActionTracingId(newTracingId)) case _ => None @@ -795,11 +812,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield updatesByAnnotation.flatten def mergeEditableMappings(annotationIds: List[String], + newAnnotationId: String, newVolumeTracingId: String, tracingsWithIds: List[(VolumeTracing, String)], - persist: Boolean)(implicit ec: ExecutionContext): Fox[Unit] = + persist: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = if (tracingsWithIds.forall(tracingWithId => 
tracingWithId._1.getHasEditableMapping)) { for { + before <- Instant.nowFox _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) @@ -807,12 +826,22 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" linearizedEditableMappingUpdates: List[UpdateAction] <- mergeEditableMappingUpdates(annotationIds, newVolumeTracingId) + targetVersion = linearizedEditableMappingUpdates.length // TODO if persist, store the linearized updates - // _ <- editableMappingService.merge(newTracingId, tracingsWithIds.map(_._2), remoteFallbackLayer) + editableMappingInfo = editableMappingService.create(baseMappingName) + updater = editableMappingUpdaterFor(newAnnotationId, + newVolumeTracingId, + remoteFallbackLayer, + editableMappingInfo, + 0L, + targetVersion) + _ <- updater.applyUpdatesAndSave(editableMappingInfo, linearizedEditableMappingUpdates) + _ = logger.info(s"Merging ${tracingsWithIds.length} editable mappings took ${Instant.since(before)}") } yield () } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty } else { Fox.failure("Cannot merge annotations with and without editable mappings") } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index f68d518f812..3c7e99e09c2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -186,6 +186,7 @@ class TSAnnotationController @Inject()( persist = persist) mergeEditableMappingsResultBox <- annotationService .mergeEditableMappings(request.body, + newAnnotationId, newVolumeId, volumeTracings.zip(volumeLayers.map(_.tracingId)), persist) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 973e811f204..600e6b5349c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -511,45 +511,4 @@ class EditableMappingService @Inject()( neighborNodes } - /* - def merge(newTracingId: String, tracingIds: List[String], remoteFallbackLayer: RemoteFallbackLayer)( - implicit tc: TokenContext): Fox[Unit] = - for { - firstTracingId <- tracingIds.headOption.toFox - before = Instant.now - _ <- duplicate(firstTracingId, newTracingId, version = None, Some(remoteFallbackLayer)) - _ <- Fox.serialCombined(tracingIds.tail)(tracingId => mergeInto(newTracingId, tracingId, remoteFallbackLayer)) - _ = logger.info(s"Merging ${tracingIds.length} editable mappings took ${Instant.since(before)}") - } yield () - - // read as: merge source into target (mutate target) - private def mergeInto(targetTracingId: String, sourceTracingId: String, remoteFallbackLayer: RemoteFallbackLayer)( - implicit tc: TokenContext): Fox[Unit] = - for { - targetNewestVersion <- getClosestMaterializableVersionOrZero(targetTracingId, None) - sourceNewestMaterializedWithVersion <- getInfoAndActualVersion(sourceTracingId, None, remoteFallbackLayer) - sourceNewestVersion = 
sourceNewestMaterializedWithVersion._2 - updateActionsWithVersions <- getUpdateActionsWithVersions(sourceTracingId, sourceNewestVersion, 0L) - updateActionsToApply = updateActionsWithVersions.map(_._2).reverse.flatten - updater = new EditableMappingUpdater( - targetTracingId, - sourceNewestMaterializedWithVersion._1.baseMappingName, - targetNewestVersion, - targetNewestVersion + sourceNewestVersion, - remoteFallbackLayer, - tc, - remoteDatastoreClient, - this, - tracingDataStore, - relyOnAgglomerateIds = false - ) - _ <- updater.applyUpdatesAndSave(sourceNewestMaterializedWithVersion._1, updateActionsToApply) - _ <- Fox.serialCombined(updateActionsWithVersions) { updateActionsWithVersion => - tracingDataStore.editableMappingUpdates.put(targetTracingId, - updateActionsWithVersion._1 + targetNewestVersion, - updateActionsWithVersion._2) - } - } yield () - */ - } From 01bc25294447cab3fe54cc561b102e921756685b Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 12 Nov 2024 11:29:12 +0100 Subject: [PATCH 170/361] save linearized updates after merging editable mappings --- .../annotation/TSAnnotationService.scala | 30 ++++++++++++----- .../controllers/TSAnnotationController.scala | 33 +++++++++++-------- 2 files changed, 40 insertions(+), 23 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index ef1182d66a8..745a760796e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -317,7 +317,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss idInfoUpdaterTuples <- Fox.serialCombined(volumeWithEditableMapping) { case (volumeTracing, volumeTracingId) => for { - 
editableMappingInfo <- getEditableMappingInfoFromStore(volumeTracingId, annotationWithTracings.version) + editableMappingInfo <- getEditableMappingInfoRaw(volumeTracingId, annotationWithTracings.version) updater <- editableMappingUpdaterFor(annotationId, volumeTracingId, volumeTracing, @@ -329,8 +329,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield annotationWithTracings.copy(editableMappingsByTracingId = idInfoUpdaterTuples.toMap) } - private def getEditableMappingInfoFromStore(volumeTracingId: String, - version: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] = + private def getEditableMappingInfoRaw(volumeTracingId: String, + version: Long): Fox[VersionedKeyValuePair[EditableMappingInfo]] = tracingDataStore.editableMappingsInfo.get(volumeTracingId, version = Some(version))( fromProtoBytes[EditableMappingInfo]) @@ -815,19 +815,30 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss newAnnotationId: String, newVolumeTracingId: String, tracingsWithIds: List[(VolumeTracing, String)], - persist: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Unit] = + persist: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { before <- Instant.nowFox _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) - remoteFallbackLayer <- remoteFallbackLayers.headOption.toFox - _ <- bool2Fox(remoteFallbackLayers.forall(_ == remoteFallbackLayer)) ?~> "Cannot merge editable mappings based on different dataset layers" + remoteFallbackLayer <- SequenceUtils.findUniqueElement(remoteFallbackLayers) ?~> "Cannot merge editable mappings based on different dataset layers" + 
editableMappingInfos <- Fox.serialCombined(tracingsWithIds) { tracingWithId => + tracingDataStore.editableMappingsInfo.get(tracingWithId._2)(fromProtoBytes[EditableMappingInfo]) + } + baseMappingName <- SequenceUtils.findUniqueElement(editableMappingInfos.map(_.value.baseMappingName)) ?~> "Cannot merge editable mappings based on different base mappings" linearizedEditableMappingUpdates: List[UpdateAction] <- mergeEditableMappingUpdates(annotationIds, newVolumeTracingId) targetVersion = linearizedEditableMappingUpdates.length - // TODO if persist, store the linearized updates + _ <- Fox.runIf(persist) { + var updateVersion = 1L + Fox.serialCombined(linearizedEditableMappingUpdates) { update: UpdateAction => + for { + _ <- tracingDataStore.annotationUpdates.put(newVolumeTracingId, updateVersion, Json.toJson(List(update))) + _ = updateVersion += 1 + } yield () + } + } editableMappingInfo = editableMappingService.create(baseMappingName) updater = editableMappingUpdaterFor(newAnnotationId, newVolumeTracingId, @@ -836,8 +847,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss 0L, targetVersion) _ <- updater.applyUpdatesAndSave(editableMappingInfo, linearizedEditableMappingUpdates) - _ = logger.info(s"Merging ${tracingsWithIds.length} editable mappings took ${Instant.since(before)}") - } yield () + _ = logger.info( + s"Merging ${tracingsWithIds.length} editable mappings took ${Instant.since(before)} (applied ${linearizedEditableMappingUpdates.length} updates)") + } yield targetVersion } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty } else { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 3c7e99e09c2..774fd35c95d 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -156,6 +156,7 @@ class TSAnnotationController @Inject()( } } + // TODO test with skeleton-only, volume-only, editable-mapping-volume only def mergedFromIds(persist: Boolean, newAnnotationId: String): Action[List[String]] = Action.async(validateJson[List[String]]) { implicit request => log() { @@ -179,11 +180,6 @@ class TSAnnotationController @Inject()( Some(TracingSelector(l.tracingId)) }, applyUpdates = true) .map(_.flatten) - mergedVolumeStats <- volumeTracingService.mergeVolumeData(volumeLayers.map(_.tracingId), - volumeTracings, - newVolumeId, - newVersion = 0L, - persist = persist) mergeEditableMappingsResultBox <- annotationService .mergeEditableMappings(request.body, newAnnotationId, @@ -191,15 +187,20 @@ class TSAnnotationController @Inject()( volumeTracings.zip(volumeLayers.map(_.tracingId)), persist) .futureBox - newEditableMappingIdOpt <- mergeEditableMappingsResultBox match { - case Full(()) => Fox.successful(Some(newVolumeId)) - case Empty => Fox.successful(None) - case f: Failure => f.toFox + (newMappingName: Option[String], newTargetVersion: Long) <- mergeEditableMappingsResultBox match { + case Full(targetVersion) => Fox.successful((Some(newVolumeId), targetVersion)) + case Empty => Fox.successful((None, 0L)) + case f: Failure => f.toFox } + mergedVolumeStats <- volumeTracingService.mergeVolumeData(volumeLayers.map(_.tracingId), + volumeTracings, + newVolumeId, + newVersion = newTargetVersion, + persist = persist) mergedVolumeOpt <- Fox.runIf(volumeTracings.nonEmpty)( - volumeTracingService.merge(volumeTracings, mergedVolumeStats, newEditableMappingIdOpt)) + volumeTracingService.merge(volumeTracings, mergedVolumeStats, newMappingName)) _ <- Fox.runOptional(mergedVolumeOpt)( - volumeTracingService.save(_, 
Some(newVolumeId), version = 0, toTemporaryStore = !persist)) + volumeTracingService.save(_, Some(newVolumeId), version = newTargetVersion, toTemporaryStore = !persist)) skeletonTracings <- annotationService .findMultipleSkeletons(skeletonLayers.map { l => Some(TracingSelector(l.tracingId)) @@ -219,10 +220,14 @@ class TSAnnotationController @Inject()( `type` = AnnotationLayerTypeProto.Volume)) mergedLayers = Seq(mergedSkeletonLayerOpt, mergedVolumeLayerOpt).flatten firstAnnotation <- annotations.headOption.toFox - mergedAnnotation = firstAnnotation.withAnnotationLayers(mergedLayers) + mergedAnnotation = firstAnnotation + .withAnnotationLayers(mergedLayers) + .withEarliestAccessibleVersion(newTargetVersion) + .withVersion(newTargetVersion) _ <- Fox.runOptional(mergedSkeletonOpt)( - skeletonTracingService.save(_, Some(newSkeletonId), version = 0L, toTemporaryStore = !persist)) - _ <- tracingDataStore.annotations.put(newAnnotationId, 0L, mergedAnnotation) + skeletonTracingService + .save(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore = !persist)) + _ <- tracingDataStore.annotations.put(newAnnotationId, newTargetVersion, mergedAnnotation) } yield Ok(mergedAnnotation.toByteArray).as(protobufMimeType) } } From 39936d4163bdbb4651cac78e0ab9c6e7c00e79a0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 12 Nov 2024 11:34:09 +0100 Subject: [PATCH 171/361] use newVersion also for tracing layers in merged --- .../tracingstore/controllers/SkeletonTracingController.scala | 2 +- .../tracingstore/controllers/TSAnnotationController.scala | 5 +++-- .../tracingstore/controllers/VolumeTracingController.scala | 2 +- .../tracings/skeleton/SkeletonTracingService.scala | 4 ++-- .../tracingstore/tracings/volume/VolumeTracingService.scala | 3 ++- 5 files changed, 9 insertions(+), 7 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 431bc0ead7e..27cff354817 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -99,7 +99,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val tracings: List[Option[SkeletonTracing]] = request.body for { - mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracings.flatten)) + mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracings.flatten, newVersion = 0L)) processedTracing = skeletonTracingService.remapTooLargeTreeIds(mergedTracing) newId <- skeletonTracingService.save(processedTracing, None, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 774fd35c95d..4e52b335ad0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -198,7 +198,8 @@ class TSAnnotationController @Inject()( newVersion = newTargetVersion, persist = persist) mergedVolumeOpt <- Fox.runIf(volumeTracings.nonEmpty)( - volumeTracingService.merge(volumeTracings, mergedVolumeStats, newMappingName)) + volumeTracingService + .merge(volumeTracings, mergedVolumeStats, newMappingName, newVersion = newTargetVersion)) _ <- Fox.runOptional(mergedVolumeOpt)( volumeTracingService.save(_, Some(newVolumeId), version = newTargetVersion, toTemporaryStore = !persist)) skeletonTracings <- 
annotationService @@ -207,7 +208,7 @@ class TSAnnotationController @Inject()( }, applyUpdates = true) .map(_.flatten) mergedSkeletonOpt <- Fox.runIf(skeletonTracings.nonEmpty)( - skeletonTracingService.merge(skeletonTracings).toFox) + skeletonTracingService.merge(skeletonTracings, newVersion = newTargetVersion).toFox) mergedSkeletonLayerOpt = mergedSkeletonOpt.map( _ => AnnotationLayerProto(name = mergedSkeletonName, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 377e5563fb2..539c1f403f2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -166,7 +166,7 @@ class VolumeTracingController @Inject()( tracings = request.body shouldCreateSegmentIndex = volumeSegmentIndexService.shouldCreateSegmentIndexForMerged(tracings.flatten) mt <- volumeTracingService - .merge(tracings.flatten, MergedVolumeStats.empty(shouldCreateSegmentIndex), Empty) + .merge(tracings.flatten, MergedVolumeStats.empty(shouldCreateSegmentIndex), Empty, newVersion = 0L) .toFox // segment lists for multi-volume uploads are not supported yet, compare https://github.com/scalableminds/webknossos/issues/6887 diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index ad6c21d13d7..5e09d8c9894 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -58,13 +58,13 @@ class SkeletonTracingService @Inject()( if (fromTask) newTracing.clearBoundingBox else newTracing } - def merge(tracings: Seq[SkeletonTracing]): Box[SkeletonTracing] = + def merge(tracings: Seq[SkeletonTracing], newVersion: Long): Box[SkeletonTracing] = for { tracing <- tracings.map(Full(_)).reduceLeft(mergeTwo) } yield tracing.copy( createdTimestamp = System.currentTimeMillis(), - version = 0L, + version = newVersion, ) private def mergeTwo(tracingA: Box[SkeletonTracing], tracingB: Box[SkeletonTracing]): Box[SkeletonTracing] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index b2c27133e86..56a2df60978 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -643,7 +643,8 @@ class VolumeTracingService @Inject()( def merge(tracings: Seq[VolumeTracing], mergedVolumeStats: MergedVolumeStats, - newEditableMappingIdOpt: Option[String]): Box[VolumeTracing] = { + newEditableMappingIdOpt: Option[String], + newVersion: Long): Box[VolumeTracing] = { def mergeTwoWithStats(tracingAWithIndex: Box[(VolumeTracing, Int)], tracingBWithIndex: Box[(VolumeTracing, Int)]): Box[(VolumeTracing, Int)] = for { From 91449a8553dfe2052656702c8450b4a4b0148f4b Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 12 Nov 2024 13:52:38 +0100 Subject: [PATCH 172/361] fix volume version --- .../tracingstore/tracings/volume/VolumeTracingService.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 56a2df60978..038161ab07f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -660,7 +660,7 @@ class VolumeTracingService @Inject()( } yield tracing.copy( createdTimestamp = System.currentTimeMillis(), - version = 0L, + version = newVersion, mappingName = newEditableMappingIdOpt, hasSegmentIndex = Some(mergedVolumeStats.createdSegmentIndex) ) From 8f30c3fd38739f0c2c36ceec9e44ff4255ba1046 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 13 Nov 2024 11:37:47 +0100 Subject: [PATCH 173/361] adapt parsing bucket keys (no more morton index) --- .../tracings/volume/VolumeTracingBucketHelper.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 2272915921b..9e7bd3a5796 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -104,7 +104,7 @@ trait BucketKeys extends WKWDataFormatHelper with AdditionalCoordinateKey { } private def parseBucketKeyXYZ(key: String) = { - val keyRx = "([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/-?\\d+-\\[(\\d+),(\\d+),(\\d+)]".r + val keyRx = "([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/\\[(\\d+),(\\d+),(\\d+)]".r key match { case keyRx(name, magStr, xStr, yStr, zStr) => 
getBucketPosition(xStr, yStr, zStr, magStr, None).map(bucketPosition => (name, bucketPosition)) @@ -117,7 +117,7 @@ trait BucketKeys extends WKWDataFormatHelper with AdditionalCoordinateKey { key: String, additionalAxes: Seq[AdditionalAxis]): Option[(String, BucketPosition)] = { val additionalCoordinateCapture = Array.fill(additionalAxes.length)("(\\d+)").mkString(",") - val keyRx = s"([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/-?\\d+-\\[$additionalCoordinateCapture]\\[(\\d+),(\\d+),(\\d+)]".r + val keyRx = s"([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/\\[$additionalCoordinateCapture]\\[(\\d+),(\\d+),(\\d+)]".r val matchOpt = keyRx.findFirstMatchIn(key) matchOpt match { case Some(aMatch) => From 11ffa470e2f3e0169648da7d54d03245c4138b16 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 13 Nov 2024 11:56:38 +0100 Subject: [PATCH 174/361] WIP iron out reversion fold during merge --- .../annotation/TSAnnotationService.scala | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 745a760796e..2298fcd44da 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -791,9 +791,20 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) } yield newTracingId - private def ironOutReversions(updateGroups: Seq[(Long, List[UpdateAction])]): Seq[UpdateAction] = - updateGroups.flatMap(_._2) // TODO iron out reversions. - // TODO: note: in each group the updates might have to be reversed? 
+ private def ironOutReversions(updateGroups: Seq[(Long, Seq[UpdateAction])]): Seq[UpdateAction] = { + val ironedOutGroups: Seq[Seq[UpdateAction]] = updateGroups.foldLeft[Seq[Seq[UpdateAction]]](Seq()) { + (collected: Seq[Seq[UpdateAction]], updateGroupWithVersion) => + val revertSourceVersionOpt = revertSourceVersionFromUpdates(updateGroupWithVersion._2) + collected + } + ironedOutGroups.reverse.flatten + } + + private def revertSourceVersionFromUpdates(updates: Seq[UpdateAction]): Option[Long] = + updates.flatMap { + case u: RevertToVersionAnnotationAction => Some(u.sourceVersion) + case _ => None + }.headOption private def mergeEditableMappingUpdates(annotationIds: List[String], newTracingId: String)( implicit ec: ExecutionContext): Fox[List[EditableMappingUpdateAction]] = From 9eb780c188ed7757e6becce0c700e5d79c063495 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 13 Nov 2024 13:46:26 +0100 Subject: [PATCH 175/361] iron out reversion folds in update history before merging editable mappings --- .../UpdateGroupHandlingUnitTestSuite.scala | 51 ++++++++++++++++++- .../annotation/TSAnnotationService.scala | 17 +------ .../annotation/UpdateGroupHandling.scala | 46 +++++++++++++++-- 3 files changed, 92 insertions(+), 22 deletions(-) diff --git a/test/backend/UpdateGroupHandlingUnitTestSuite.scala b/test/backend/UpdateGroupHandlingUnitTestSuite.scala index a2d27f7f00a..3012f03159d 100644 --- a/test/backend/UpdateGroupHandlingUnitTestSuite.scala +++ b/test/backend/UpdateGroupHandlingUnitTestSuite.scala @@ -16,7 +16,7 @@ class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling )), (6L, List( - RevertToVersionAnnotationAction(sourceVersion = 1), + RevertToVersionAnnotationAction(sourceVersion = 1) )), (7L, List( @@ -27,7 +27,7 @@ class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling List( MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), MergeTreeSkeletonAction(sourceId = 2, 
targetId = 3, actionTracingId = Dummies.tracingId) - )), + )) ) val res = regroupByIsolationSensitiveActions(updateGroupsBefore) assert(res.length == 3) @@ -36,4 +36,51 @@ class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling } } + "ironOutReverts" should { + "work" in { + val updateGroupsBefore = List( + (6L, + List( + MergeTreeSkeletonAction(sourceId = 7, targetId = 7, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 8, targetId = 8, actionTracingId = Dummies.tracingId) + )), + (5L, + List( + RevertToVersionAnnotationAction(sourceVersion = 2) + )), + (4L, + List( + // Should be dropped, since we jump from 5 to 2 + RevertToVersionAnnotationAction(sourceVersion = 1) + )), + (3L, + List( + // Should be dropped, since we jump from 5 to 2 + MergeTreeSkeletonAction(sourceId = 5, targetId = 5, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 6, targetId = 6, actionTracingId = Dummies.tracingId) + )), + (2L, + List( + MergeTreeSkeletonAction(sourceId = 3, targetId = 3, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 4, targetId = 4, actionTracingId = Dummies.tracingId) + )), + (1L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 1, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 2, actionTracingId = Dummies.tracingId) + )) + ) + + val res = ironOutReverts(updateGroupsBefore) + assert(res.length == 6) + assert( + res.headOption.contains( + MergeTreeSkeletonAction(sourceId = 1, targetId = 1, actionTracingId = Dummies.tracingId))) + assert( + res.lastOption.contains( + MergeTreeSkeletonAction(sourceId = 8, targetId = 8, actionTracingId = Dummies.tracingId))) + assert(!res.contains(MergeTreeSkeletonAction(sourceId = 6, targetId = 6, actionTracingId = Dummies.tracingId))) + } + } + } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 2298fcd44da..c6bf1101a8d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -791,21 +791,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) } yield newTracingId - private def ironOutReversions(updateGroups: Seq[(Long, Seq[UpdateAction])]): Seq[UpdateAction] = { - val ironedOutGroups: Seq[Seq[UpdateAction]] = updateGroups.foldLeft[Seq[Seq[UpdateAction]]](Seq()) { - (collected: Seq[Seq[UpdateAction]], updateGroupWithVersion) => - val revertSourceVersionOpt = revertSourceVersionFromUpdates(updateGroupWithVersion._2) - collected - } - ironedOutGroups.reverse.flatten - } - - private def revertSourceVersionFromUpdates(updates: Seq[UpdateAction]): Option[Long] = - updates.flatMap { - case u: RevertToVersionAnnotationAction => Some(u.sourceVersion) - case _ => None - }.headOption - private def mergeEditableMappingUpdates(annotationIds: List[String], newTracingId: String)( implicit ec: ExecutionContext): Fox[List[EditableMappingUpdateAction]] = for { @@ -813,7 +798,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { updateGroups <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple(annotationId)( fromJsonBytes[List[UpdateAction]]) - updatesIroned: Seq[UpdateAction] = ironOutReversions(updateGroups) + updatesIroned: Seq[UpdateAction] = ironOutReverts(updateGroups) editableMappingUpdates = updatesIroned.flatMap { case a: EditableMappingUpdateAction => Some(a.withActionTracingId(newTracingId)) case _ => None diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala index de77458a9e3..ca4271be261 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala @@ -1,8 +1,9 @@ package com.scalableminds.webknossos.tracingstore.annotation import collections.SequenceUtils +import com.typesafe.scalalogging.LazyLogging -trait UpdateGroupHandling { +trait UpdateGroupHandling extends LazyLogging { def regroupByIsolationSensitiveActions( updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[(Long, List[UpdateAction])] = { @@ -25,8 +26,45 @@ trait UpdateGroupHandling { } private def isIsolationSensitiveAction(a: UpdateAction): Boolean = a match { - case _: RevertToVersionAnnotationAction => true - case _: AddLayerAnnotationAction => true - case _ => false + case _: RevertToVersionAnnotationAction => true + case _: AddLayerAnnotationAction => true + case _ => false } + + // TODO comment, unit test? + // updateGroups must be sorted descending by version number + def ironOutReverts(updateGroups: Seq[(Long, Seq[UpdateAction])]): Seq[UpdateAction] = + updateGroups.headOption match { + case None => Seq() // no update groups, return no updates + case Some(firstUpdateGroup) => + val (ironedOutGroups: Seq[Seq[UpdateAction]], _) = + updateGroups.foldLeft[(Seq[Seq[UpdateAction]], Long)]((Seq(), firstUpdateGroup._1)) { + (collectedAndNextVersion: (Seq[Seq[UpdateAction]], Long), updateGroupWithVersion) => + val collected = collectedAndNextVersion._1 + val nextVersion = collectedAndNextVersion._2 + logger.info(s"nextVersion: $nextVersion") + if (updateGroupWithVersion._1 > nextVersion) { + // We have not yet reached nextVersion. 
Skip to next element, Do not collect, do not change nextVersion + (collected, nextVersion) + } else { + val revertSourceVersionOpt = revertSourceVersionFromUpdates(updateGroupWithVersion._2) + logger.info(f"revertSourceVersionOpt: $revertSourceVersionOpt") + revertSourceVersionOpt match { + // This group is a revert action. Set nextVersion to revertSourceVersion, do not collect this group + case Some(revertSourceVersion) => (collected, revertSourceVersion) + // This group is a normal action. Collect it, decrement nextVersion + // Note: we *prepend* the update group here, meaning the output will go from oldest to newest version + case None => (updateGroupWithVersion._2 +: collected, nextVersion - 1) + } + } + + } + ironedOutGroups.flatten + } + + private def revertSourceVersionFromUpdates(updates: Seq[UpdateAction]): Option[Long] = + updates.flatMap { + case u: RevertToVersionAnnotationAction => Some(u.sourceVersion) + case _ => None + }.headOption } From 669a557b379b71e0f61371f518c514500ac57b90 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 14 Nov 2024 11:21:51 +0100 Subject: [PATCH 176/361] add docstrings, remove applyUpdates bool --- .../TSRemoteWebknossosClient.scala | 1 + .../annotation/TSAnnotationService.scala | 38 ++++++++----------- .../annotation/UpdateGroupHandling.scala | 18 ++++++--- .../SkeletonTracingController.scala | 5 +-- .../controllers/TSAnnotationController.scala | 4 +- .../controllers/VolumeTracingController.scala | 5 +-- 6 files changed, 35 insertions(+), 36 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index f500843ebbc..f71adde04de 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ 
-91,6 +91,7 @@ class TSRemoteWebknossosClient @Inject()( rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/annotationId") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) + .silent .getWithJsonResponse[String] ) ?~> "annotation.idForTracing.failed" diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c6bf1101a8d..0cd406ac3ff 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -552,13 +552,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tracingDataStore.skeletons .get[SkeletonTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[SkeletonTracing]) - def findVolume(annotationId: String, - tracingId: String, - version: Option[Long] = None, - useCache: Boolean = true, // TODO - applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = + def findVolume(annotationId: String, tracingId: String, version: Option[Long] = None, useCache: Boolean = true // TODO + )(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = for { - annotation <- getWithTracings(annotationId, version) // TODO is applyUpdates still needed? 
+ annotation <- getWithTracings(annotationId, version) tracing <- annotation.getVolume(tracingId).toFox migrated <- volumeTracingMigrationService.migrateTracing(tracing) } yield migrated @@ -567,46 +564,41 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId: String, tracingId: String, version: Option[Long] = None, - useCache: Boolean = true, // TODO - applyUpdates: Boolean = false)(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] = + useCache: Boolean = true // TODO + )(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] = if (tracingId == TracingId.dummy) Fox.successful(skeletonTracingService.dummyTracing) else { for { - annotation <- getWithTracings(annotationId, version) // TODO is applyUpdates still needed? + annotation <- getWithTracings(annotationId, version) tracing <- annotation.getSkeleton(tracingId).toFox migrated <- skeletonTracingMigrationService.migrateTracing(tracing) } yield migrated } - def findMultipleVolumes(selectors: Seq[Option[TracingSelector]], - useCache: Boolean = true, - applyUpdates: Boolean = false)(implicit tc: TokenContext, - ec: ExecutionContext): Fox[List[Option[VolumeTracing]]] = + def findMultipleVolumes(selectors: Seq[Option[TracingSelector]], useCache: Boolean = true)( + implicit tc: TokenContext, + ec: ExecutionContext): Fox[List[Option[VolumeTracing]]] = Fox.combined { selectors.map { case Some(selector) => for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) - tracing <- findVolume(annotationId, selector.tracingId, selector.version, useCache, applyUpdates) - .map(Some(_)) + tracing <- findVolume(annotationId, selector.tracingId, selector.version, useCache).map(Some(_)) } yield tracing case None => Fox.successful(None) } } - // TODO build variant without TracingSelector and Option? 
- def findMultipleSkeletons(selectors: Seq[Option[TracingSelector]], - useCache: Boolean = true, - applyUpdates: Boolean = false)(implicit tc: TokenContext, - ec: ExecutionContext): Fox[List[Option[SkeletonTracing]]] = + def findMultipleSkeletons(selectors: Seq[Option[TracingSelector]], useCache: Boolean = true)( + implicit tc: TokenContext, + ec: ExecutionContext): Fox[List[Option[SkeletonTracing]]] = Fox.combined { selectors.map { case Some(selector) => for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) // TODO perf skip that if we already have it? - tracing <- findSkeleton(annotationId, selector.tracingId, selector.version, useCache, applyUpdates) - .map(Some(_)) + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) + tracing <- findSkeleton(annotationId, selector.tracingId, selector.version, useCache).map(Some(_)) } yield tracing case None => Fox.successful(None) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala index ca4271be261..d363dc0b20a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala @@ -5,14 +5,17 @@ import com.typesafe.scalalogging.LazyLogging trait UpdateGroupHandling extends LazyLogging { + /* + * Regroup update action groups, isolating the update actions that need it. + * (Currently RevertToVersionAnnotationAction and AddLayerAnnotationAction) + * Assumes they are already the only update in their respective group. 
+ * Compare unit test for UpdateGroupHandlingUnitTestSuite + */ def regroupByIsolationSensitiveActions( updateActionGroupsWithVersions: List[(Long, List[UpdateAction])]): List[(Long, List[UpdateAction])] = { val splitGroupLists: List[List[(Long, List[UpdateAction])]] = SequenceUtils.splitAndIsolate(updateActionGroupsWithVersions.reverse)(actionGroup => actionGroup._2.exists(updateAction => isIsolationSensitiveAction(updateAction))) - // TODO assert that the *groups* that contain revert actions contain nothing else - // TODO test this - splitGroupLists.flatMap { groupsToConcatenate: List[(Long, List[UpdateAction])] => concatenateUpdateActionGroups(groupsToConcatenate) } @@ -31,8 +34,13 @@ trait UpdateGroupHandling extends LazyLogging { case _ => false } - // TODO comment, unit test? - // updateGroups must be sorted descending by version number + /* + * Iron out reverts in a sequence of update groups. + * Scans for RevertToVersionActions and skips updates as specified by the reverts + * Expects updateGroups as Version-Seq[UpdateAction] tuples, SORTED DESCENDING by version number + * Returns a single Seq of UpdateAction, in to-apply order + * Compare unit test in UpdateGroupHandlingUnitTestSuite + */ def ironOutReverts(updateGroups: Seq[(Long, Seq[UpdateAction])]): Seq[UpdateAction] = updateGroups.headOption match { case None => Seq() // no update groups, return no updates diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 27cff354817..3b3f59a242e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -73,8 +73,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: 
SkeletonTracin accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findSkeleton(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( - "tracing.notFound") + tracing <- annotationService.findSkeleton(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } } @@ -85,7 +84,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - tracings <- annotationService.findMultipleSkeletons(request.body, applyUpdates = true) + tracings <- annotationService.findMultipleSkeletons(request.body) } yield { Ok(tracings.toByteArray).as(protobufMimeType) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 4e52b335ad0..444a71a6a73 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -178,7 +178,7 @@ class TSAnnotationController @Inject()( volumeTracings <- annotationService .findMultipleVolumes(volumeLayers.map { l => Some(TracingSelector(l.tracingId)) - }, applyUpdates = true) + }) .map(_.flatten) mergeEditableMappingsResultBox <- annotationService .mergeEditableMappings(request.body, @@ -205,7 +205,7 @@ class TSAnnotationController @Inject()( skeletonTracings <- annotationService .findMultipleSkeletons(skeletonLayers.map { l => Some(TracingSelector(l.tracingId)) - }, applyUpdates = true) + }) .map(_.flatten) mergedSkeletonOpt <- 
Fox.runIf(skeletonTracings.nonEmpty)( skeletonTracingService.merge(skeletonTracings, newVersion = newTargetVersion).toFox) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 539c1f403f2..f93805edbf2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -117,8 +117,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findVolume(annotationId, tracingId, version, applyUpdates = true) ?~> Messages( - "tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } } @@ -129,7 +128,7 @@ class VolumeTracingController @Inject()( log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - tracings <- annotationService.findMultipleVolumes(request.body, applyUpdates = true) + tracings <- annotationService.findMultipleVolumes(request.body) } yield { Ok(tracings.toByteArray).as(protobufMimeType) } From 4eb3911534cf7d6e240cfec20dc395155d703dd2 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 14 Nov 2024 11:38:34 +0100 Subject: [PATCH 177/361] fix merging skeleton-only, remove makeMappingEditable route --- frontend/javascripts/admin/admin_rest_api.ts | 14 -------------- .../annotation/TSAnnotationService.scala | 2 +- .../controllers/EditableMappingController.scala | 16 ---------------- ....scalableminds.webknossos.tracingstore.routes | 1 - 4 files 
changed, 1 insertion(+), 32 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 572205c8889..01e1ca4a96e 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1585,20 +1585,6 @@ export function fetchMapping( ); } -export function makeMappingEditable( - tracingStoreUrl: string, - tracingId: string, -): Promise { - return doWithToken((token) => - Request.receiveJSON( - `${tracingStoreUrl}/tracings/mapping/${tracingId}/makeMappingEditable?token=${token}`, - { - method: "POST", - }, - ), - ); -} - export function getEditableMappingInfo( tracingStoreUrl: string, tracingId: string, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 0cd406ac3ff..12b2f752b7d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -804,7 +804,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss newVolumeTracingId: String, tracingsWithIds: List[(VolumeTracing, String)], persist: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = - if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { + if (tracingsWithIds.nonEmpty && tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { before <- Instant.nowFox _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 44f0724c5e6..7ac752929fa 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -28,22 +28,6 @@ class EditableMappingController @Inject()( editableMappingService: EditableMappingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller { - // TODO remove once frontend sends update action - def makeMappingEditable(tracingId: String): Action[AnyContent] = - Action.async { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findVolume(annotationId, tracingId) - tracingMappingName <- tracing.mappingName ?~> "annotation.noMappingSet" - editableMappingInfo = editableMappingService.create(tracingMappingName) - infoJson = editableMappingService.infoJson(tracingId = tracingId, editableMappingInfo = editableMappingInfo) - } yield Ok(infoJson) - } - } - } - def editableMappingInfo(tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 4057054f553..380441562c1 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -34,7 +34,6 @@ POST /volume/getMultiple POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(persist: Boolean) # 
Editable Mappings -POST /mapping/:tracingId/makeMappingEditable @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.makeMappingEditable(tracingId: String) GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, version: Option[Long]) GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String) From f422831abd91b3f1c8d420037317673c61e1df6c Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 14 Nov 2024 11:43:02 +0100 Subject: [PATCH 178/361] remove CompactVolumeUpdateAction --- .../annotation/UpdateActions.scala | 2 - .../tracings/volume/VolumeUpdateActions.scala | 42 ------------------- 2 files changed, 44 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index e10789e0210..e9a6443a373 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -24,7 +24,6 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ UpdateUserBoundingBoxesSkeletonAction } import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - CompactVolumeUpdateAction, CreateSegmentVolumeAction, DeleteSegmentDataVolumeAction, DeleteSegmentVolumeAction, @@ -193,7 +192,6 @@ object UpdateAction { Json.obj("name" -> "deleteSegment", "value" -> Json.toJson(s)(DeleteSegmentVolumeAction.jsonFormat)) case s: 
UpdateSegmentGroupsVolumeAction => Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) - case s: CompactVolumeUpdateAction => Json.toJson(s)(CompactVolumeUpdateAction.compactVolumeUpdateActionFormat) case s: UpdateMappingNameVolumeAction => Json.obj("name" -> "updateMappingName", "value" -> Json.toJson(s)(UpdateMappingNameVolumeAction.jsonFormat)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 20cb63206ed..61014e9632b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -338,48 +338,6 @@ case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegme override def withActionTracingId(newTracingId: String): LayerUpdateAction = this.copy(actionTracingId = newTracingId) } - -// TODO this now exists only for UpdateBucket. Make it a slimmed down version of that rather than generic? 
-case class CompactVolumeUpdateAction(name: String, - value: JsObject, - actionTracingId: String, - actionTimestamp: Option[Long], - actionAuthorId: Option[String] = None, - info: Option[String] = None) - extends VolumeUpdateAction { - override def addTimestamp(timestamp: Long): VolumeUpdateAction = this.copy(actionTimestamp = Some(timestamp)) - override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = - this.copy(actionAuthorId = authorId) - override def addInfo(info: Option[String]): UpdateAction = this.copy(info = info) - override def withActionTracingId(newTracingId: String): LayerUpdateAction = - this.copy(actionTracingId = newTracingId) -} - -object CompactVolumeUpdateAction { - implicit object compactVolumeUpdateActionFormat extends Format[CompactVolumeUpdateAction] { - override def reads(json: JsValue): JsResult[CompactVolumeUpdateAction] = - for { - name <- (json \ "name").validate[String] - actionTracingId <- (json \ "value" \ "actionTracingId").validate[String] - actionTimestamp <- (json \ "value" \ "actionTimestamp").validateOpt[Long] - actionAuthorId <- (json \ "value" \ "actionAuthorId").validateOpt[String] - info <- (json \ "value" \ "info").validateOpt[String] - value <- (json \ "value") - .validate[JsObject] - .map(_ - "actionTimestamp" - "actionTimestamp" - "actionAuthorId" - "info") - } yield CompactVolumeUpdateAction(name, value, actionTracingId, actionTimestamp, actionAuthorId, info) - - override def writes(o: CompactVolumeUpdateAction): JsValue = - Json.obj( - "name" -> o.name, - "value" -> (Json.obj("actionTracingId" -> o.actionTracingId, - "actionTimestamp" -> o.actionTimestamp, - "actionAuthorId" -> o.actionAuthorId, - "info" -> o.info) ++ o.value) - ) - } -} - object UpdateBucketVolumeAction { implicit val jsonFormat: OFormat[UpdateBucketVolumeAction] = Json.format[UpdateBucketVolumeAction] } From 8671a7f52b460c4417ea5dde60c43872c707d109 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 14 Nov 2024 11:45:55 +0100 Subject: 
[PATCH 179/361] remove outdated todo comments --- .../tracingstore/controllers/TSAnnotationController.scala | 1 - .../tracings/editablemapping/EditableMappingStreams.scala | 2 -- 2 files changed, 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 444a71a6a73..32c645f370d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -156,7 +156,6 @@ class TSAnnotationController @Inject()( } } - // TODO test with skeleton-only, volume-only, editable-mapping-volume only def mergedFromIds(persist: Boolean, newAnnotationId: String): Action[List[String]] = Action.async(validateJson[List[String]]) { implicit request => log() { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala index e9c7422ecd3..ec8d865ccf2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingStreams.scala @@ -61,7 +61,6 @@ class VersionedAgglomerateToGraphIterator(prefix: String, case None => getNextNonRevertedGraph.get } nextGraph = None - // TODO parse graph key? (=agglomerate id) (nextRes.key, nextRes.value, nextRes.version) } @@ -116,7 +115,6 @@ class VersionedSegmentToAgglomerateChunkIterator(prefix: String, case None => getNextNonRevertedChunk.get } nextChunk = None - // TODO parse chunk key? 
(nextRes.key, nextRes.value, nextRes.version) } From 4d0a81399de59576687f0af4c92588aec3c8d9f6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 14 Nov 2024 11:54:00 +0100 Subject: [PATCH 180/361] unify naming, no more persist, use toTemporaryStore --- app/controllers/AnnotationController.scala | 2 +- app/controllers/AnnotationIOController.scala | 6 ++---- app/models/annotation/AnnotationMerger.scala | 13 +++++++------ .../annotation/WKRemoteTracingStoreClient.scala | 11 ++++------- .../handler/ProjectInformationHandler.scala | 2 +- .../annotation/handler/TaskInformationHandler.scala | 2 +- .../handler/TaskTypeInformationHandler.scala | 2 +- .../annotation/TSAnnotationService.scala | 6 +++--- .../controllers/SkeletonTracingController.scala | 4 ++-- .../controllers/TSAnnotationController.scala | 11 +++++------ .../controllers/VolumeTracingController.scala | 4 ++-- .../tracings/volume/VolumeTracingService.scala | 13 +++++++------ ...com.scalableminds.webknossos.tracingstore.routes | 4 ++-- 13 files changed, 38 insertions(+), 42 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 5cf5a39cc7d..dc7eac3ccf7 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -117,7 +117,7 @@ class AnnotationController @Inject()( for { annotationA <- provider.provideAnnotation(typ, id, request.identity) ~> NOT_FOUND annotationB <- provider.provideAnnotation(mergedTyp, mergedId, request.identity) ~> NOT_FOUND - mergedAnnotation <- annotationMerger.mergeTwo(annotationA, annotationB, persistTracing = true, request.identity) ?~> "annotation.merge.failed" + mergedAnnotation <- annotationMerger.mergeTwo(annotationA, annotationB, request.identity) ?~> "annotation.merge.failed" restrictions = annotationRestrictionDefaults.defaultsFor(mergedAnnotation) _ <- restrictions.allowAccess(request.identity) ?~> Messages("notAllowed") ~> FORBIDDEN _ <- 
annotationDAO.insertOne(mergedAnnotation) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 10d81c5b896..a1c6997b2ad 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -198,8 +198,7 @@ class AnnotationIOController @Inject()( mergedTracingId <- client.mergeVolumeTracingsByContents( VolumeTracings(uploadedVolumeLayersFlat.map(v => VolumeTracingOpt(Some(v.tracing)))), dataSource, - uploadedVolumeLayersFlat.map(v => v.getDataZipFrom(otherFiles)), - persistTracing = true + uploadedVolumeLayersFlat.map(v => v.getDataZipFrom(otherFiles)) ) } yield List( @@ -218,8 +217,7 @@ class AnnotationIOController @Inject()( else { for { mergedTracingId <- tracingStoreClient.mergeSkeletonTracingsByContents( - SkeletonTracings(skeletonTracings.map(t => SkeletonTracingOpt(Some(t)))), - persistTracing = true) + SkeletonTracings(skeletonTracings.map(t => SkeletonTracingOpt(Some(t))))) } yield List( AnnotationLayer(mergedTracingId, diff --git a/app/models/annotation/AnnotationMerger.scala b/app/models/annotation/AnnotationMerger.scala index ec538208b15..ec80b38ccd1 100644 --- a/app/models/annotation/AnnotationMerger.scala +++ b/app/models/annotation/AnnotationMerger.scala @@ -21,12 +21,11 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr def mergeTwo( annotationA: Annotation, annotationB: Annotation, - persistTracing: Boolean, issuingUser: User )(implicit ctx: DBAccessContext): Fox[Annotation] = mergeN( ObjectId.generate, - persistTracing, + toTemporaryStore = false, issuingUser._id, annotationB._dataset, annotationB._team, @@ -36,7 +35,7 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr def mergeN( newId: ObjectId, - persistTracing: Boolean, + toTemporaryStore: Boolean, userId: ObjectId, datasetId: ObjectId, teamId: ObjectId, @@ -47,7 +46,7 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, 
tracingStoreService: Tr Fox.empty else { for { - mergedAnnotationLayers <- mergeAnnotationsInTracingstore(annotations, datasetId, newId, persistTracing) ?~> "Failed to merge annotations in tracingstore." + mergedAnnotationLayers <- mergeAnnotationsInTracingstore(annotations, datasetId, newId, toTemporaryStore) ?~> "Failed to merge annotations in tracingstore." } yield { Annotation( newId, @@ -65,11 +64,13 @@ class AnnotationMerger @Inject()(datasetDAO: DatasetDAO, tracingStoreService: Tr annotations: List[Annotation], datasetId: ObjectId, newAnnotationId: ObjectId, - persist: Boolean)(implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] = + toTemporaryStore: Boolean)(implicit ctx: DBAccessContext): Fox[List[AnnotationLayer]] = for { dataset <- datasetDAO.findOne(datasetId) tracingStoreClient: WKRemoteTracingStoreClient <- tracingStoreService.clientFor(dataset) - mergedAnnotationProto <- tracingStoreClient.mergeAnnotationsByIds(annotations.map(_.id), newAnnotationId, persist) + mergedAnnotationProto <- tracingStoreClient.mergeAnnotationsByIds(annotations.map(_.id), + newAnnotationId, + toTemporaryStore) layers = mergedAnnotationProto.annotationLayers.map(AnnotationLayer.fromProto) } yield layers.toList diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 1afe4170a1c..51f8299723f 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -136,32 +136,29 @@ class WKRemoteTracingStoreClient( def mergeAnnotationsByIds(annotationIds: List[String], newAnnotationId: ObjectId, - persist: Boolean): Fox[AnnotationProto] = { + toTemporaryStore: Boolean): Fox[AnnotationProto] = { logger.debug(s"Called to merge ${annotationIds.length} annotations by ids." 
+ baseInfo) rpc(s"${tracingStore.url}/tracings/annotation/mergedFromIds").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("persist" -> persist.toString) + .addQueryString("toTemporaryStore" -> toTemporaryStore.toString) .addQueryString("newAnnotationId" -> newAnnotationId.toString) .postJsonWithProtoResponse[List[String], AnnotationProto](annotationIds)(AnnotationProto) } - def mergeSkeletonTracingsByContents(tracings: SkeletonTracings, persistTracing: Boolean): Fox[String] = { + def mergeSkeletonTracingsByContents(tracings: SkeletonTracings): Fox[String] = { logger.debug("Called to merge SkeletonTracings by contents." + baseInfo) rpc(s"${tracingStore.url}/tracings/skeleton/mergedFromContents").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("persist" -> persistTracing.toString) .postProtoWithJsonResponse[SkeletonTracings, String](tracings) } def mergeVolumeTracingsByContents(tracings: VolumeTracings, dataSource: DataSourceLike, - initialData: List[Option[File]], - persistTracing: Boolean): Fox[String] = { + initialData: List[Option[File]]): Fox[String] = { logger.debug("Called to merge VolumeTracings by contents." 
+ baseInfo) for { tracingId <- rpc(s"${tracingStore.url}/tracings/volume/mergedFromContents") .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("persist" -> persistTracing.toString) .postProtoWithJsonResponse[VolumeTracings, String](tracings) packedVolumeDataZips = packVolumeDataZips(initialData.flatten) _ = tracingDataSourceTemporaryStore.store(tracingId, dataSource) diff --git a/app/models/annotation/handler/ProjectInformationHandler.scala b/app/models/annotation/handler/ProjectInformationHandler.scala index 5a54e31d04e..4e14f208180 100755 --- a/app/models/annotation/handler/ProjectInformationHandler.scala +++ b/app/models/annotation/handler/ProjectInformationHandler.scala @@ -29,7 +29,7 @@ class ProjectInformationHandler @Inject()(annotationDAO: AnnotationDAO, _ <- assertNonEmpty(annotations) ?~> "project.noAnnotations" datasetId <- annotations.headOption.map(_._dataset).toFox mergedAnnotation <- annotationMerger.mergeN(projectId, - persistTracing = false, + toTemporaryStore = true, user._id, datasetId, project._team, diff --git a/app/models/annotation/handler/TaskInformationHandler.scala b/app/models/annotation/handler/TaskInformationHandler.scala index e57b5955c29..69ac879f03d 100755 --- a/app/models/annotation/handler/TaskInformationHandler.scala +++ b/app/models/annotation/handler/TaskInformationHandler.scala @@ -32,7 +32,7 @@ class TaskInformationHandler @Inject()(taskDAO: TaskDAO, project <- projectDAO.findOne(task._project) datasetId <- finishedAnnotations.headOption.map(_._dataset).toFox mergedAnnotation <- annotationMerger.mergeN(task._id, - persistTracing = false, + toTemporaryStore = true, user._id, datasetId, project._team, diff --git a/app/models/annotation/handler/TaskTypeInformationHandler.scala b/app/models/annotation/handler/TaskTypeInformationHandler.scala index d70427921aa..78f201b2b9a 100755 --- a/app/models/annotation/handler/TaskTypeInformationHandler.scala +++ 
b/app/models/annotation/handler/TaskTypeInformationHandler.scala @@ -34,7 +34,7 @@ class TaskTypeInformationHandler @Inject()(taskTypeDAO: TaskTypeDAO, user <- userOpt ?~> "user.notAuthorised" datasetId <- finishedAnnotations.headOption.map(_._dataset).toFox mergedAnnotation <- annotationMerger.mergeN(taskTypeId, - persistTracing = false, + toTemporaryStore = true, user._id, datasetId, taskType._team, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 12b2f752b7d..4f9cd3f3457 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -803,11 +803,11 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss newAnnotationId: String, newVolumeTracingId: String, tracingsWithIds: List[(VolumeTracing, String)], - persist: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = + toTemporaryStore: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = if (tracingsWithIds.nonEmpty && tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { before <- Instant.nowFox - _ <- bool2Fox(persist) ?~> "Cannot merge editable mappings without “persist” (trying to merge compound annotations?)" + _ <- bool2Fox(!toTemporaryStore) ?~> "Cannot merge editable mappings to temporary store (trying to merge compound annotations?)" remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) remoteFallbackLayer <- SequenceUtils.findUniqueElement(remoteFallbackLayers) ?~> "Cannot merge editable mappings based on different dataset layers" @@ -818,7 +818,7 @@ class 
TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss linearizedEditableMappingUpdates: List[UpdateAction] <- mergeEditableMappingUpdates(annotationIds, newVolumeTracingId) targetVersion = linearizedEditableMappingUpdates.length - _ <- Fox.runIf(persist) { + _ <- Fox.runIf(!toTemporaryStore) { var updateVersion = 1L Fox.serialCombined(linearizedEditableMappingUpdates) { update: UpdateAction => for { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 3b3f59a242e..654bb7a5863 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -92,7 +92,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin } } - def mergedFromContents(persist: Boolean): Action[SkeletonTracings] = + def mergedFromContents: Action[SkeletonTracings] = Action.async(validateProto[SkeletonTracings]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -103,7 +103,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin newId <- skeletonTracingService.save(processedTracing, None, processedTracing.version, - toTemporaryStore = !persist) + toTemporaryStore = false) } yield Ok(Json.toJson(newId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 32c645f370d..e2d7d9176ea 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -156,7 +156,7 @@ class TSAnnotationController @Inject()( } } - def mergedFromIds(persist: Boolean, newAnnotationId: String): Action[List[String]] = + def mergedFromIds(toTemporaryStore: Boolean, newAnnotationId: String): Action[List[String]] = Action.async(validateJson[List[String]]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -184,7 +184,7 @@ class TSAnnotationController @Inject()( newAnnotationId, newVolumeId, volumeTracings.zip(volumeLayers.map(_.tracingId)), - persist) + toTemporaryStore) .futureBox (newMappingName: Option[String], newTargetVersion: Long) <- mergeEditableMappingsResultBox match { case Full(targetVersion) => Fox.successful((Some(newVolumeId), targetVersion)) @@ -195,12 +195,12 @@ class TSAnnotationController @Inject()( volumeTracings, newVolumeId, newVersion = newTargetVersion, - persist = persist) + toTemporaryStore) mergedVolumeOpt <- Fox.runIf(volumeTracings.nonEmpty)( volumeTracingService .merge(volumeTracings, mergedVolumeStats, newMappingName, newVersion = newTargetVersion)) _ <- Fox.runOptional(mergedVolumeOpt)( - volumeTracingService.save(_, Some(newVolumeId), version = newTargetVersion, toTemporaryStore = !persist)) + volumeTracingService.save(_, Some(newVolumeId), version = newTargetVersion, toTemporaryStore)) skeletonTracings <- annotationService .findMultipleSkeletons(skeletonLayers.map { l => Some(TracingSelector(l.tracingId)) @@ -225,8 +225,7 @@ class TSAnnotationController @Inject()( .withEarliestAccessibleVersion(newTargetVersion) .withVersion(newTargetVersion) _ <- Fox.runOptional(mergedSkeletonOpt)( - skeletonTracingService - .save(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore = !persist)) + 
skeletonTracingService.save(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore)) _ <- tracingDataStore.annotations.put(newAnnotationId, newTargetVersion, mergedAnnotation) } yield Ok(mergedAnnotation.toByteArray).as(protobufMimeType) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index f93805edbf2..1959874e985 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -156,7 +156,7 @@ class VolumeTracingController @Inject()( } } - def mergedFromContents(persist: Boolean): Action[VolumeTracings] = + def mergedFromContents: Action[VolumeTracings] = Action.async(validateProto[VolumeTracings]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { @@ -171,7 +171,7 @@ class VolumeTracingController @Inject()( // segment lists for multi-volume uploads are not supported yet, compare https://github.com/scalableminds/webknossos/issues/6887 mergedTracing = mt.copy(segments = List.empty) - newId <- volumeTracingService.save(mergedTracing, None, mergedTracing.version, toTemporaryStore = !persist) + newId <- volumeTracingService.save(mergedTracing, None, mergedTracing.version, toTemporaryStore = false) } yield Ok(Json.toJson(newId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 038161ab07f..717a301f2af 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -720,11 +720,12 @@ class VolumeTracingService @Inject()( dataLayer.bucketProvider.bucketStream(Some(tracing.version)) } - def mergeVolumeData(tracingIds: Seq[String], - tracings: Seq[VolumeTracing], - newId: String, - newVersion: Long, - persist: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = { + def mergeVolumeData( + tracingIds: Seq[String], + tracings: Seq[VolumeTracing], + newId: String, + newVersion: Long, + toTemporaryStore: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = { val elementClass = tracings.headOption.map(_.elementClass).getOrElse(elementClassToProto(ElementClass.uint8)) val magSets = new mutable.HashSet[Set[Vec3Int]]() @@ -792,7 +793,7 @@ class VolumeTracingService @Inject()( bucketPosition, bucketBytes, newVersion, - toTemporaryStore = !persist, // TODO unify boolean direction + naming + toTemporaryStore, mergedAdditionalAxes) _ <- Fox.runIf(shouldCreateSegmentIndex)( updateSegmentIndex(segmentIndexBuffer, diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 380441562c1..08aa0e77a02 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -31,7 +31,7 @@ GET /volume/:tracingId/findData POST /volume/:tracingId/segmentStatistics/volume @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentVolume(tracingId: String) POST /volume/:tracingId/segmentStatistics/boundingBox @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentBoundingBox(tracingId: String) POST /volume/getMultiple 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple -POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents(persist: Boolean) +POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents # Editable Mappings GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, version: Option[Long]) @@ -70,7 +70,7 @@ GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates # Skeleton tracings POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() -POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents(persist: Boolean) +POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple From c7316fca480581911b73c667bae958485a4156d4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 14 Nov 2024 11:56:44 +0100 Subject: [PATCH 181/361] further unify naming --- .../annotation/TSAnnotationService.scala | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 4f9cd3f3457..db6ab3e7e7b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -552,7 +552,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tracingDataStore.skeletons .get[SkeletonTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[SkeletonTracing]) - def findVolume(annotationId: String, tracingId: String, version: Option[Long] = None, useCache: Boolean = true // TODO + def findVolume(annotationId: String, + tracingId: String, + version: Option[Long] = None, + fromTemporaryStore: Boolean = true // TODO )(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = for { annotation <- getWithTracings(annotationId, version) @@ -564,7 +567,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId: String, tracingId: String, version: Option[Long] = None, - useCache: Boolean = true // TODO + fromTemporaryStoreu: Boolean = true // TODO )(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] = if (tracingId == TracingId.dummy) Fox.successful(skeletonTracingService.dummyTracing) @@ -576,7 +579,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield migrated } - def findMultipleVolumes(selectors: Seq[Option[TracingSelector]], useCache: Boolean = true)( + def findMultipleVolumes(selectors: Seq[Option[TracingSelector]])( implicit tc: TokenContext, ec: ExecutionContext): Fox[List[Option[VolumeTracing]]] = Fox.combined { @@ -584,13 +587,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss 
case Some(selector) => for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) - tracing <- findVolume(annotationId, selector.tracingId, selector.version, useCache).map(Some(_)) + tracing <- findVolume(annotationId, selector.tracingId, selector.version).map(Some(_)) } yield tracing case None => Fox.successful(None) } } - def findMultipleSkeletons(selectors: Seq[Option[TracingSelector]], useCache: Boolean = true)( + def findMultipleSkeletons(selectors: Seq[Option[TracingSelector]])( implicit tc: TokenContext, ec: ExecutionContext): Fox[List[Option[SkeletonTracing]]] = Fox.combined { @@ -598,7 +601,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case Some(selector) => for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(selector.tracingId) - tracing <- findSkeleton(annotationId, selector.tracingId, selector.version, useCache).map(Some(_)) + tracing <- findSkeleton(annotationId, selector.tracingId, selector.version).map(Some(_)) } yield tracing case None => Fox.successful(None) } From 1cd857f3ad247a9d8c98858ab0d485f49b676705 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 14 Nov 2024 13:22:19 +0100 Subject: [PATCH 182/361] fix storing tracing after AddLayer action. 
skip reporting annotation updates to wk if they are empty --- .../annotation/TSAnnotationService.scala | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index db6ab3e7e7b..41fa8e13430 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -397,13 +397,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } private def applyUpdates( - annotation: AnnotationWithTracings, + annotationWithTracings: AnnotationWithTracings, annotationId: String, updates: List[UpdateAction], targetVersion: Long, reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { - logger.info(s"applying ${updates.length} to go from v${annotation.version} to v$targetVersion") + logger.info(s"applying ${updates.length} to go from v${annotationWithTracings.version} to v$targetVersion") def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = @@ -418,18 +418,19 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case _ => annotationWithTracingsFox } - if (updates.isEmpty) Full(annotation) + if (updates.isEmpty) Full(annotationWithTracings) else { for { - updated <- updateIter(Some(annotation.withNewUpdaters(annotation.version, targetVersion)), updates) + updated <- updateIter( + Some(annotationWithTracings.withNewUpdaters(annotationWithTracings.version, targetVersion)), + updates) updatedWithNewVerson = updated.withVersion(targetVersion) _ = logger.info(s"flushing v$targetVersion, 
with ${updated.skeletonStats}") _ <- updatedWithNewVerson.flushBufferedUpdates() _ <- flushUpdatedTracings(updatedWithNewVerson, updates) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) - _ <- Fox.runIf(reportChangesToWk)(remoteWebknossosClient.updateAnnotation( - annotationId, - updatedWithNewVerson.annotation)) // TODO perf: skip if annotation is identical + _ <- Fox.runIf(reportChangesToWk && annotationWithTracings.annotation != updated.annotation)( + remoteWebknossosClient.updateAnnotation(annotationId, updatedWithNewVerson.annotation)) } yield updatedWithNewVerson } } @@ -445,9 +446,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case _ => false } } - val tracingIdsWithUpdates = updates.flatMap { + val tracingIdsWithUpdates: Set[String] = updates.flatMap { case a: LayerUpdateAction => Some(a.actionTracingId) - case a: AddLayerAnnotationAction => Some(a.tracingId) + case a: AddLayerAnnotationAction => a.tracingId // tracingId is an option, but filled on save. 
Drop Nones case _ => None }.toSet for { From 9b904d6ddb355c7178567a31b5d0d88465622e3e Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 14 Nov 2024 14:50:48 +0100 Subject: [PATCH 183/361] never rely on editable mapping update action agglomerateIds --- .../annotation/TSAnnotationService.scala | 3 +- .../EditableMappingUpdateActions.scala | 7 +- .../EditableMappingUpdater.scala | 66 +++---------------- 3 files changed, 12 insertions(+), 64 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 41fa8e13430..2efa88ccb2e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -351,8 +351,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss remoteDatastoreClient, editableMappingService, this, - tracingDataStore, - relyOnAgglomerateIds = false // TODO should we? 
+ tracingDataStore ) private def editableMappingUpdaterFor( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala index 1e843d24bce..9a8dcf07e74 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdateActions.scala @@ -2,7 +2,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.webknossos.tracingstore.annotation.{LayerUpdateAction, UpdateAction} -import play.api.libs.json.Format.GenericFormat import play.api.libs.json._ trait EditableMappingUpdateAction extends LayerUpdateAction { @@ -11,7 +10,7 @@ trait EditableMappingUpdateAction extends LayerUpdateAction { // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. -case class SplitAgglomerateUpdateAction(agglomerateId: Long, +case class SplitAgglomerateUpdateAction(agglomerateId: Long, // Unused, we now look this up by position/segment segmentPosition1: Option[Vec3Int], segmentPosition2: Option[Vec3Int], segmentId1: Option[Long], @@ -36,8 +35,8 @@ object SplitAgglomerateUpdateAction { // we switched from positions to segment ids in https://github.com/scalableminds/webknossos/pull/7742. // Both are now optional to support applying old update actions stored in the db. 
-case class MergeAgglomerateUpdateAction(agglomerateId1: Long, - agglomerateId2: Long, +case class MergeAgglomerateUpdateAction(agglomerateId1: Long, // Unused, we now look this up by position/segment + agglomerateId2: Long, // Unused, we now look this up by position/segment segmentPosition1: Option[Vec3Int], segmentPosition2: Option[Vec3Int], segmentId1: Option[Long], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index fc545c9e8d7..bdce3b46fb7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -1,7 +1,6 @@ package com.scalableminds.webknossos.tracingstore.tracings.editablemapping import com.scalableminds.util.accesscontext.TokenContext -import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.AgglomerateGraph.{AgglomerateEdge, AgglomerateGraph} import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo @@ -42,8 +41,7 @@ class EditableMappingUpdater( remoteDatastoreClient: TSRemoteDatastoreClient, editableMappingService: EditableMappingService, annotationService: TSAnnotationService, - tracingDataStore: TracingDataStore, - relyOnAgglomerateIds: Boolean // False during merge and in case of multiple actions. 
Then, look up all agglomerate ids at positions + tracingDataStore: TracingDataStore ) extends KeyValueStoreImplicits with ReversionHelper with FoxImplicits @@ -138,7 +136,7 @@ class EditableMappingUpdater( update.segmentPosition2, update.segmentId2, update.mag)(tokenContext) - agglomerateId <- agglomerateIdForSplitAction(update, segmentId1) + agglomerateId <- agglomerateIdForSegmentId(segmentId1) agglomerateGraph <- agglomerateGraphForIdWithFallback(editableMappingInfo, agglomerateId) _ = if (segmentId1 == 0) logger.warn( @@ -146,7 +144,7 @@ class EditableMappingUpdater( _ = if (segmentId2 == 0) logger.warn( s"Split action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. Splitting outside of dataset?") - (graph1, graph2) <- tryo(splitGraph(agglomerateId, agglomerateGraph, update, segmentId1, segmentId2)) ?~> s"splitGraph failed while removing edge between segments $segmentId1 and $segmentId2" + (graph1, graph2) <- tryo(splitGraph(agglomerateGraph, segmentId1, segmentId2)) ?~> s"splitGraph failed while removing edge between segments $segmentId1 and $segmentId2" largestExistingAgglomerateId <- largestAgglomerateId(editableMappingInfo) agglomerateId2 = largestExistingAgglomerateId + 1L _ <- updateSegmentToAgglomerate(graph2.segments, agglomerateId2) @@ -154,26 +152,6 @@ class EditableMappingUpdater( _ = updateAgglomerateGraph(agglomerateId2, graph2) } yield editableMappingInfo.withLargestAgglomerateId(agglomerateId2) - private def agglomerateIdForSplitAction(updateAction: SplitAgglomerateUpdateAction, segmentId1: Long)( - implicit ec: ExecutionContext): Fox[Long] = - if (relyOnAgglomerateIds) { - Fox.successful(updateAction.agglomerateId) - } else { - agglomerateIdForSegmentId(segmentId1) - } - - private def agglomerateIdsForMergeAction(updateAction: MergeAgglomerateUpdateAction, - segmentId1: Long, - segmentId2: Long)(implicit ec: ExecutionContext): Fox[(Long, Long)] = - if 
(relyOnAgglomerateIds) { - Fox.successful((updateAction.agglomerateId1, updateAction.agglomerateId2)) - } else { - for { - agglomerateId1 <- agglomerateIdForSegmentId(segmentId1) - agglomerateId2 <- agglomerateIdForSegmentId(segmentId2) - } yield (agglomerateId1, agglomerateId2) - } - private def getFromSegmentToAgglomerateBuffer(chunkKey: String): Option[Map[Long, Long]] = segmentToAgglomerateBuffer.get(chunkKey).flatMap { case (chunkFromBuffer, isToBeReverted) => @@ -255,9 +233,7 @@ class EditableMappingUpdater( private def emptyAgglomerateGraph = AgglomerateGraph(Seq(), Seq(), Seq(), Seq()) - private def splitGraph(agglomerateId: Long, - agglomerateGraph: AgglomerateGraph, - update: SplitAgglomerateUpdateAction, + private def splitGraph(agglomerateGraph: AgglomerateGraph, segmentId1: Long, segmentId2: Long): (AgglomerateGraph, AgglomerateGraph) = { val edgesAndAffinitiesMinusOne: Seq[(AgglomerateEdge, Float)] = @@ -266,10 +242,6 @@ class EditableMappingUpdater( (from == segmentId1 && to == segmentId2) || (from == segmentId2 && to == segmentId1) } if (edgesAndAffinitiesMinusOne.length == agglomerateGraph.edges.length) { - if (relyOnAgglomerateIds) { - logger.warn( - s"Split action for editable mapping $tracingId: Edge to remove ($segmentId1 at ${update.segmentPosition1} in mag ${update.mag} to $segmentId2 at ${update.segmentPosition2} in mag ${update.mag} in agglomerate $agglomerateId) already absent. This split becomes a no-op.") - } (agglomerateGraph, emptyAgglomerateGraph) } else { val graph1Nodes: Set[Long] = @@ -359,17 +331,12 @@ class EditableMappingUpdater( _ = if (segmentId2 == 0) logger.warn( s"Merge action for editable mapping $tracingId: Looking up segment id at position ${update.segmentPosition2} in mag ${update.mag} returned invalid value zero. 
Merging outside of dataset?") - (agglomerateId1, agglomerateId2) <- agglomerateIdsForMergeAction(update, segmentId1, segmentId2) ?~> "Failed to look up agglomerate ids for merge action segments" + agglomerateId1 <- agglomerateIdForSegmentId(segmentId1) ?~> "Failed to look up agglomerate ids for merge action segments" + agglomerateId2 <- agglomerateIdForSegmentId(segmentId2) ?~> "Failed to look up agglomerate ids for merge action segments" agglomerateGraph1 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId1) ?~> s"Failed to get agglomerate graph for id $agglomerateId1" agglomerateGraph2 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId2) ?~> s"Failed to get agglomerate graph for id $agglomerateId2" _ <- bool2Fox(agglomerateGraph2.segments.contains(segmentId2)) ?~> s"Segment $segmentId2 as queried by position ${update.segmentPosition2} is not contained in fetched agglomerate graph for agglomerate $agglomerateId2" - mergedGraphOpt = mergeGraph(agglomerateGraph1, - agglomerateGraph2, - update, - agglomerateId1, - agglomerateId2, - segmentId1, - segmentId2) + mergedGraphOpt = mergeGraph(agglomerateGraph1, agglomerateGraph2, segmentId1, segmentId2) _ <- Fox.runOptional(mergedGraphOpt) { mergedGraph => for { _ <- updateSegmentToAgglomerate(agglomerateGraph2.segments, agglomerateId1) ?~> s"Failed to update segment to agglomerate buffer" @@ -381,15 +348,10 @@ class EditableMappingUpdater( private def mergeGraph(agglomerateGraph1: AgglomerateGraph, agglomerateGraph2: AgglomerateGraph, - update: MergeAgglomerateUpdateAction, - agglomerateId1: Long, - agglomerateId2: Long, segmentId1: Long, segmentId2: Long): Option[AgglomerateGraph] = { val segment1IsValid = agglomerateGraph1.segments.contains(segmentId1) val segment2IsValid = agglomerateGraph2.segments.contains(segmentId2) - warnOnInvalidSegmentToMerge(segment1IsValid, segmentId1, update.segmentPosition1, update.mag, agglomerateId1) - warnOnInvalidSegmentToMerge(segment2IsValid, segmentId2, 
update.segmentPosition2, update.mag, agglomerateId2) if (segment1IsValid && segment2IsValid) { val newEdge = AgglomerateEdge(segmentId1, segmentId2) val newEdgeAffinity = 255.0f @@ -403,17 +365,6 @@ class EditableMappingUpdater( } else None } - private def warnOnInvalidSegmentToMerge(isValid: Boolean, - segmentId: Long, - position: Option[Vec3Int], - mag: Vec3Int, - agglomerateId: Long): Unit = - if (!isValid && relyOnAgglomerateIds) { - logger.warn( - s"Merge action for editable mapping $tracingId: segment $segmentId as looked up at $position in mag $mag is not present in agglomerate $agglomerateId. This merge becomes a no-op" - ) - } - def revertToVersion(sourceVersion: Long)(implicit ec: ExecutionContext): Fox[Unit] = for { _ <- bool2Fox(sourceVersion <= oldVersion) ?~> "trying to revert editable mapping to a version not yet present in the database" @@ -467,7 +418,6 @@ class EditableMappingUpdater( remoteDatastoreClient, editableMappingService, annotationService, - tracingDataStore, - relyOnAgglomerateIds = relyOnAgglomerateIds + tracingDataStore ) } From c60c713c5c2d3a592bd4a84b480a28b5ddc13841 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 09:22:45 +0100 Subject: [PATCH 184/361] wip temporary tracings --- .../storage/RedisTemporaryStore.scala | 3 + .../tracingstore/TracingStoreModule.scala | 5 +- .../SkeletonTracingController.scala | 17 ++- .../controllers/TSAnnotationController.scala | 34 ++---- .../controllers/VolumeTracingController.scala | 39 ++----- .../tracings/TemporaryTracingService.scala | 85 ++++++++++++++ .../tracings/TracingService.scala | 50 -------- .../skeleton/SkeletonTracingService.scala | 33 +++--- .../volume/VolumeTracingBucketHelper.scala | 54 ++++----- .../tracings/volume/VolumeTracingLayer.scala | 33 ++---- .../volume/VolumeTracingService.scala | 108 +++++++++--------- 11 files changed, 227 insertions(+), 234 deletions(-) create mode 100644 
webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala delete mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RedisTemporaryStore.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RedisTemporaryStore.scala index fde054f6772..7b064b078a6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RedisTemporaryStore.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RedisTemporaryStore.scala @@ -44,6 +44,9 @@ trait RedisTemporaryStore extends LazyLogging { r.keys(pattern).map(_.flatten).getOrElse(List()) } + def insertKey(id: String, expirationOpt: Option[FiniteDuration] = None): Fox[Unit] = + insert(id, "", expirationOpt) + def insert(id: String, value: String, expirationOpt: Option[FiniteDuration] = None): Fox[Unit] = withExceptionHandler { expirationOpt diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index 49ac749ec1d..8057d8fd91d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -1,15 +1,15 @@ package com.scalableminds.webknossos.tracingstore -import org.apache.pekko.actor.ActorSystem import com.google.inject.AbstractModule import com.google.inject.name.Names import com.scalableminds.webknossos.datastore.services.AdHocMeshServiceHolder import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import 
com.scalableminds.webknossos.tracingstore.tracings.TracingDataStore import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService +import com.scalableminds.webknossos.tracingstore.tracings.{TemporaryTracingService, TracingDataStore} +import org.apache.pekko.actor.ActorSystem class TracingStoreModule extends AbstractModule { @@ -18,6 +18,7 @@ class TracingStoreModule extends AbstractModule { override def configure(): Unit = { bind(classOf[ActorSystem]).annotatedWith(Names.named("webknossos-tracingstore")).toInstance(system) bind(classOf[TracingDataStore]).asEagerSingleton() + bind(classOf[TemporaryTracingService]).asEagerSingleton() bind(classOf[SkeletonTracingService]).asEagerSingleton() bind(classOf[VolumeTracingService]).asEagerSingleton() bind(classOf[TracingStoreAccessTokenService]).asEagerSingleton() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 654bb7a5863..0889f7d5ed2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -3,18 +3,18 @@ package com.scalableminds.webknossos.tracingstore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} +import 
com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.services.UserAccessRequest +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService -import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingSelector} import com.scalableminds.webknossos.tracingstore.tracings.skeleton._ +import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingSelector} import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} import play.api.i18n.Messages import play.api.libs.json.Json -import com.scalableminds.webknossos.datastore.controllers.Controller import play.api.mvc.{Action, AnyContent, PlayBodyParsers} -import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} -import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import scala.concurrent.ExecutionContext @@ -43,7 +43,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val tracing = request.body - skeletonTracingService.save(tracing, None, 0).map { newId => + skeletonTracingService.saveSkeleton(tracing, None, 0).map { newId => Ok(Json.toJson(newId)) } } @@ -57,7 +57,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[SkeletonTracing] => tracingOpt match { - case Some(tracing) => skeletonTracingService.save(tracing, None, 0).map(Some(_)) + case Some(tracing) => skeletonTracingService.saveSkeleton(tracing, None, 0).map(Some(_)) case _ => Fox.successful(None) } }) @@ -100,10 +100,7 @@ class 
SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin for { mergedTracing <- Fox.box2Fox(skeletonTracingService.merge(tracings.flatten, newVersion = 0L)) processedTracing = skeletonTracingService.remapTooLargeTreeIds(mergedTracing) - newId <- skeletonTracingService.save(processedTracing, - None, - processedTracing.version, - toTemporaryStore = false) + newId <- skeletonTracingService.saveSkeleton(processedTracing, None, processedTracing.version) } yield Ok(Json.toJson(newId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index e2d7d9176ea..e19480c8a22 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -4,28 +4,14 @@ import collections.SequenceUtils import com.google.inject.Inject import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.Annotation.{ - AnnotationLayerProto, - AnnotationLayerTypeProto, - AnnotationProto -} +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationLayerTypeProto, AnnotationProto} import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.services.UserAccessRequest -import com.scalableminds.webknossos.tracingstore.tracings.{ - KeyValueStoreImplicits, - TracingDataStore, - TracingId, - TracingSelector -} import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService -import com.scalableminds.webknossos.tracingstore.annotation.{ - AnnotationTransactionService, - 
ResetToBaseAnnotationAction, - TSAnnotationService, - UpdateActionGroup -} +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, ResetToBaseAnnotationAction, TSAnnotationService, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService +import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import net.liftweb.common.{Empty, Failure, Full} @@ -41,17 +27,21 @@ class TSAnnotationController @Inject()( annotationService: TSAnnotationService, annotationTransactionService: AnnotationTransactionService, skeletonTracingService: SkeletonTracingService, + temporaryTracingService: TemporaryTracingService, volumeTracingService: VolumeTracingService, tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { - def save(annotationId: String): Action[AnnotationProto] = + def save(annotationId: String, toTemporaryStore: Boolean = false): Action[AnnotationProto] = Action.async(validateProto[AnnotationProto]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - _ <- tracingDataStore.annotations.put(annotationId, 0L, request.body) + _ <- if (toTemporaryStore) + temporaryTracingService.saveAnnotationProto(annotationId, request.body) + else + tracingDataStore.annotations.put(annotationId, 0L, request.body) } yield Ok } } @@ -200,7 +190,7 @@ class TSAnnotationController @Inject()( volumeTracingService .merge(volumeTracings, mergedVolumeStats, newMappingName, newVersion = newTargetVersion)) _ <- Fox.runOptional(mergedVolumeOpt)( - volumeTracingService.save(_, Some(newVolumeId), version = newTargetVersion, toTemporaryStore)) + volumeTracingService.saveVolume(_, 
Some(newVolumeId), version = newTargetVersion, toTemporaryStore)) skeletonTracings <- annotationService .findMultipleSkeletons(skeletonLayers.map { l => Some(TracingSelector(l.tracingId)) @@ -225,8 +215,8 @@ class TSAnnotationController @Inject()( .withEarliestAccessibleVersion(newTargetVersion) .withVersion(newTargetVersion) _ <- Fox.runOptional(mergedSkeletonOpt)( - skeletonTracingService.save(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore)) - _ <- tracingDataStore.annotations.put(newAnnotationId, newTargetVersion, mergedAnnotation) + skeletonTracingService.saveSkeleton(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore)) + _ <- tracingDataStore.annotations.put(newAnnotationId, newTargetVersion, mergedAnnotation) // TODO toTemporaryStore } yield Ok(mergedAnnotation.toByteArray).as(protobufMimeType) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 1959874e985..bec3799ad48 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -4,44 +4,21 @@ import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.ExtendedTypes.ExtendedString import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto -import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} 
-import com.scalableminds.webknossos.datastore.helpers.{ - GetSegmentIndexParameters, - ProtoGeometryImplicits, - SegmentStatisticsParameters -} +import com.scalableminds.webknossos.datastore.helpers.{GetSegmentIndexParameters, ProtoGeometryImplicits, SegmentStatisticsParameters} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} -import com.scalableminds.webknossos.datastore.models.{ - LengthUnit, - VoxelSize, - WebknossosAdHocMeshRequest, - WebknossosDataRequest -} +import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize, WebknossosAdHocMeshRequest, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{FullMeshRequest, UserAccessRequest} import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - ImportVolumeDataVolumeAction, - MagRestrictions, - MergedVolumeStats, - TSFullMeshService, - VolumeDataZipFormat, - VolumeSegmentIndexService, - VolumeSegmentStatisticsService, - VolumeTracingService -} +import com.scalableminds.webknossos.tracingstore.tracings.volume._ import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingId, TracingSelector} -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteDatastoreClient, - TSRemoteWebknossosClient, - TracingStoreAccessTokenService, - TracingStoreConfig -} +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig} import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile @@ -87,7 +64,7 @@ class 
VolumeTracingController @Inject()( logTime(slackNotificationService.noticeSlowRequest) { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val tracing = request.body - volumeTracingService.save(tracing, newTracingId, 0).map { newId => + volumeTracingService.saveVolume(tracing, newTracingId, 0).map { newId => Ok(Json.toJson(newId)) } } @@ -101,7 +78,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { val savedIds = Fox.sequence(request.body.map { tracingOpt: Option[VolumeTracing] => tracingOpt match { - case Some(tracing) => volumeTracingService.save(tracing, None, 0).map(Some(_)) + case Some(tracing) => volumeTracingService.saveVolume(tracing, None, 0).map(Some(_)) case _ => Fox.successful(None) } }) @@ -171,7 +148,7 @@ class VolumeTracingController @Inject()( // segment lists for multi-volume uploads are not supported yet, compare https://github.com/scalableminds/webknossos/issues/6887 mergedTracing = mt.copy(segments = List.empty) - newId <- volumeTracingService.save(mergedTracing, None, mergedTracing.version, toTemporaryStore = false) + newId <- volumeTracingService.saveVolume(mergedTracing, None, mergedTracing.version) } yield Ok(Json.toJson(newId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala new file mode 100644 index 00000000000..acc9fb9dc0f --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala @@ -0,0 +1,85 @@ +package com.scalableminds.webknossos.tracingstore.tracings + +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.bool2Fox +import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto +import 
com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore +import scalapb.GeneratedMessageCompanion + +import javax.inject.Inject +import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt + +// This temporary store is for temporary tracings only (e.g. compound projects) +// and cannot be used for download or versioning +class TemporaryTracingService @Inject()( + skeletonStore: TemporaryTracingStore[SkeletonTracing], + volumeStore: TemporaryTracingStore[VolumeTracing], + volumeDataStore: TemporaryTracingStore[Array[Byte]], + annotationStore: TemporaryTracingStore[AnnotationProto], + temporaryTracingIdStore: TracingStoreRedisStore)(implicit ec: ExecutionContext) { + + implicit def skeletonTracingCompanion: GeneratedMessageCompanion[SkeletonTracing] = SkeletonTracing + implicit def volumeTracingCompanion: GeneratedMessageCompanion[VolumeTracing] = VolumeTracing + implicit def annotationProtoCompanion: GeneratedMessageCompanion[AnnotationProto] = AnnotationProto + + // this should be longer than maxCacheTime in webknossos/AnnotationStore + // so that the references saved there remain valid throughout their life + private val temporaryStoreTimeout = 70 minutes + + // the information that a tracing is/was temporary needs to be stored longer + // to provide useful error messages to the user if the temporary tracing is no longer present + private val temporaryIdStoreTimeout = 10 days + + private def temporaryTracingIdKey(tracingId: String) = + s"temporaryTracingId___$tracingId" + + private def temporaryAnnotationIdKey(tracingId: String) = + s"temporaryTracingId___$tracingId" + + def findVolumeBucket(bucketKey: String): Fox[Array[Byte]] = + volumeDataStore.find(bucketKey) + + def findAllVolumeBucketsWithPrefix(bucketPrefix: String): collection.Map[String, Array[Byte]] = + 
volumeDataStore.findAllConditionalWithKey(key => key.startsWith(bucketPrefix)) + + def saveSkeleton(tracingId: String, skeletonTracing: SkeletonTracing): Fox[Unit] = { + skeletonStore.insert(tracingId, skeletonTracing, Some(temporaryStoreTimeout)) + registerTracingId(tracingId) + Fox.successful(()) + } + + def saveVolume(tracingId: String, volumeTracing: VolumeTracing): Fox[Unit] = { + volumeStore.insert(tracingId, volumeTracing, Some(temporaryStoreTimeout)) + registerTracingId(tracingId) + Fox.successful(()) + } + + def saveVolumeBucket(bucketKey: String, bucketData: Array[Byte]): Fox[Unit] = { + volumeDataStore.insert(bucketKey, bucketData, Some(temporaryStoreTimeout)) + Fox.successful(()) + } + + def saveAnnotationProto(annotationId: String, annotationProto: AnnotationProto): Fox[Unit] = { + annotationStore.insert(annotationId, annotationProto, Some(temporaryStoreTimeout)) + registerAnnotationId(annotationId) + Fox.successful(()) + } + + def isTemporaryTracing(tracingId: String): Fox[Boolean] = + temporaryTracingIdStore.contains(temporaryTracingIdKey(tracingId)) + + def assertTracingStillInCache(tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- bool2Fox(volumeStore.contains(tracingId)) ?~> "Temporary Volume Tracing expired" + } yield () + + private def registerTracingId(tracingId: String) = + temporaryTracingIdStore.insertKey(temporaryTracingIdKey(tracingId), Some(temporaryIdStoreTimeout)) + + private def registerAnnotationId(annotationId: String) = + temporaryTracingIdStore.insertKey(temporaryAnnotationIdKey(annotationId), Some(temporaryIdStoreTimeout)) + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala deleted file mode 100644 index 4378e671567..00000000000 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ 
/dev/null @@ -1,50 +0,0 @@ -package com.scalableminds.webknossos.tracingstore.tracings - -import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.tracingstore.TracingStoreRedisStore -import com.typesafe.scalalogging.LazyLogging -import scalapb.{GeneratedMessage, GeneratedMessageCompanion} - -import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ - -trait TracingService[T <: GeneratedMessage] - extends KeyValueStoreImplicits - with FoxImplicits - with LazyLogging - with ColorGenerator - with BoundingBoxMerger { - - implicit val ec: ExecutionContext - - def tracingStore: FossilDBClient - - def temporaryTracingStore: TemporaryTracingStore[T] - - def temporaryTracingIdStore: TracingStoreRedisStore - - implicit def tracingCompanion: GeneratedMessageCompanion[T] - - // this should be longer than maxCacheTime in webknossos/AnnotationStore - // so that the references saved there remain valid throughout their life - private val temporaryStoreTimeout = 70 minutes - - // the information that a tracing is/was temporary needs to be stored longer - // to provide useful error messages to the user if the temporary tracing is no longer present - private val temporaryIdStoreTimeout = 10 days - - protected def temporaryIdKey(tracingId: String) = - s"temporaryTracingId___$tracingId" - - def save(tracing: T, tracingId: Option[String], version: Long, toTemporaryStore: Boolean = false): Fox[String] = { - val id = tracingId.getOrElse(TracingId.generate) - if (toTemporaryStore) { - temporaryTracingStore.insert(id, tracing, Some(temporaryStoreTimeout)) - temporaryTracingIdStore.insert(temporaryIdKey(id), "", Some(temporaryIdStoreTimeout)) - Fox.successful(id) - } else { - tracingStore.put(id, version, tracing).map(_ => id) - } - } - -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 5e09d8c9894..a16a53e7ba8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -2,12 +2,11 @@ package com.scalableminds.webknossos.tracingstore.tracings.skeleton import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} -import com.scalableminds.util.tools.FoxImplicits +import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.{ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis -import com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreRedisStore} import com.scalableminds.webknossos.tracingstore.tracings._ import net.liftweb.common.{Box, Full} @@ -15,23 +14,29 @@ import scala.concurrent.ExecutionContext class SkeletonTracingService @Inject()( tracingDataStore: TracingDataStore, - val temporaryTracingStore: TemporaryTracingStore[SkeletonTracing], - val handledGroupIdStore: TracingStoreRedisStore, - val temporaryTracingIdStore: TracingStoreRedisStore, - val remoteWebknossosClient: TSRemoteWebknossosClient, - val uncommittedUpdatesStore: TracingStoreRedisStore, - val tracingMigrationService: SkeletonTracingMigrationService)(implicit val ec: ExecutionContext) - extends TracingService[SkeletonTracing] - with KeyValueStoreImplicits + temporaryTracingService: TemporaryTracingService, + val tracingMigrationService: SkeletonTracingMigrationService // TODO why is this unused? 
+)(implicit val ec: ExecutionContext) + extends KeyValueStoreImplicits with ProtoGeometryImplicits + with BoundingBoxMerger + with ColorGenerator with FoxImplicits { - val tracingType: TracingType.Value = TracingType.skeleton - - val tracingStore: FossilDBClient = tracingDataStore.skeletons - implicit val tracingCompanion: SkeletonTracing.type = SkeletonTracing + def saveSkeleton(tracing: SkeletonTracing, + tracingId: Option[String], + version: Long, + toTemporaryStore: Boolean = false): Fox[String] = { + val id = tracingId.getOrElse(TracingId.generate) + if (toTemporaryStore) { + temporaryTracingService.saveSkeleton(id, tracing).map(_ => id) + } else { + tracingDataStore.skeletons.put(id, version, tracing).map(_ => id) + } + } + def adaptSkeletonForDuplicate(tracing: SkeletonTracing, fromTask: Boolean, editPosition: Option[Vec3Int], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 9e7bd3a5796..ca48bb3e5e1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -13,7 +13,6 @@ import net.liftweb.common.{Empty, Failure, Full} import scala.annotation.tailrec import scala.concurrent.ExecutionContext -import scala.concurrent.duration._ trait ReversionHelper { val revertedValue: Array[Byte] = Array[Byte](0) @@ -173,39 +172,36 @@ trait VolumeTracingBucketHelper with ReversionHelper { implicit def ec: ExecutionContext + def volumeDataStore: FossilDBClient + def temporaryTracingService: TemporaryTracingService - // used to store compound annotations - private val temporaryVolumeDataTimeout: FiniteDuration = 70 minutes - - implicit def volumeDataStore: FossilDBClient - 
implicit def temporaryVolumeDataStore: TemporaryVolumeDataStore - - private def loadBucketFromTemporaryStore(key: String) = - temporaryVolumeDataStore.find(key).map(VersionedKeyValuePair(VersionedKey(key, 0), _)) - - def loadBucket(dataLayer: VolumeTracingLayer, + def loadBucket(volumeTracingLayer: VolumeTracingLayer, bucket: BucketPosition, version: Option[Long] = None): Fox[Array[Byte]] = { - val key = buildBucketKey(dataLayer.name, bucket, dataLayer.additionalAxes) + val bucketKey = buildBucketKey(volumeTracingLayer.name, bucket, volumeTracingLayer.additionalAxes) + + val dataFox = + if (volumeTracingLayer.isTemporaryTracing) + temporaryTracingService.findVolumeBucket(bucketKey).map(VersionedKeyValuePair(VersionedKey(bucketKey, 0), _)) + else + volumeDataStore.get(bucketKey, version, mayBeEmpty = Some(true)) - val dataFox = loadBucketFromTemporaryStore(key) match { - case Some(data) => Fox.successful(data) - case None => volumeDataStore.get(key, version, mayBeEmpty = Some(true)) - } val unpackedDataFox = dataFox.flatMap { versionedVolumeBucket => if (isRevertedElement(versionedVolumeBucket)) Fox.empty else { val debugInfo = - s"key: $key, ${versionedVolumeBucket.value.length} bytes, version ${versionedVolumeBucket.version}" + s"key: $bucketKey, ${versionedVolumeBucket.value.length} bytes, version ${versionedVolumeBucket.version}" Fox.successful( - decompressIfNeeded(versionedVolumeBucket.value, expectedUncompressedBucketSizeFor(dataLayer), debugInfo)) + decompressIfNeeded(versionedVolumeBucket.value, + expectedUncompressedBucketSizeFor(volumeTracingLayer), + debugInfo)) } } unpackedDataFox.futureBox.flatMap { case Full(unpackedData) => Fox.successful(unpackedData) case Empty => - if (dataLayer.includeFallbackDataIfAvailable && dataLayer.tracing.fallbackLayer.nonEmpty) { - loadFallbackBucket(dataLayer, bucket) + if (volumeTracingLayer.includeFallbackDataIfAvailable && volumeTracingLayer.tracing.fallbackLayer.nonEmpty) { + loadFallbackBucket(volumeTracingLayer, 
bucket) } else Fox.empty case f: Failure => f.toFox } @@ -249,20 +245,18 @@ trait VolumeTracingBucketHelper version: Long, toTemporaryStore: Boolean, additionalAxes: Option[Seq[AdditionalAxis]]): Fox[Unit] = { - val key = buildBucketKey(tracingId, bucket, additionalAxes) + val bucketKey = buildBucketKey(tracingId, bucket, additionalAxes) val compressedBucket = compressVolumeBucket(data, expectedUncompressedBucketSizeFor(elementClass)) if (toTemporaryStore) { - // Note that this temporary store is for temporary volumes only (e.g. compound projects) - // and cannot be used for download or versioning - Fox.successful(temporaryVolumeDataStore.insert(key, compressedBucket, Some(temporaryVolumeDataTimeout))) + temporaryTracingService.saveVolumeBucket(bucketKey, compressedBucket) } else { - volumeDataStore.put(key, version, compressedBucket) + volumeDataStore.put(bucketKey, version, compressedBucket) } } def bucketStream(dataLayer: VolumeTracingLayer, version: Option[Long]): Iterator[(BucketPosition, Array[Byte])] = { - val key = buildKeyPrefix(dataLayer.name) - new BucketIterator(key, + val keyPrefix = buildKeyPrefix(dataLayer.name) + new BucketIterator(keyPrefix, volumeDataStore, expectedUncompressedBucketSizeFor(dataLayer), version, @@ -271,8 +265,8 @@ trait VolumeTracingBucketHelper def bucketStreamWithVersion(dataLayer: VolumeTracingLayer, version: Option[Long]): Iterator[(BucketPosition, Array[Byte], Long)] = { - val key = buildKeyPrefix(dataLayer.name) - new VersionedBucketIterator(key, + val keyPrefix = buildKeyPrefix(dataLayer.name) + new VersionedBucketIterator(keyPrefix, volumeDataStore, expectedUncompressedBucketSizeFor(dataLayer), version, @@ -281,7 +275,7 @@ trait VolumeTracingBucketHelper def bucketStreamFromTemporaryStore(dataLayer: VolumeTracingLayer): Iterator[(BucketPosition, Array[Byte])] = { val keyPrefix = buildKeyPrefix(dataLayer.name) - val keyValuePairs = temporaryVolumeDataStore.findAllConditionalWithKey(key => key.startsWith(keyPrefix)) + val 
keyValuePairs = temporaryTracingService.findAllVolumeBucketsWithPrefix(keyPrefix) keyValuePairs.flatMap { case (bucketKey, data) => parseBucketKey(bucketKey, dataLayer.additionalAxes).map(tuple => (tuple._2, data)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 10868db7097..c7d7e7aa874 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -4,23 +4,19 @@ import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, MagLocator} +import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.tracingstore.tracings.{ - FossilDBClient, - TemporaryTracingStore, - TemporaryVolumeDataStore -} +import com.scalableminds.webknossos.tracingstore.tracings.{FossilDBClient, 
TemporaryTracingService} import sun.reflect.generics.reflectiveObjects.NotImplementedException +import ucar.ma2.{Array => MultiArray} import scala.concurrent.ExecutionContext -import ucar.ma2.{Array => MultiArray} trait AbstractVolumeTracingBucketProvider extends BucketProvider with VolumeTracingBucketHelper with FoxImplicits { @@ -33,7 +29,7 @@ class VolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit val ec: Ex extends AbstractVolumeTracingBucketProvider { val volumeDataStore: FossilDBClient = layer.volumeDataStore - val temporaryVolumeDataStore: TemporaryVolumeDataStore = layer.volumeDataCache + val temporaryTracingService: TemporaryTracingService = layer.temporaryTracingService override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = loadBucket(layer, readInstruction.bucket, readInstruction.version) @@ -49,20 +45,14 @@ class TemporaryVolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit v extends AbstractVolumeTracingBucketProvider { val volumeDataStore: FossilDBClient = layer.volumeDataStore - val temporaryVolumeDataStore: TemporaryVolumeDataStore = layer.volumeDataCache - val temporaryTracingStore: TemporaryTracingStore[VolumeTracing] = layer.temporaryTracingStore + val temporaryTracingService: TemporaryTracingService = layer.temporaryTracingService override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = for { - _ <- assertTracingStillInCache(layer) + _ <- temporaryTracingService.assertTracingStillInCache(layer.name) data <- loadBucket(layer, readInstruction.bucket, readInstruction.version) } yield data - private def assertTracingStillInCache(layer: VolumeTracingLayer)(implicit ec: ExecutionContext): Fox[Unit] = - for { - _ <- bool2Fox(temporaryTracingStore.contains(layer.name)) ?~> "Temporary Volume Tracing expired" - } yield () - override def bucketStream(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = 
bucketStreamFromTemporaryStore(layer) @@ -73,15 +63,14 @@ class TemporaryVolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit v case class VolumeTracingLayer( name: String, volumeTracingService: VolumeTracingService, + temporaryTracingService: TemporaryTracingService, isTemporaryTracing: Boolean = false, includeFallbackDataIfAvailable: Boolean = false, tracing: VolumeTracing, tokenContext: TokenContext, - additionalAxes: Option[Seq[AdditionalAxis]] -)(implicit val volumeDataStore: FossilDBClient, - implicit val volumeDataCache: TemporaryVolumeDataStore, - implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing], - implicit val ec: ExecutionContext) + additionalAxes: Option[Seq[AdditionalAxis]], + volumeDataStore: FossilDBClient, +)(implicit val ec: ExecutionContext) extends SegmentationLayer with ProtoGeometryImplicits { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 717a301f2af..a9d23065c6a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -8,30 +8,19 @@ import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import 
com.scalableminds.webknossos.datastore.models.DataRequestCollection.DataRequestCollection +import com.scalableminds.webknossos.datastore.models._ import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer, ElementClass} -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest -import com.scalableminds.webknossos.datastore.models.{ - BucketPosition, - UnsignedInteger, - UnsignedIntegerArray, - VoxelSize, - WebknossosAdHocMeshRequest -} import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteDatastoreClient, - TSRemoteWebknossosClient, - TracingStoreRedisStore -} +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} @@ -47,24 +36,20 @@ import scala.concurrent.ExecutionContext import scala.concurrent.duration._ class VolumeTracingService @Inject()( - val tracingDataStore: TracingDataStore, - val adHocMeshServiceHolder: AdHocMeshServiceHolder, - implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing], - implicit val temporaryVolumeDataStore: TemporaryVolumeDataStore, - implicit val ec: ExecutionContext, - val handledGroupIdStore: TracingStoreRedisStore, - val uncommittedUpdatesStore: TracingStoreRedisStore, - val temporaryTracingIdStore: TracingStoreRedisStore, + tracingDataStore: TracingDataStore, + 
adHocMeshServiceHolder: AdHocMeshServiceHolder, + temporaryFileCreator: TemporaryFileCreator, + volumeSegmentIndexService: VolumeSegmentIndexService, + val temporaryTracingService: TemporaryTracingService, val remoteDatastoreClient: TSRemoteDatastoreClient, - val remoteWebknossosClient: TSRemoteWebknossosClient, - val temporaryFileCreator: TemporaryFileCreator, - editableMappingService: EditableMappingService, - volumeSegmentIndexService: VolumeSegmentIndexService -) extends TracingService[VolumeTracing] - with VolumeTracingBucketHelper + val remoteWebknossosClient: TSRemoteWebknossosClient +)(implicit val ec: ExecutionContext) + extends VolumeTracingBucketHelper with WKWDataFormatHelper with FallbackDataHelper with DataFinder + with ColorGenerator + with BoundingBoxMerger with VolumeDataZipHelper with ProtoGeometryImplicits with FoxImplicits @@ -90,6 +75,18 @@ class VolumeTracingService @Inject()( private val fallbackLayerCache: AlfuCache[(String, Option[String], Option[String]), Option[RemoteFallbackLayer]] = AlfuCache(maxCapacity = 100) + def saveVolume(tracing: VolumeTracing, + tracingId: Option[String], + version: Long, + toTemporaryStore: Boolean = false): Fox[String] = { + val id = tracingId.getOrElse(TracingId.generate) + if (toTemporaryStore) { + temporaryTracingService.saveVolume(id, tracing).map(_ => id) + } else { + tracingDataStore.volumes.put(id, version, tracing).map(_ => id) + } + } + private def updateSegmentIndex( segmentIndexBuffer: VolumeSegmentIndexBuffer, bucketPosition: BucketPosition, @@ -469,15 +466,12 @@ class VolumeTracingService @Inject()( zipResult } - def isTemporaryTracing(tracingId: String): Fox[Boolean] = - temporaryTracingIdStore.contains(temporaryIdKey(tracingId)) - def data(tracingId: String, tracing: VolumeTracing, dataRequests: DataRequestCollection, includeFallbackDataIfAvailable: Boolean = false)(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = for { - isTemporaryTracing <- isTemporaryTracing(tracingId) + 
isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId) dataLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing, includeFallbackDataIfAvailable) requests = dataRequests.map(r => DataServiceDataRequest(null, dataLayer, r.cuboid(dataLayer), r.settings.copy(appliedAgglomerate = None))) @@ -538,7 +532,7 @@ class VolumeTracingService @Inject()( newTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { var bucketCount = 0 for { - isTemporaryTracing <- isTemporaryTracing(sourceTracingId) + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(sourceTracingId) sourceDataLayer = volumeTracingLayer(sourceTracingId, sourceTracing, isTemporaryTracing) buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream( Some(sourceTracing.version)) @@ -588,6 +582,8 @@ class VolumeTracingService @Inject()( name = tracingId, isTemporaryTracing = isTemporaryTracing, volumeTracingService = this, + temporaryTracingService = this.temporaryTracingService, + volumeDataStore = volumeDataStore, includeFallbackDataIfAvailable = includeFallbackDataIfAvailable, tracing = tracing, tokenContext = tc, @@ -601,36 +597,42 @@ class VolumeTracingService @Inject()( for { _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." _ <- bool2Fox(mags.nonEmpty) ?~> "Initializing without any mags. No data or mag restrictions too tight?" 
- id <- save(tracing.copy(mags = mags.toList.sortBy(_.maxDim).map(vec3IntToProto)), - Some(tracingId), - tracing.version, - toTemporaryStore) + id <- saveVolume(tracing.copy(mags = mags.toList.sortBy(_.maxDim).map(vec3IntToProto)), + Some(tracingId), + tracing.version, + toTemporaryStore) } yield id def volumeBucketsAreEmpty(tracingId: String): Boolean = volumeDataStore.getMultipleKeys(None, Some(tracingId), limit = Some(1))(toBox).isEmpty def createAdHocMesh(tracingId: String, tracing: VolumeTracing, request: WebknossosAdHocMeshRequest)( - implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = { - val volumeLayer = volumeTracingLayer(tracingId, tracing, includeFallbackDataIfAvailable = true) - val adHocMeshRequest = AdHocMeshRequest( - None, - volumeLayer, - request.cuboid(volumeLayer), - request.segmentId, - request.voxelSizeFactorInUnit, - None, - None, - request.additionalCoordinates, - request.findNeighbors - ) - adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) - } + implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = + for { + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId) + volumeLayer = volumeTracingLayer(tracingId, + tracing, + includeFallbackDataIfAvailable = true, + isTemporaryTracing = isTemporaryTracing) + adHocMeshRequest = AdHocMeshRequest( + None, + volumeLayer, + request.cuboid(volumeLayer), + request.segmentId, + request.voxelSizeFactorInUnit, + None, + None, + request.additionalCoordinates, + request.findNeighbors + ) + result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + } yield result def findData(tracingId: String, tracing: VolumeTracing)(implicit tc: TokenContext): Fox[Option[Vec3Int]] = for { _ <- Fox.successful(()) - volumeLayer = volumeTracingLayer(tracingId, tracing) + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId) + volumeLayer = volumeTracingLayer(tracingId, tracing, isTemporaryTracing = isTemporaryTracing) bucketStream = 
volumeLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketPosOpt = if (bucketStream.hasNext) { val bucket = bucketStream.next() From f0c35baaceaac6a8e26e5efd95b87f210b02291d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 10:24:50 +0100 Subject: [PATCH 185/361] use temporary store when loading annotations --- app/models/annotation/AnnotationStore.scala | 4 +- .../TracingDataSourceTemporaryStore.scala | 2 +- .../controllers/DataSourceController.scala | 36 ++----- .../services/DataSourceRepository.scala | 6 +- .../services/uploading/ComposeService.scala | 21 +--- .../datastore/storage/TemporaryStore.scala | 6 +- .../annotation/TSAnnotationService.scala | 102 ++++++++---------- .../controllers/TSAnnotationController.scala | 14 ++- .../tracings/TemporaryTracingService.scala | 17 ++- .../volume/VolumeTracingBucketHelper.scala | 4 +- .../volume/VolumeTracingService.scala | 11 +- 11 files changed, 93 insertions(+), 130 deletions(-) diff --git a/app/models/annotation/AnnotationStore.scala b/app/models/annotation/AnnotationStore.scala index 14c0f6d1c42..76d0d3257b2 100755 --- a/app/models/annotation/AnnotationStore.scala +++ b/app/models/annotation/AnnotationStore.scala @@ -54,10 +54,10 @@ class AnnotationStore @Inject()( temporaryAnnotationStore.insert(id.toUniqueString, annotation, Some(cacheTimeout)) private def getFromCache(annotationId: AnnotationIdentifier): Option[Fox[Annotation]] = - temporaryAnnotationStore.find(annotationId.toUniqueString).map(Fox.successful(_)) + temporaryAnnotationStore.get(annotationId.toUniqueString).map(Fox.successful(_)) def findCachedByTracingId(tracingId: String): Box[Annotation] = { - val annotationOpt = temporaryAnnotationStore.findAll.find(a => a.annotationLayers.exists(_.tracingId == tracingId)) + val annotationOpt = temporaryAnnotationStore.getAll.find(a => a.annotationLayers.exists(_.tracingId == tracingId)) annotationOpt match { case Some(annotation) => Full(annotation) case None => Empty diff --git 
a/app/models/annotation/TracingDataSourceTemporaryStore.scala b/app/models/annotation/TracingDataSourceTemporaryStore.scala index 4140bf22269..f73e88f6fd4 100644 --- a/app/models/annotation/TracingDataSourceTemporaryStore.scala +++ b/app/models/annotation/TracingDataSourceTemporaryStore.scala @@ -19,6 +19,6 @@ class TracingDataSourceTemporaryStore @Inject()(temporaryStore: TemporaryStore[S temporaryStore.insert(tracingId, dataSource, Some(timeOut)) def find(tracingId: String): Option[DataSourceLike] = - temporaryStore.find(tracingId) + temporaryStore.get(tracingId) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index a088e55bae8..46c352abd5f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -4,44 +4,26 @@ import com.google.inject.Inject import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong -import com.scalableminds.webknossos.datastore.explore.{ - ExploreRemoteDatasetRequest, - ExploreRemoteDatasetResponse, - ExploreRemoteLayerService -} -import com.scalableminds.webknossos.datastore.helpers.{ - GetMultipleSegmentIndexParameters, - GetSegmentIndexParameters, - SegmentIndexData, - SegmentStatisticsParameters -} +import com.scalableminds.webknossos.datastore.explore.{ExploreRemoteDatasetRequest, ExploreRemoteDatasetResponse, ExploreRemoteLayerService} +import com.scalableminds.webknossos.datastore.helpers.{GetMultipleSegmentIndexParameters, GetSegmentIndexParameters, SegmentIndexData, SegmentStatisticsParameters} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource import 
com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.services._ -import com.scalableminds.webknossos.datastore.services.uploading.{ - CancelUploadInformation, - ComposeRequest, - ComposeService, - ReserveManualUploadInformation, - ReserveUploadInformation, - UploadInformation, - UploadService -} +import com.scalableminds.webknossos.datastore.services.uploading._ +import com.scalableminds.webknossos.datastore.storage.{AgglomerateFileKey, DataVaultService} +import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.data.Form import play.api.data.Forms.{longNumber, nonEmptyText, number, tuple} import play.api.i18n.Messages +import play.api.libs.Files import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData, PlayBodyParsers} import java.io.File -import com.scalableminds.webknossos.datastore.storage.{AgglomerateFileKey, DataVaultService} -import net.liftweb.common.{Box, Empty, Failure, Full} -import play.api.libs.Files - import java.net.URI import scala.collection.mutable.ListBuffer -import scala.concurrent.{ExecutionContext, Future} import scala.concurrent.duration._ +import scala.concurrent.{ExecutionContext, Future} class DataSourceController @Inject()( dataSourceRepository: DataSourceRepository, @@ -398,7 +380,7 @@ class DataSourceController @Inject()( UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId))) { for { _ <- Fox.successful(()) - dataSource <- dataSourceRepository.find(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( + dataSource <- dataSourceRepository.get(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND _ <- dataSourceService.updateDataSource(request.body.copy(id = dataSource.id), expectExisting = true) } yield Ok @@ -410,7 +392,7 @@ class DataSourceController @Inject()( Action.async(validateJson[DataSource]) { implicit 
request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { - _ <- bool2Fox(dataSourceRepository.find(DataSourceId(datasetName, organizationId)).isEmpty) ?~> Messages( + _ <- bool2Fox(dataSourceRepository.get(DataSourceId(datasetName, organizationId)).isEmpty) ?~> Messages( "dataSource.alreadyPresent") _ <- remoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala index 645af371134..a8679c88734 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala @@ -1,13 +1,13 @@ package com.scalableminds.webknossos.datastore.services -import org.apache.pekko.actor.ActorSystem import com.google.inject.Inject import com.google.inject.name.Named +import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId} import com.scalableminds.webknossos.datastore.storage.TemporaryStore -import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging +import org.apache.pekko.actor.ActorSystem import play.api.i18n.{Messages, MessagesProvider} import scala.concurrent.ExecutionContext @@ -28,7 +28,7 @@ class DataSourceRepository @Inject()( } yield (dataSource, dataLayer) def findUsable(id: DataSourceId): Option[DataSource] = - find(id).flatMap(_.toUsable) + get(id).flatMap(_.toUsable) def updateDataSource(dataSource: InboxDataSource): Fox[Unit] = for { diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index efed327ef37..df575fa04b4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -3,25 +3,10 @@ package com.scalableminds.webknossos.datastore.services.uploading import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.io.PathUtils import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.dataformats.layers.{ - N5DataLayer, - N5SegmentationLayer, - PrecomputedDataLayer, - PrecomputedSegmentationLayer, - WKWDataLayer, - WKWSegmentationLayer, - Zarr3DataLayer, - Zarr3SegmentationLayer, - ZarrDataLayer, - ZarrSegmentationLayer -} +import com.scalableminds.webknossos.datastore.dataformats.layers._ import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.datasource._ -import com.scalableminds.webknossos.datastore.services.{ - DSRemoteWebknossosClient, - DataSourceRepository, - DataSourceService -} +import com.scalableminds.webknossos.datastore.services.{DSRemoteWebknossosClient, DataSourceRepository, DataSourceService} import play.api.libs.json.{Json, OFormat} import java.nio.charset.StandardCharsets @@ -91,7 +76,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, for { dataSourceId <- Fox.successful( DataSourceId(composeLayer.datasetId.name, composeLayer.datasetId.owningOrganization)) - dataSource <- Fox.option2Fox(dataSourceRepository.find(dataSourceId)) + dataSource <- Fox.option2Fox(dataSourceRepository.get(dataSourceId)) ds <- Fox.option2Fox(dataSource.toUsable) layer <- Fox.option2Fox(ds.dataLayers.find(_.name == 
composeLayer.sourceName)) applyCoordinateTransformations = (cOpt: Option[List[CoordinateTransformation]]) => diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/TemporaryStore.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/TemporaryStore.scala index 1ec23abd39a..a290b5bc644 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/TemporaryStore.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/TemporaryStore.scala @@ -10,7 +10,7 @@ class TemporaryStore[K, V] @Inject()(system: ActorSystem) { lazy val map: scala.collection.mutable.Map[K, V] = scala.collection.mutable.Map() - def find(id: K): Option[V] = + def get(id: K): Option[V] = map.synchronized { map.get(id) } @@ -20,12 +20,12 @@ class TemporaryStore[K, V] @Inject()(system: ActorSystem) { map.contains(id) ) - def findAll: Seq[V] = + def getAll: Seq[V] = map.synchronized { map.values.toList } - def findAllConditionalWithKey(predicate: K => Boolean): scala.collection.Map[K, V] = + def getAllConditionalWithKey(predicate: K => Boolean): scala.collection.Map[K, V] = map.synchronized { map.view.filterKeys(predicate).toMap } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 2efa88ccb2e..17662c595c0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -7,47 +7,17 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} -import 
com.scalableminds.webknossos.datastore.Annotation.{ - AnnotationLayerProto, - AnnotationLayerTypeProto, - AnnotationProto -} +import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationLayerTypeProto, AnnotationProto} import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ - EditableMappingLayer, - EditableMappingService, - EditableMappingUpdateAction, - EditableMappingUpdater -} +import com.scalableminds.webknossos.tracingstore.tracings._ +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{EditableMappingLayer, EditableMappingService, EditableMappingUpdateAction, EditableMappingUpdater} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService -import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ - CreateNodeSkeletonAction, - DeleteNodeSkeletonAction, - SkeletonUpdateAction, - UpdateTracingSkeletonAction -} -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - ApplyableVolumeUpdateAction, - BucketMutatingVolumeUpdateAction, - MagRestrictions, - UpdateMappingNameVolumeAction, - VolumeTracingService -} -import com.scalableminds.webknossos.tracingstore.tracings.{ - FallbackDataHelper, - KeyValueStoreImplicits, - RemoteFallbackLayer, - SkeletonTracingMigrationService, - TracingDataStore, - TracingId, - TracingSelector, - VersionedKeyValuePair, - VolumeTracingMigrationService -} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{CreateNodeSkeletonAction, DeleteNodeSkeletonAction, SkeletonUpdateAction, 
UpdateTracingSkeletonAction} +import com.scalableminds.webknossos.tracingstore.tracings.volume._ import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Full} @@ -62,6 +32,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss skeletonTracingService: SkeletonTracingService, skeletonTracingMigrationService: SkeletonTracingMigrationService, volumeTracingMigrationService: VolumeTracingMigrationService, + temporaryTracingService: TemporaryTracingService, val remoteDatastoreClient: TSRemoteDatastoreClient, tracingDataStore: TracingDataStore) extends KeyValueStoreImplicits @@ -78,8 +49,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { - withTracings <- getWithTracings(annotationId, version) - } yield withTracings.annotation + isTemporaryTracing <- temporaryTracingService.isTemporaryAnnotation(annotationId) + annotation <- if (isTemporaryTracing) temporaryTracingService.getAnnotation(annotationId) + else + for { + withTracings <- getWithTracings(annotationId, version) + } yield withTracings.annotation + } yield annotation def getMultiple(annotationIds: Seq[String])(implicit ec: ExecutionContext, tc: TokenContext): Fox[Seq[AnnotationProto]] = @@ -206,6 +182,15 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } yield sourceAnnotation } + def saveAnnotationProto(annotationId: String, + version: Long, + annotationProto: AnnotationProto, + toTemporaryStore: Boolean = false): Fox[Unit] = + if (toTemporaryStore) + temporaryTracingService.saveAnnotationProto(annotationId, annotationProto) + else + tracingDataStore.annotations.put(annotationId, version, annotationProto) + def updateActionLog(annotationId: String, newestVersion: 
Long, oldestVersion: Long)( implicit ec: ExecutionContext): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[UpdateAction])): JsObject = @@ -474,7 +459,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } private def flushAnnotationInfo(annotationId: String, annotationWithTracings: AnnotationWithTracings) = - tracingDataStore.annotations.put(annotationId, annotationWithTracings.version, annotationWithTracings.annotation) + saveAnnotationProto(annotationId, annotationWithTracings.version, annotationWithTracings.annotation) private def determineTargetVersion(annotationId: String, newestMaterializedAnnotation: AnnotationProto, @@ -552,31 +537,38 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tracingDataStore.skeletons .get[SkeletonTracing](tracingId, version, mayBeEmpty = Some(true))(fromProtoBytes[SkeletonTracing]) - def findVolume(annotationId: String, - tracingId: String, - version: Option[Long] = None, - fromTemporaryStore: Boolean = true // TODO - )(implicit tc: TokenContext, ec: ExecutionContext): Fox[VolumeTracing] = + def findVolume(annotationId: String, tracingId: String, version: Option[Long] = None)( + implicit tc: TokenContext, + ec: ExecutionContext): Fox[VolumeTracing] = for { - annotation <- getWithTracings(annotationId, version) - tracing <- annotation.getVolume(tracingId).toFox - migrated <- volumeTracingMigrationService.migrateTracing(tracing) - } yield migrated + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId) + tracing <- if (isTemporaryTracing) temporaryTracingService.getVolume(tracingId) + else + for { + annotation <- getWithTracings(annotationId, version) + tracing <- annotation.getVolume(tracingId).toFox + migrated <- volumeTracingMigrationService.migrateTracing(tracing) + } yield migrated + } yield tracing def findSkeleton( annotationId: String, tracingId: String, - version: Option[Long] = None, - fromTemporaryStoreu: Boolean = true 
// TODO + version: Option[Long] = None )(implicit tc: TokenContext, ec: ExecutionContext): Fox[SkeletonTracing] = if (tracingId == TracingId.dummy) Fox.successful(skeletonTracingService.dummyTracing) else { for { - annotation <- getWithTracings(annotationId, version) - tracing <- annotation.getSkeleton(tracingId).toFox - migrated <- skeletonTracingMigrationService.migrateTracing(tracing) - } yield migrated + isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(tracingId) + tracing <- if (isTemporaryTracing) temporaryTracingService.getSkeleton(tracingId) + else + for { + annotation <- getWithTracings(annotationId, version) + tracing <- annotation.getSkeleton(tracingId).toFox + migrated <- skeletonTracingMigrationService.migrateTracing(tracing) + } yield migrated + } yield tracing } def findMultipleVolumes(selectors: Seq[Option[TracingSelector]])( @@ -629,14 +621,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss v0DuplicatedAnnotation = v0Annotation.copy(annotationLayers = v0NewLayers, earliestAccessibleVersion = v0Annotation.version) - _ <- tracingDataStore.annotations.put(newAnnotationId, v0Annotation.version, v0DuplicatedAnnotation) + _ <- saveAnnotationProto(newAnnotationId, v0Annotation.version, v0DuplicatedAnnotation) // Duplicate current newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)(layer => duplicateLayer(annotationId, layer, tracingIdMap, currentAnnotation.version, isFromTask, datasetBoundingBox)) duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, earliestAccessibleVersion = currentAnnotation.version) - _ <- tracingDataStore.annotations.put(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) + _ <- saveAnnotationProto(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) } yield duplicatedAnnotation diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index e19480c8a22..bebe002531c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -27,9 +27,7 @@ class TSAnnotationController @Inject()( annotationService: TSAnnotationService, annotationTransactionService: AnnotationTransactionService, skeletonTracingService: SkeletonTracingService, - temporaryTracingService: TemporaryTracingService, - volumeTracingService: VolumeTracingService, - tracingDataStore: TracingDataStore)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) + volumeTracingService: VolumeTracingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with KeyValueStoreImplicits { @@ -38,10 +36,7 @@ class TSAnnotationController @Inject()( log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.webknossos) { for { - _ <- if (toTemporaryStore) - temporaryTracingService.saveAnnotationProto(annotationId, request.body) - else - tracingDataStore.annotations.put(annotationId, 0L, request.body) + _ <- annotationService.saveAnnotationProto(annotationId, 0L, request.body, toTemporaryStore) } yield Ok } } @@ -216,7 +211,10 @@ class TSAnnotationController @Inject()( .withVersion(newTargetVersion) _ <- Fox.runOptional(mergedSkeletonOpt)( skeletonTracingService.saveSkeleton(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore)) - _ <- tracingDataStore.annotations.put(newAnnotationId, newTargetVersion, mergedAnnotation) // TODO toTemporaryStore + _ <- annotationService.saveAnnotationProto(newAnnotationId, + newTargetVersion, + mergedAnnotation, + toTemporaryStore) } yield Ok(mergedAnnotation.toByteArray).as(protobufMimeType) } } diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala index acc9fb9dc0f..104b60c1784 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala @@ -39,11 +39,17 @@ class TemporaryTracingService @Inject()( private def temporaryAnnotationIdKey(tracingId: String) = s"temporaryTracingId___$tracingId" - def findVolumeBucket(bucketKey: String): Fox[Array[Byte]] = - volumeDataStore.find(bucketKey) + def getAnnotation(annotationId: String): Fox[AnnotationProto] = annotationStore.get(annotationId) - def findAllVolumeBucketsWithPrefix(bucketPrefix: String): collection.Map[String, Array[Byte]] = - volumeDataStore.findAllConditionalWithKey(key => key.startsWith(bucketPrefix)) + def getVolume(tracingId: String): Fox[VolumeTracing] = volumeStore.get(tracingId) + + def getSkeleton(tracingId: String): Fox[SkeletonTracing] = skeletonStore.get(tracingId) + + def getVolumeBucket(bucketKey: String): Fox[Array[Byte]] = + volumeDataStore.get(bucketKey) + + def getAllVolumeBucketsWithPrefix(bucketPrefix: String): collection.Map[String, Array[Byte]] = + volumeDataStore.getAllConditionalWithKey(key => key.startsWith(bucketPrefix)) def saveSkeleton(tracingId: String, skeletonTracing: SkeletonTracing): Fox[Unit] = { skeletonStore.insert(tracingId, skeletonTracing, Some(temporaryStoreTimeout)) @@ -68,6 +74,9 @@ class TemporaryTracingService @Inject()( Fox.successful(()) } + def isTemporaryAnnotation(annotationId: String): Fox[Boolean] = + temporaryTracingIdStore.contains(temporaryAnnotationIdKey(annotationId)) + def isTemporaryTracing(tracingId: String): Fox[Boolean] = temporaryTracingIdStore.contains(temporaryTracingIdKey(tracingId)) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index ca48bb3e5e1..8a84fdbcce6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -182,7 +182,7 @@ trait VolumeTracingBucketHelper val dataFox = if (volumeTracingLayer.isTemporaryTracing) - temporaryTracingService.findVolumeBucket(bucketKey).map(VersionedKeyValuePair(VersionedKey(bucketKey, 0), _)) + temporaryTracingService.getVolumeBucket(bucketKey).map(VersionedKeyValuePair(VersionedKey(bucketKey, 0), _)) else volumeDataStore.get(bucketKey, version, mayBeEmpty = Some(true)) @@ -275,7 +275,7 @@ trait VolumeTracingBucketHelper def bucketStreamFromTemporaryStore(dataLayer: VolumeTracingLayer): Iterator[(BucketPosition, Array[Byte])] = { val keyPrefix = buildKeyPrefix(dataLayer.name) - val keyValuePairs = temporaryTracingService.findAllVolumeBucketsWithPrefix(keyPrefix) + val keyValuePairs = temporaryTracingService.getAllVolumeBucketsWithPrefix(keyPrefix) keyValuePairs.flatMap { case (bucketKey, data) => parseBucketKey(bucketKey, dataLayer.additionalAxes).map(tuple => (tuple._2, data)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index a9d23065c6a..1d1448619c6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -590,17 +590,13 @@ class 
VolumeTracingService @Inject()( additionalAxes = AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes) ) - def updateMagList(tracingId: String, - tracing: VolumeTracing, - mags: Set[Vec3Int], - toTemporaryStore: Boolean = false): Fox[String] = + def updateMagList(tracingId: String, tracing: VolumeTracing, mags: Set[Vec3Int]): Fox[String] = for { _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." _ <- bool2Fox(mags.nonEmpty) ?~> "Initializing without any mags. No data or mag restrictions too tight?" id <- saveVolume(tracing.copy(mags = mags.toList.sortBy(_.maxDim).map(vec3IntToProto)), Some(tracingId), - tracing.version, - toTemporaryStore) + tracing.version) } yield id def volumeBucketsAreEmpty(tracingId: String): Boolean = @@ -743,7 +739,8 @@ class VolumeTracingService @Inject()( } } - val shouldCreateSegmentIndex = volumeSegmentIndexService.shouldCreateSegmentIndexForMerged(tracings) + val shouldCreateSegmentIndex = !toTemporaryStore && volumeSegmentIndexService.shouldCreateSegmentIndexForMerged( + tracings) logger.info( s"Merging ${tracings.length} volume tracings into new $newId. 
CreateSegmentIndex = $shouldCreateSegmentIndex") From 03d62c875589e4b8f5d823b109c166e82d1bd845 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 10:24:50 +0100 Subject: [PATCH 186/361] fix mergedFromIds params --- app/controllers/UserTokenController.scala | 12 +++-------- .../annotation/TSAnnotationService.scala | 20 ++++++++++++++----- ...alableminds.webknossos.tracingstore.routes | 2 +- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 22127329f93..cf1d75dcfb8 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -1,19 +1,11 @@ package controllers -import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services.AccessMode.AccessMode -import com.scalableminds.webknossos.datastore.services.{ - AccessMode, - AccessResourceType, - UserAccessAnswer, - UserAccessRequest -} +import com.scalableminds.webknossos.datastore.services.{AccessMode, AccessResourceType, UserAccessAnswer, UserAccessRequest} import com.scalableminds.webknossos.tracingstore.tracings.TracingId - -import javax.inject.Inject import models.annotation._ import models.dataset.{DataStoreService, DatasetDAO, DatasetService} import models.job.JobDAO @@ -22,9 +14,11 @@ import models.user.{User, UserService} import net.liftweb.common.{Box, Full} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers, Result} +import play.silhouette.api.Silhouette import security.{RandomIDGenerator, URLSharing, WkEnv, WkSilhouetteEnvironment} import utils.{ObjectId, WkConf} +import javax.inject.Inject import scala.concurrent.ExecutionContext object RpcTokenHolder { diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 17662c595c0..2275729f8c2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -624,11 +624,21 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- saveAnnotationProto(newAnnotationId, v0Annotation.version, v0DuplicatedAnnotation) // Duplicate current - newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)(layer => - duplicateLayer(annotationId, layer, tracingIdMap, currentAnnotation.version, isFromTask, datasetBoundingBox)) - duplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, - earliestAccessibleVersion = currentAnnotation.version) - _ <- saveAnnotationProto(newAnnotationId, currentAnnotation.version, duplicatedAnnotation) + duplicatedAnnotation <- if (currentAnnotation.version > 0L) { + for { + newLayers <- Fox.serialCombined(currentAnnotation.annotationLayers)( + layer => + duplicateLayer(annotationId, + layer, + tracingIdMap, + currentAnnotation.version, + isFromTask, + datasetBoundingBox)) + currentDuplicatedAnnotation = currentAnnotation.copy(annotationLayers = newLayers, + earliestAccessibleVersion = currentAnnotation.version) + _ <- saveAnnotationProto(newAnnotationId, currentAnnotation.version, currentDuplicatedAnnotation) + } yield currentDuplicatedAnnotation + } else Fox.successful(v0DuplicatedAnnotation) } yield duplicatedAnnotation diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 08aa0e77a02..93205b4153b 100644 --- 
a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -13,7 +13,7 @@ GET /annotation/:annotationId/updateActionStatistics GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, datasetBoundingBox: Option[String]) POST /annotation/:annotationId/resetToBase @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.resetToBase(annotationId: String) -POST /annotation/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.mergedFromIds(persist: Boolean, newAnnotationId: String) +POST /annotation/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.mergedFromIds(toTemporaryStore: Boolean, newAnnotationId: String) # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(newTracingId: Option[String]) From 2ef2b326e3f7d45455849f24a12e793d2944eb94 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 11:38:49 +0100 Subject: [PATCH 187/361] fix lookup in annotationStore --- app/controllers/AnnotationController.scala | 20 ++++------ app/controllers/AnnotationIOController.scala | 39 ++++++------------- app/controllers/UserTokenController.scala | 9 ++++- .../AnnotationInformationProvider.scala | 3 +- app/models/annotation/AnnotationStore.scala | 3 ++ 5 files changed, 31 insertions(+), 43 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index dc7eac3ccf7..10b9040ba68 100755 --- a/app/controllers/AnnotationController.scala 
+++ b/app/controllers/AnnotationController.scala @@ -1,15 +1,9 @@ package controllers -import org.apache.pekko.util.Timeout -import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.annotation.{ - AnnotationLayer, - AnnotationLayerStatistics, - AnnotationLayerType -} +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType} import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.{TracingId, TracingType} import mail.{MailchimpClient, MailchimpTag} @@ -23,9 +17,11 @@ import models.task.TaskDAO import models.team.{TeamDAO, TeamService} import models.user.time._ import models.user.{User, UserDAO, UserService} +import org.apache.pekko.util.Timeout import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import play.silhouette.api.Silhouette import security.{URLSharing, UserAwareRequestLogging, WkEnv} import telemetry.SlackNotificationService import utils.{ObjectId, WkConf} @@ -105,7 +101,7 @@ class AnnotationController @Inject()( timestamp: Option[Long]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND + annotation <- provider.provideAnnotation(id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND result <- info(annotation.typ.toString, id, timestamp)(request) } yield result @@ -115,8 +111,8 @@ class AnnotationController @Inject()( def merge(typ: String, id: String, mergedTyp: String, mergedId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - annotationA <- 
provider.provideAnnotation(typ, id, request.identity) ~> NOT_FOUND - annotationB <- provider.provideAnnotation(mergedTyp, mergedId, request.identity) ~> NOT_FOUND + annotationA <- provider.provideAnnotation(typ, id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND + annotationB <- provider.provideAnnotation(mergedTyp, mergedId, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND mergedAnnotation <- annotationMerger.mergeTwo(annotationA, annotationB, request.identity) ?~> "annotation.merge.failed" restrictions = annotationRestrictionDefaults.defaultsFor(mergedAnnotation) _ <- restrictions.allowAccess(request.identity) ?~> Messages("notAllowed") ~> FORBIDDEN @@ -128,14 +124,14 @@ class AnnotationController @Inject()( def mergeWithoutType(id: String, mergedTyp: String, mergedId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - annotation <- provider.provideAnnotation(id, request.identity) ~> NOT_FOUND + annotation <- provider.provideAnnotation(id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND result <- merge(annotation.typ.toString, id, mergedTyp, mergedId)(request) } yield result } def reset(typ: String, id: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - annotation <- provider.provideAnnotation(typ, id, request.identity) ~> NOT_FOUND + annotation <- provider.provideAnnotation(typ, id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND _ <- Fox.assertTrue(userService.isTeamManagerOrAdminOf(request.identity, annotation._team)) _ <- annotationService.resetToBase(annotation) ?~> "annotation.reset.failed" updated <- provider.provideAnnotation(typ, id, request.identity) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index a1c6997b2ad..90ca7e2b18a 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -1,12 +1,6 @@ package controllers import 
collections.SequenceUtils - -import java.io.{BufferedOutputStream, File, FileOutputStream} -import java.util.zip.Deflater -import org.apache.pekko.actor.ActorSystem -import org.apache.pekko.stream.Materializer -import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.io.ZipIO import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} @@ -14,47 +8,36 @@ import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.datastore.models.annotation.{ - AnnotationLayer, - AnnotationLayerStatistics, - AnnotationLayerType, - FetchedAnnotationLayer -} -import com.scalableminds.webknossos.datastore.models.datasource.{ - AbstractSegmentationLayer, - DataLayerLike, - DataSourceLike, - GenericDataSource, - SegmentationLayer -} +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType, FetchedAnnotationLayer} +import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings.TracingType import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - VolumeDataZipFormat, - VolumeTracingDefaults, - VolumeTracingMags -} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{VolumeDataZipFormat, VolumeTracingDefaults, VolumeTracingMags} import com.typesafe.scalalogging.LazyLogging - -import javax.inject.Inject import 
models.analytics.{AnalyticsService, DownloadAnnotationEvent, UploadAnnotationEvent} import models.annotation.AnnotationState._ import models.annotation._ import models.annotation.nml.NmlResults.{NmlParseResult, NmlParseSuccess} import models.annotation.nml.{NmlResults, NmlWriter} -import models.dataset.{DataStoreDAO, Dataset, DatasetDAO, DatasetService, WKRemoteDataStoreClient} +import models.dataset._ import models.organization.OrganizationDAO import models.project.ProjectDAO import models.task._ import models.user._ +import org.apache.pekko.actor.ActorSystem +import org.apache.pekko.stream.Materializer import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData} +import play.silhouette.api.Silhouette import security.WkEnv import utils.{ObjectId, WkConf} +import java.io.{BufferedOutputStream, File, FileOutputStream} +import java.util.zip.Deflater +import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} class AnnotationIOController @Inject()( @@ -400,7 +383,7 @@ class AnnotationIOController @Inject()( volumeDataZipFormat: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - annotation <- provider.provideAnnotation(id, request.identity) + annotation <- provider.provideAnnotation(id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND result <- download(annotation.typ.toString, id, skeletonVersion, diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index cf1d75dcfb8..427067a28de 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -37,6 +37,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, userService: UserService, organizationDAO: OrganizationDAO, annotationInformationProvider: AnnotationInformationProvider, + annotationStore: AnnotationStore, 
dataStoreService: DataStoreService, tracingStoreService: TracingStoreService, jobDAO: JobDAO, @@ -181,7 +182,13 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, Fox.successful(UserAccessAnswer(granted = true)) } else { for { - annotation <- annotationInformationProvider.provideAnnotation(annotationId, userBox)(GlobalAccessContext) ?~> "annotation.notFound" + annotationBox <- annotationInformationProvider + .provideAnnotation(annotationId, userBox)(GlobalAccessContext) + .futureBox + annotation <- annotationBox match { + case Full(_) => annotationBox.toFox + case _ => annotationStore.findInCache(annotationId).toFox + } annotationAccessByToken <- token .map(annotationPrivateLinkDAO.findOneByAccessToken) .getOrElse(Fox.empty) diff --git a/app/models/annotation/AnnotationInformationProvider.scala b/app/models/annotation/AnnotationInformationProvider.scala index 61a6561ffde..586fc74b26a 100755 --- a/app/models/annotation/AnnotationInformationProvider.scala +++ b/app/models/annotation/AnnotationInformationProvider.scala @@ -2,14 +2,13 @@ package models.annotation import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.tools.{Fox, FoxImplicits} - -import javax.inject.Inject import models.annotation.AnnotationType.AnnotationType import models.annotation.handler.AnnotationInformationHandlerSelector import models.user.User import net.liftweb.common.Full import utils.ObjectId +import javax.inject.Inject import scala.concurrent.ExecutionContext class AnnotationInformationProvider @Inject()( diff --git a/app/models/annotation/AnnotationStore.scala b/app/models/annotation/AnnotationStore.scala index 76d0d3257b2..9e6e0125bd7 100755 --- a/app/models/annotation/AnnotationStore.scala +++ b/app/models/annotation/AnnotationStore.scala @@ -56,6 +56,9 @@ class AnnotationStore @Inject()( private def getFromCache(annotationId: AnnotationIdentifier): Option[Fox[Annotation]] = 
temporaryAnnotationStore.get(annotationId.toUniqueString).map(Fox.successful(_)) + def findInCache(annotationId: String): Box[Annotation] = + temporaryAnnotationStore.getAll.find(a => a._id.toString == annotationId) + def findCachedByTracingId(tracingId: String): Box[Annotation] = { val annotationOpt = temporaryAnnotationStore.getAll.find(a => a.annotationLayers.exists(_.tracingId == tracingId)) annotationOpt match { From 01276f4d80b0c01667e9ef7f83aa13b0fa0c65f9 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 11:55:23 +0100 Subject: [PATCH 188/361] create segment index for compound again --- app/models/annotation/AnnotationStore.scala | 2 -- .../tracingstore/tracings/volume/VolumeTracingService.scala | 3 +-- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/app/models/annotation/AnnotationStore.scala b/app/models/annotation/AnnotationStore.scala index 9e6e0125bd7..eb55c072934 100755 --- a/app/models/annotation/AnnotationStore.scala +++ b/app/models/annotation/AnnotationStore.scala @@ -19,8 +19,6 @@ class AnnotationStore @Inject()( private val cacheTimeout = 60 minutes - case class StoredResult(result: Fox[Annotation], timestamp: Long = System.currentTimeMillis) - def requestAnnotation(id: AnnotationIdentifier, user: Option[User])(implicit ctx: DBAccessContext): Fox[Annotation] = requestFromCache(id).getOrElse(requestFromHandler(id, user)).futureBox.recover { case e => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 1d1448619c6..43b1d0cbeea 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -739,8 +739,7 @@ class VolumeTracingService @Inject()( } } - 
val shouldCreateSegmentIndex = !toTemporaryStore && volumeSegmentIndexService.shouldCreateSegmentIndexForMerged( - tracings) + val shouldCreateSegmentIndex = volumeSegmentIndexService.shouldCreateSegmentIndexForMerged(tracings) logger.info( s"Merging ${tracings.length} volume tracings into new $newId. CreateSegmentIndex = $shouldCreateSegmentIndex") From f8f397a1348f028fea1899ed100278584173bd22 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 13:30:10 +0100 Subject: [PATCH 189/361] remove outdated comments --- webknossos-datastore/proto/Annotation.proto | 2 -- .../tracingstore/TSRemoteWebknossosClient.scala | 8 +------- .../tracings/skeleton/SkeletonTracingService.scala | 3 +-- 3 files changed, 2 insertions(+), 11 deletions(-) diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 4aea0922056..e4893956467 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -40,5 +40,3 @@ message UpdateMetadataAnnotationUpdateAction { optional string name = 1; optional string description = 2; } - -// TODO restoreLayer? 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index f71adde04de..1c66b914234 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -11,12 +11,7 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.datastore.services.{ - AccessTokenService, - RemoteWebknossosClient, - UserAccessAnswer, - UserAccessRequest -} +import com.scalableminds.webknossos.datastore.services.{AccessTokenService, RemoteWebknossosClient, UserAccessAnswer, UserAccessRequest} import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle @@ -83,7 +78,6 @@ class TSRemoteWebknossosClient @Inject()( .getWithJsonResponse[DataSourceId] ) - // TODO what about temporary/compound tracings? 
def getAnnotationIdForTracing(tracingId: String)(implicit ec: ExecutionContext): Fox[String] = annotationIdByTracingIdCache.getOrLoad( tracingId, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index a16a53e7ba8..e88746baa76 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -14,8 +14,7 @@ import scala.concurrent.ExecutionContext class SkeletonTracingService @Inject()( tracingDataStore: TracingDataStore, - temporaryTracingService: TemporaryTracingService, - val tracingMigrationService: SkeletonTracingMigrationService // TODO why is this unused? + temporaryTracingService: TemporaryTracingService )(implicit val ec: ExecutionContext) extends KeyValueStoreImplicits with ProtoGeometryImplicits From b66423d0829bbe79866c75b86f43199ce6598366 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 18 Nov 2024 14:15:18 +0100 Subject: [PATCH 190/361] add todo comment for tracing stats --- .../javascripts/oxalis/model/accessors/annotation_accessor.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts index 092a5657d80..0f85f28596f 100644 --- a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts @@ -50,6 +50,7 @@ export function getStats(tracing: Tracing): CombinedTracingStats { const { skeleton, volumes } = tracing; let totalSegmentCount = 0; for (const volumeTracing of volumes) { + // TODOM: Update annotation stats according to the 
JSON and always send all layers totalSegmentCount += volumeTracing.segments.size(); } let stats: TracingStats = { From 14bd8aa8f0691036eccc923f954645def272349f Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 15:02:59 +0100 Subject: [PATCH 191/361] single version param for download route --- app/controllers/AnnotationIOController.scala | 38 ++++++++--------- .../WKRemoteTracingStoreController.scala | 19 ++------- app/models/annotation/Annotation.scala | 4 +- app/models/annotation/AnnotationService.scala | 23 ++++++---- conf/webknossos.latest.routes | 4 +- .../TSRemoteWebknossosClient.scala | 9 +--- .../AnnotationTransactionService.scala | 16 +++---- .../annotation/UpdateActions.scala | 42 ++----------------- .../EditableMappingService.scala | 3 -- 9 files changed, 54 insertions(+), 104 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 90ca7e2b18a..001d3c98b70 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -8,12 +8,21 @@ import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType, FetchedAnnotationLayer} +import com.scalableminds.webknossos.datastore.models.annotation.{ + AnnotationLayer, + AnnotationLayerStatistics, + AnnotationLayerType, + FetchedAnnotationLayer +} import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings.TracingType import 
com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat -import com.scalableminds.webknossos.tracingstore.tracings.volume.{VolumeDataZipFormat, VolumeTracingDefaults, VolumeTracingMags} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + VolumeDataZipFormat, + VolumeTracingDefaults, + VolumeTracingMags +} import com.typesafe.scalalogging.LazyLogging import models.analytics.{AnalyticsService, DownloadAnnotationEvent, UploadAnnotationEvent} import models.annotation.AnnotationState._ @@ -347,8 +356,7 @@ class AnnotationIOController @Inject()( // NML or Zip file containing skeleton and/or volume data of this annotation. In case of Compound annotations, multiple such annotations wrapped in another zip def download(typ: String, id: String, - skeletonVersion: Option[Long], - volumeVersion: Option[Long], + version: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => @@ -368,8 +376,7 @@ class AnnotationIOController @Inject()( id, typ, request.identity, - skeletonVersion, - volumeVersion, + version, skipVolumeData.getOrElse(false), volumeDataZipFormatParsed.getOrElse(VolumeDataZipFormat.wkw)) ?~> "annotation.download.failed" } @@ -377,27 +384,20 @@ class AnnotationIOController @Inject()( } def downloadWithoutType(id: String, - skeletonVersion: Option[Long], - volumeVersion: Option[Long], + version: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { annotation <- provider.provideAnnotation(id, request.identity) ?~> "annotation.notFound" ~> NOT_FOUND - result <- download(annotation.typ.toString, - id, - skeletonVersion, - volumeVersion, - skipVolumeData, - volumeDataZipFormat)(request) + result <- download(annotation.typ.toString, id, version, skipVolumeData, volumeDataZipFormat)(request) } yield result } 
private def downloadExplorational(annotationId: String, typ: String, issuingUser: Option[User], - skeletonVersion: Option[Long], - volumeVersion: Option[Long], + version: Option[Long], skipVolumeData: Boolean, volumeDataZipFormat: VolumeDataZipFormat)(implicit ctx: DBAccessContext) = { @@ -407,7 +407,7 @@ class AnnotationIOController @Inject()( for { tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedAnnotationLayers <- Fox.serialCombined(annotation.skeletonAnnotationLayers)( - tracingStoreClient.getSkeletonTracing(_, skeletonVersion)) + tracingStoreClient.getSkeletonTracing(_, version)) user <- userService.findOneCached(annotation._user)(GlobalAccessContext) taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) nmlStream = nmlWriter.toNmlStream( @@ -439,14 +439,14 @@ class AnnotationIOController @Inject()( fetchedVolumeLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.volumeAnnotationLayers) { volumeAnnotationLayer => tracingStoreClient.getVolumeTracing(volumeAnnotationLayer, - volumeVersion, + version, skipVolumeData, volumeDataZipFormat, dataset.voxelSize) } ?~> "annotation.download.fetchVolumeLayer.failed" fetchedSkeletonLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.skeletonAnnotationLayers) { skeletonAnnotationLayer => - tracingStoreClient.getSkeletonTracing(skeletonAnnotationLayer, skeletonVersion) + tracingStoreClient.getSkeletonTracing(skeletonAnnotationLayer, version) } ?~> "annotation.download.fetchSkeletonLayer.failed" user <- userService.findOneCached(annotation._user)(GlobalAccessContext) ?~> "annotation.download.findUser.failed" taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne(_)(GlobalAccessContext)) ?~> "task.notFound" diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 4dbef690f7b..32e88b8b9a8 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ 
b/app/controllers/WKRemoteTracingStoreController.scala @@ -11,20 +11,9 @@ import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.tracingstore.AnnotationUpdatesReport import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.TracingId - -import javax.inject.Inject import models.analytics.{AnalyticsService, UpdateAnnotationEvent, UpdateAnnotationViewOnlyEvent} import models.annotation.AnnotationState._ -import models.annotation.{ - Annotation, - AnnotationDAO, - AnnotationDefaults, - AnnotationInformationProvider, - AnnotationLayerDAO, - AnnotationService, - TracingDataSourceTemporaryStore, - TracingStoreService -} +import models.annotation._ import models.dataset.{DatasetDAO, DatasetService} import models.organization.OrganizationDAO import models.user.UserDAO @@ -36,6 +25,7 @@ import scalapb.GeneratedMessage import security.{WebknossosBearerTokenAuthenticatorService, WkSilhouetteEnvironment} import utils.{ObjectId, WkConf} +import javax.inject.Inject import scala.concurrent.ExecutionContext class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStoreService, @@ -98,9 +88,8 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore annotation <- annotationDAO.findOne(annotationId) _ <- ensureAnnotationNotFinished(annotation) _ <- annotationDAO.updateModified(annotation._id, Instant.now) - /*_ <- Fox.runOptional(report.statistics) { statistics => - annotationLayerDAO.updateStatistics(annotation._id, annotationId, statistics) - }*/ // TODO stats per tracing id. note: they might arrive before the layer is created. skip them then. 
+ _ <- Fox.runOptional(report.statistics)(statistics => + annotationService.updateStatistics(annotation._id, statistics)) userBox <- bearerTokenService.userForTokenOpt(report.userToken).futureBox trackTime = report.significantChangesCount > 0 || !wkConf.WebKnossos.User.timeTrackingOnlyWithSignificantChanges _ <- Fox.runOptional(userBox)(user => diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index f4eccd723db..6a92eda3f6e 100755 --- a/app/models/annotation/Annotation.scala +++ b/app/models/annotation/Annotation.scala @@ -9,9 +9,9 @@ import com.scalableminds.webknossos.tracingstore.tracings.TracingType import models.annotation.AnnotationState._ import models.annotation.AnnotationType.AnnotationType import play.api.libs.json._ +import slick.jdbc.GetResult import slick.jdbc.GetResult._ import slick.jdbc.PostgresProfile.api._ -import slick.jdbc.GetResult import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import slick.sql.SqlAction @@ -192,7 +192,7 @@ class AnnotationLayerDAO @Inject()(SQLClient: SqlClient)(implicit ec: ExecutionC def deleteAllForAnnotationQuery(annotationId: ObjectId): SqlAction[Int, NoStream, Effect] = q"DELETE FROM webknossos.annotation_layers WHERE _annotation = $annotationId".asUpdate - def updateStatistics(annotationId: ObjectId, tracingId: String, statistics: JsObject): Fox[Unit] = + def updateStatistics(annotationId: ObjectId, tracingId: String, statistics: JsValue): Fox[Unit] = for { _ <- run(q"""UPDATE webknossos.annotation_layers SET statistics = $statistics diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 4b100e42132..62f448704c9 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -1,7 +1,5 @@ package models.annotation -import org.apache.pekko.actor.ActorSystem -import org.apache.pekko.stream.Materializer import 
com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} @@ -13,13 +11,7 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, Volu import com.scalableminds.webknossos.datastore.geometry.ColorProto import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.VoxelSize -import com.scalableminds.webknossos.datastore.models.annotation.{ - AnnotationLayer, - AnnotationLayerStatistics, - AnnotationLayerType, - AnnotationSource, - FetchedAnnotationLayer -} +import com.scalableminds.webknossos.datastore.models.annotation._ import com.scalableminds.webknossos.datastore.models.datasource.{ AdditionalAxis, ElementClass, @@ -46,6 +38,8 @@ import models.task.{Task, TaskDAO, TaskService, TaskTypeDAO} import models.team.{TeamDAO, TeamService} import models.user.{User, UserDAO, UserService} import net.liftweb.common.{Box, Full} +import org.apache.pekko.actor.ActorSystem +import org.apache.pekko.stream.Materializer import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.{JsNull, JsObject, JsValue, Json} @@ -72,6 +66,7 @@ class AnnotationService @Inject()( annotationInformationProvider: AnnotationInformationProvider, savedTracingInformationHandler: SavedTracingInformationHandler, annotationDAO: AnnotationDAO, + annotationLayerDAO: AnnotationLayerDAO, userDAO: UserDAO, taskTypeDAO: TaskTypeDAO, taskService: TaskService, @@ -876,4 +871,14 @@ class AnnotationService @Inject()( "volume" } } + + def updateStatistics(annotationId: ObjectId, statistics: JsObject): Fox[Unit] = { + Fox.serialCombined(statistics.value.toSeq) { + case (tracingId, statisticsForTracing) => + 
annotationLayerDAO.updateStatistics(annotationId, tracingId, statisticsForTracing) + } + // TODO test + remove this line once frontend is adapted + Fox.successful(()) + } + } diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 1b067b3821e..7fc6c73a208 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -149,13 +149,13 @@ PATCH /annotations/:typ/:id/editLockedState GET /annotations/:id/info controllers.AnnotationController.infoWithoutType(id: String, timestamp: Option[Long]) DELETE /annotations/:id controllers.AnnotationController.cancelWithoutType(id: String) POST /annotations/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.mergeWithoutType(id: String, mergedTyp: String, mergedId: String) -GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) +GET /annotations/:id/download controllers.AnnotationIOController.downloadWithoutType(id: String, version: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) POST /annotations/:id/acquireMutex controllers.AnnotationController.tryAcquiringAnnotationMutex(id: String) GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Option[Long]) DELETE /annotations/:typ/:id controllers.AnnotationController.cancel(typ: String, id: String) POST /annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.merge(typ: String, id: String, mergedTyp: String, mergedId: String) -GET /annotations/:typ/:id/download controllers.AnnotationIOController.download(typ: String, id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) +GET /annotations/:typ/:id/download controllers.AnnotationIOController.download(typ: String, id: 
String, version: Option[Long], skipVolumeData: Option[Boolean], volumeDataZipFormat: Option[String]) GET /annotations/source/:accessTokenOrId controllers.AnnotationPrivateLinkController.annotationSource(accessTokenOrId: String, userToken: Option[String]) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 1c66b914234..e8c264e03e7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -8,7 +8,7 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing -import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{AccessTokenService, RemoteWebknossosClient, UserAccessAnswer, UserAccessRequest} @@ -22,7 +22,6 @@ import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt case class AnnotationUpdatesReport(annotationId: String, - // TODO stats per tracing id? 
coordinate with frontend timestamps: List[Instant], statistics: Option[JsObject], significantChangesCount: Int, @@ -89,12 +88,6 @@ class TSRemoteWebknossosClient @Inject()( .getWithJsonResponse[String] ) ?~> "annotation.idForTracing.failed" - def updateAnnotationLayers(annotationId: String, annotationLayers: List[AnnotationLayer]): Fox[Unit] = - rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/updateAnnotationLayers") - .addQueryString("annotationId" -> annotationId) - .addQueryString("key" -> tracingStoreKey) - .postJson(annotationLayers) - def updateAnnotation(annotationId: String, annotationProto: AnnotationProto): Fox[Unit] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/updateAnnotation") .addQueryString("annotationId" -> annotationId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 6bd9a1e7294..b8650aec15e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -2,19 +2,19 @@ package com.scalableminds.webknossos.tracingstore.annotation import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.time.Instant -import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.util.tools.Fox.bool2Fox -import com.scalableminds.webknossos.tracingstore.{ - TSRemoteWebknossosClient, - TracingStoreRedisStore, - AnnotationUpdatesReport -} -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, TracingId} +import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.webknossos.tracingstore.tracings.volume.{ BucketMutatingVolumeUpdateAction, 
UpdateBucketVolumeAction, VolumeTracingService } +import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore, TracingId} +import com.scalableminds.webknossos.tracingstore.{ + AnnotationUpdatesReport, + TSRemoteWebknossosClient, + TracingStoreRedisStore +} import com.typesafe.scalalogging.LazyLogging import play.api.http.Status.CONFLICT import play.api.libs.json.Json @@ -281,7 +281,7 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe AnnotationUpdatesReport( annotationId, timestamps = updateGroups.map(g => Instant(g.timestamp)), - statistics = updateGroups.flatMap(_.stats).lastOption, // TODO statistics per tracing/layer + statistics = updateGroups.flatMap(_.stats).lastOption, significantChangesCount = updateGroups.map(_.significantChangesCount).sum, viewChangesCount = updateGroups.map(_.viewChangesCount).sum, tc.userTokenOpt diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index e9a6443a373..b4001397916 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -1,43 +1,9 @@ package com.scalableminds.webknossos.tracingstore.annotation -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ - MergeAgglomerateUpdateAction, - SplitAgglomerateUpdateAction -} -import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ - CreateEdgeSkeletonAction, - CreateNodeSkeletonAction, - CreateTreeSkeletonAction, - DeleteEdgeSkeletonAction, - DeleteNodeSkeletonAction, - DeleteTreeSkeletonAction, - MergeTreeSkeletonAction, - MoveTreeComponentSkeletonAction, - UpdateNodeSkeletonAction, - UpdateTracingSkeletonAction, - 
UpdateTreeEdgesVisibilitySkeletonAction, - UpdateTreeGroupVisibilitySkeletonAction, - UpdateTreeGroupsSkeletonAction, - UpdateTreeSkeletonAction, - UpdateTreeVisibilitySkeletonAction, - UpdateUserBoundingBoxVisibilitySkeletonAction, - UpdateUserBoundingBoxesSkeletonAction -} -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - CreateSegmentVolumeAction, - DeleteSegmentDataVolumeAction, - DeleteSegmentVolumeAction, - ImportVolumeDataVolumeAction, - RemoveFallbackLayerVolumeAction, - UpdateBucketVolumeAction, - UpdateMappingNameVolumeAction, - UpdateSegmentGroupsVolumeAction, - UpdateSegmentVolumeAction, - UpdateTracingVolumeAction, - UpdateUserBoundingBoxVisibilityVolumeAction, - UpdateUserBoundingBoxesVolumeAction -} -import play.api.libs.json.{Format, JsError, JsObject, JsPath, JsResult, JsValue, Json, OFormat, Reads} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{MergeAgglomerateUpdateAction, SplitAgglomerateUpdateAction} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ +import com.scalableminds.webknossos.tracingstore.tracings.volume._ +import play.api.libs.json._ trait UpdateAction { def actionTimestamp: Option[Long] diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 600e6b5349c..2a0e85e0cea 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -107,9 +107,6 @@ class EditableMappingService @Inject()( adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) private val adHocMeshService: AdHocMeshService = 
adHocMeshServiceHolder.tracingStoreAdHocMeshService - // TODO cache materialized stuff again, for e.g. faster bucket loading - // private lazy val materializedInfoCache: AlfuCache[(String, Long), EditableMappingInfo] = AlfuCache(maxCapacity = 100) - private lazy val segmentToAgglomerateChunkCache: AlfuCache[(String, Long, Long), Seq[(Long, Long)]] = AlfuCache() From 8549b21f2fc3360797dd2cdc67a127436d620dda Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 15:24:29 +0100 Subject: [PATCH 192/361] Fix unit tests, add two-level caching for materialized annotationWithTracings --- app/models/annotation/AnnotationService.scala | 4 ++- test/backend/VolumeBucketKeyTestSuite.scala | 12 +++---- .../annotation/TSAnnotationService.scala | 34 +++++++++++++++---- 3 files changed, 36 insertions(+), 14 deletions(-) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 62f448704c9..5637dafd98a 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -1,6 +1,7 @@ package models.annotation import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant @@ -19,7 +20,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ SegmentationLayerLike => SegmentationLayer } import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters +import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationLayerParameters, AnnotationWithTracings} import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.tracings.volume.{ 
MagRestrictions, @@ -93,6 +94,7 @@ class AnnotationService @Inject()( with ProtoGeometryImplicits with AnnotationLayerPrecedence with LazyLogging { + implicit val actorSystem: ActorSystem = ActorSystem() val DefaultAnnotationListLimit = 1000 diff --git a/test/backend/VolumeBucketKeyTestSuite.scala b/test/backend/VolumeBucketKeyTestSuite.scala index 48db63a93f3..0e2d0a76f8b 100644 --- a/test/backend/VolumeBucketKeyTestSuite.scala +++ b/test/backend/VolumeBucketKeyTestSuite.scala @@ -26,14 +26,14 @@ class VolumeBucketKeyTestSuite extends PlaySpec { val bucketPos = BucketPosition(32, 64, 96, Vec3Int(1, 1, 1), None) "match defined bucket key" in { val key = bucketKeyBuilder.build(layerName, bucketPos) - assert(key == s"$layerName/1/53-[1,2,3]") + assert(key == s"$layerName/1/[1,2,3]") } "expands mag when anisotropic" in { val key = bucketKeyBuilder.build(layerName, BucketPosition(32, 64, 96, Vec3Int(4, 4, 1), None)) - assert(key == s"$layerName/4-4-1/36-[0,0,3]") + assert(key == s"$layerName/4-4-1/[0,0,3]") } "is parsed as the same bucket position" in { - bucketKeyBuilder.parse(s"$layerName/1/53-[1,2,3]", None) match { + bucketKeyBuilder.parse(s"$layerName/1/[1,2,3]", None) match { case Some((layer, parsedPos)) => assert(layer == layerName) assert(parsedPos == bucketPos) @@ -56,10 +56,10 @@ class VolumeBucketKeyTestSuite extends PlaySpec { bucketPos, Some(additionalAxes) ) - assert(key == s"$layerName/1/53-[4,5][1,2,3]") + assert(key == s"$layerName/1/[4,5][1,2,3]") } "is parsed as the same bucket position" in { - bucketKeyBuilder.parse(s"$layerName/1/53-[4,5][1,2,3]", Some(additionalAxes)) match { + bucketKeyBuilder.parse(s"$layerName/1/[4,5][1,2,3]", Some(additionalAxes)) match { case Some((layer, parsedPos)) => assert(layer == layerName) assert(parsedPos == bucketPos) @@ -73,7 +73,7 @@ class VolumeBucketKeyTestSuite extends PlaySpec { BucketPosition(32, 64, 96, Vec3Int(1, 1, 1), Some(additionalCoordinates.reverse)), Some(additionalAxes) ) - assert(key == 
s"$layerName/1/53-[4,5][1,2,3]") + assert(key == s"$layerName/1/[4,5][1,2,3]") } } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 2275729f8c2..56821035fa1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -7,16 +7,30 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} -import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationLayerTypeProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.Annotation.{ + AnnotationLayerProto, + AnnotationLayerTypeProto, + AnnotationProto +} import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{EditableMappingLayer, EditableMappingService, EditableMappingUpdateAction, EditableMappingUpdater} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + EditableMappingLayer, + EditableMappingService, + EditableMappingUpdateAction, + EditableMappingUpdater +} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService -import 
com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{CreateNodeSkeletonAction, DeleteNodeSkeletonAction, SkeletonUpdateAction, UpdateTracingSkeletonAction} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ + CreateNodeSkeletonAction, + DeleteNodeSkeletonAction, + SkeletonUpdateAction, + UpdateTracingSkeletonAction +} import com.scalableminds.webknossos.tracingstore.tracings.volume._ import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import com.typesafe.scalalogging.LazyLogging @@ -42,9 +56,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss with UpdateGroupHandling with LazyLogging { + // two-level caching: outer key: annotation id; inner key version + // This way we cache at most two versions of the same annotation, and at most 1000 different annotations private lazy val materializedAnnotationWithTracingCache = - // annotation id, version - AlfuCache[(String, Long), AnnotationWithTracings](maxCapacity = 1000) + AlfuCache[String, AlfuCache[Long, AnnotationWithTracings]](maxCapacity = 1000) + + private def newInnerCache(implicit ec: ExecutionContext): Fox[AlfuCache[Long, AnnotationWithTracings]] = + Fox.successful(AlfuCache[Long, AnnotationWithTracings](maxCapacity = 2)) def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = @@ -71,8 +89,10 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss targetVersion <- determineTargetVersion(annotationId, newestMaterialized, version) ?~> "determineTargetVersion.failed" // When requesting any other than the newest version, do not consider the changes final reportChangesToWk = version.isEmpty || version.contains(targetVersion) - updatedAnnotation <- materializedAnnotationWithTracingCache.getOrLoad( - (annotationId, targetVersion), + materializedAnnotationInnerCache <- 
materializedAnnotationWithTracingCache.getOrLoad(annotationId, + _ => newInnerCache) + updatedAnnotation <- materializedAnnotationInnerCache.getOrLoad( + targetVersion, _ => getWithTracingsVersioned(annotationId, targetVersion, reportChangesToWk = reportChangesToWk) ) } yield updatedAnnotation From ac5bd226a127e6cc9861f8867d19905eb8998b09 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 15:41:17 +0100 Subject: [PATCH 193/361] unify logging --- .../TSRemoteWebknossosClient.scala | 10 ++++- .../annotation/TSAnnotationService.scala | 38 +++++++++---------- .../volume/VolumeTracingService.scala | 19 ++++++---- 3 files changed, 39 insertions(+), 28 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index e8c264e03e7..21e16adde51 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -11,7 +11,12 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.datastore.services.{AccessTokenService, RemoteWebknossosClient, UserAccessAnswer, UserAccessRequest} +import com.scalableminds.webknossos.datastore.services.{ + AccessTokenService, + RemoteWebknossosClient, + UserAccessAnswer, + UserAccessRequest +} import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle @@ -58,6 +63,7 @@ class TSRemoteWebknossosClient @Inject()( 
.addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) .withTokenFromContext + .silent .getWithJsonResponse[DataSourceLike] def getDataStoreUriForDataSource(organizationId: String, datasetName: String): Fox[String] = @@ -74,6 +80,7 @@ class TSRemoteWebknossosClient @Inject()( rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSourceId") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) + .silent .getWithJsonResponse[DataSourceId] ) @@ -92,6 +99,7 @@ class TSRemoteWebknossosClient @Inject()( rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/updateAnnotation") .addQueryString("annotationId" -> annotationId) .addQueryString("key" -> tracingStoreKey) + .silent .postProto(annotationProto) def createTracingFor(annotationId: String, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 56821035fa1..bc9cdcd3eab 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -103,10 +103,14 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { annotationWithVersion <- tracingDataStore.annotations.get(annotationId, Some(version))( fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" - _ = logger.info( - s"cache miss for $annotationId v$version, applying updates from ${annotationWithVersion.version} to $version...") annotation = annotationWithVersion.value - updated <- applyPendingUpdates(annotation, annotationId, version, reportChangesToWk) ?~> "applyUpdates.failed" + annotationWithTracings <- findTracingsForAnnotation(annotation) ?~> "findTracingsForAnnotation.failed" + annotationWithTracingsAndMappings <- 
findEditableMappingsForAnnotation( + annotationId, + annotationWithTracings, + annotation.version, + version) // Note: this targetVersion is used for the updater buffers, and is overwritten for each update group, see annotation.withNewUpdaters + updated <- applyPendingUpdates(annotationWithTracingsAndMappings, annotationId, version, reportChangesToWk) ?~> "applyUpdates.failed" } yield updated def currentMaterializableVersion(annotationId: String): Fox[Long] = @@ -184,10 +188,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss // Note: works only if revert actions are in separate update groups for { _ <- bool2Fox(revertAction.sourceVersion >= annotationWithTracings.annotation.earliestAccessibleVersion) ?~> f"Trying to revert to ${revertAction.sourceVersion}, but earliest accessible is ${annotationWithTracings.annotation.earliestAccessibleVersion}" + before = Instant.now sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(revertAction.sourceVersion)) - _ = logger.info( - s"reverting to suorceVersion ${revertAction.sourceVersion}. 
got sourceAnnotation with version ${sourceAnnotation.version} with ${sourceAnnotation.skeletonStats}") _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, revertAction.sourceVersion, newVersion) + _ = Instant.logSince( + before, + s"Reverting annotation $annotationId from v${annotationWithTracings.version} to v${revertAction.sourceVersion}") } yield sourceAnnotation private def resetToBase(annotationId: String, annotationWithTracings: AnnotationWithTracings, newVersion: Long)( @@ -195,10 +201,11 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tc: TokenContext): Fox[AnnotationWithTracings] = { // Note: works only if reset actions are in separate update groups val sourceVersion = 0L // Tasks are always created with as v0 currently - logger.info(s"Resetting annotation $annotationId to base (v$sourceVersion)") + val before = Instant.now for { sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(sourceVersion)) _ <- revertDistributedElements(annotationWithTracings, sourceAnnotation, sourceVersion, newVersion) + _ = Instant.logSince(before, s"Resetting annotation $annotationId to base (v$sourceVersion)") } yield sourceAnnotation } @@ -263,19 +270,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss bool2Fox(!volumeTracing.mappingIsLocked.getOrElse(false)) ?~> "annotation.mappingIsLocked" private def applyPendingUpdates( - annotation: AnnotationProto, + annotationWithTracingsAndMappings: AnnotationWithTracings, annotationId: String, targetVersion: Long, reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - updateGroupsAsSaved <- findPendingUpdates(annotationId, annotation.version, targetVersion) ?~> "findPendingUpdates.failed" + updateGroupsAsSaved <- findPendingUpdates(annotationId, annotationWithTracingsAndMappings.version, targetVersion) ?~> "findPendingUpdates.failed" updatesGroupsRegrouped = 
regroupByIsolationSensitiveActions(updateGroupsAsSaved) - annotationWithTracings <- findTracingsForAnnotation(annotation) ?~> "findTracingsForAnnotation.failed" - annotationWithTracingsAndMappings <- findEditableMappingsForAnnotation( - annotationId, - annotationWithTracings, - annotation.version, - targetVersion) // Note: this targetVersion is overwritten for each update group, see annotation.withNewUpdaters updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesGroupsRegrouped, @@ -317,7 +318,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss currentMaterializedVersion: Long, targetVersion: Long)(implicit ec: ExecutionContext, tc: TokenContext) = { val volumeWithEditableMapping = annotationWithTracings.volumesThatHaveEditableMapping - logger.info(s"fetching editable mappings ${volumeWithEditableMapping.map(_._2).mkString(",")}") for { idInfoUpdaterTuples <- Fox.serialCombined(volumeWithEditableMapping) { case (volumeTracing, volumeTracingId) => @@ -407,8 +407,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss targetVersion: Long, reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { - logger.info(s"applying ${updates.length} to go from v${annotationWithTracings.version} to v$targetVersion") - def updateIter(annotationWithTracingsFox: Fox[AnnotationWithTracings], remainingUpdates: List[UpdateAction]): Fox[AnnotationWithTracings] = annotationWithTracingsFox.futureBox.flatMap { @@ -429,7 +427,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss Some(annotationWithTracings.withNewUpdaters(annotationWithTracings.version, targetVersion)), updates) updatedWithNewVerson = updated.withVersion(targetVersion) - _ = logger.info(s"flushing v$targetVersion, with ${updated.skeletonStats}") _ <- updatedWithNewVerson.flushBufferedUpdates() _ <- flushUpdatedTracings(updatedWithNewVerson, 
updates) _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) @@ -860,8 +857,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss 0L, targetVersion) _ <- updater.applyUpdatesAndSave(editableMappingInfo, linearizedEditableMappingUpdates) - _ = logger.info( - s"Merging ${tracingsWithIds.length} editable mappings took ${Instant.since(before)} (applied ${linearizedEditableMappingUpdates.length} updates)") + _ = Instant.logSince( + before, + s"Merging ${tracingsWithIds.length} editable mappings by applying ${linearizedEditableMappingUpdates.length} updates") } yield targetVersion } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 43b1d0cbeea..1024f58abb0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -251,13 +251,12 @@ class VolumeTracingService @Inject()( sourceTracing: VolumeTracing, newVersion: Long, tracingBeforeRevert: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { + val before = Instant.now val dataLayer = volumeTracingLayer(tracingId, tracingBeforeRevert) val bucketStreamBeforeRevert = dataLayer.volumeBucketProvider.bucketStreamWithVersion(version = Some(tracingBeforeRevert.version)) - logger.info(s"reverting volume data from v${tracingBeforeRevert.version} to v$sourceVersion, creating v$newVersion") - for { fallbackLayer <- getFallbackLayer(tracingId, tracingBeforeRevert) segmentIndexBuffer = new VolumeSegmentIndexBuffer(tracingId, @@ -306,6 +305,9 @@ class VolumeTracingService @Inject()( } else Fox.successful(()) } _ 
<- segmentIndexBuffer.flush() + _ = Instant.logSince( + before, + s"Reverting volume data of $tracingId from v${tracingBeforeRevert.version} to v$sourceVersion, creating v$newVersion") } yield () } @@ -531,6 +533,7 @@ class VolumeTracingService @Inject()( newTracingId: String, newTracing: VolumeTracing)(implicit tc: TokenContext): Fox[Unit] = { var bucketCount = 0 + val before = Instant.now for { isTemporaryTracing <- temporaryTracingService.isTemporaryTracing(sourceTracingId) sourceDataLayer = volumeTracingLayer(sourceTracingId, sourceTracing, isTemporaryTracing) @@ -567,8 +570,9 @@ class VolumeTracingService @Inject()( } yield () } else Fox.successful(()) } - _ = logger.info( - s"Duplicated $bucketCount volume buckets from $sourceTracingId v${sourceTracing.version} to $newTracingId v${newTracing.version}.") + _ = Instant.logSince( + before, + s"Duplicating $bucketCount volume buckets from $sourceTracingId v${sourceTracing.version} to $newTracingId v${newTracing.version}.") _ <- segmentIndexBuffer.flush() } yield () } @@ -724,6 +728,7 @@ class VolumeTracingService @Inject()( newId: String, newVersion: Long, toTemporaryStore: Boolean)(implicit mp: MessagesProvider, tc: TokenContext): Fox[MergedVolumeStats] = { + val before = Instant.now val elementClass = tracings.headOption.map(_.elementClass).getOrElse(elementClassToProto(ElementClass.uint8)) val magSets = new mutable.HashSet[Set[Vec3Int]]() @@ -741,9 +746,6 @@ class VolumeTracingService @Inject()( val shouldCreateSegmentIndex = volumeSegmentIndexService.shouldCreateSegmentIndexForMerged(tracings) - logger.info( - s"Merging ${tracings.length} volume tracings into new $newId. CreateSegmentIndex = $shouldCreateSegmentIndex") - // If none of the tracings contained any volume data. 
Do not save buckets, do not touch mag list if (magSets.isEmpty) Fox.successful(MergedVolumeStats.empty(shouldCreateSegmentIndex)) @@ -804,6 +806,9 @@ class VolumeTracingService @Inject()( } yield () } _ <- segmentIndexBuffer.flush() + _ = Instant.logSince( + before, + s"Merging buckets from ${tracings.length} volume tracings into new $newId, with createSegmentIndex = $shouldCreateSegmentIndex") } yield mergedVolume.stats(shouldCreateSegmentIndex) } } From e7a464f94c2e1177ee67e2607d468f702cfd36ec Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 16:21:34 +0100 Subject: [PATCH 194/361] cleanup --- app/models/annotation/AnnotationService.scala | 4 +--- webknossos-datastore/proto/Annotation.proto | 20 ------------------- .../tracingstore/TracingStoreModule.scala | 2 -- .../AnnotationTransactionService.scala | 3 --- .../annotation/AnnotationWithTracings.scala | 5 +---- .../annotation/TSAnnotationService.scala | 2 +- .../tracings/TemporaryTracingService.scala | 6 +++--- .../tracings/volume/VolumeTracingLayer.scala | 2 +- .../volume/VolumeTracingService.scala | 3 +-- 9 files changed, 8 insertions(+), 39 deletions(-) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 5637dafd98a..ebe3f8b1833 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -1,7 +1,6 @@ package models.annotation import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} -import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.time.Instant @@ -20,7 +19,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ SegmentationLayerLike => SegmentationLayer } import com.scalableminds.webknossos.datastore.rpc.RPC -import 
com.scalableminds.webknossos.tracingstore.annotation.{AnnotationLayerParameters, AnnotationWithTracings} +import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.tracings.volume.{ MagRestrictions, @@ -479,7 +478,6 @@ class AnnotationService @Inject()( earliestAccessibleVersion = 0L ) _ <- tracingStoreClient.saveAnnotationProto(annotationBase._id, annotationBaseProto) - _ = logger.info(s"inserting base annotation ${annotationBase._id} for task ${task._id}") _ <- annotationDAO.insertOne(annotationBase) } yield () diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index e4893956467..c4087124c7d 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -20,23 +20,3 @@ message AnnotationLayerProto { required string name = 2; required AnnotationLayerTypeProto type = 4; } - -message AddLayerAnnotationUpdateAction { - required string name = 1; - required string tracingId = 2; - required AnnotationLayerTypeProto type = 5; -} - -message DeleteLayerAnnotationUpdateAction { - required string tracingId = 1; -} - -message UpdateLayerMetadataAnnotationUpdateAction { - required string tracingId = 1; - required string name = 2; -} - -message UpdateMetadataAnnotationUpdateAction { - optional string name = 1; - optional string description = 2; -} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index 8057d8fd91d..e67aaddec71 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -6,7 +6,6 @@ 
import com.scalableminds.webknossos.datastore.services.AdHocMeshServiceHolder import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService -import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import com.scalableminds.webknossos.tracingstore.tracings.{TemporaryTracingService, TracingDataStore} import org.apache.pekko.actor.ActorSystem @@ -19,7 +18,6 @@ class TracingStoreModule extends AbstractModule { bind(classOf[ActorSystem]).annotatedWith(Names.named("webknossos-tracingstore")).toInstance(system) bind(classOf[TracingDataStore]).asEagerSingleton() bind(classOf[TemporaryTracingService]).asEagerSingleton() - bind(classOf[SkeletonTracingService]).asEagerSingleton() bind(classOf[VolumeTracingService]).asEagerSingleton() bind(classOf[TracingStoreAccessTokenService]).asEagerSingleton() bind(classOf[TSRemoteWebknossosClient]).asEagerSingleton() diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index b8650aec15e..f8a6ac78a2b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -187,9 +187,6 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe for { _ <- reportUpdates(annotationId, updateGroups) currentCommittedVersion: Fox[Long] = annotationService.currentMaterializableVersion(annotationId) - 
_ = logger.info(s"trying to commit ${updateGroups - .map(_.actions.length) - .sum} actions in ${updateGroups.length} groups (versions ${updateGroups.map(_.version).mkString(",")})") newVersion <- updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => previousVersion.flatMap { prevVersion: Long => if (prevVersion + 1 == updateGroup.version) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 15563fab4a0..71f5e483f91 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -91,7 +91,7 @@ case class AnnotationWithTracings( tracingsById = tracingsById.updated(tracingId, tracing) ) - def deleteTracing(a: DeleteLayerAnnotationAction): AnnotationWithTracings = + def deleteLayer(a: DeleteLayerAnnotationAction): AnnotationWithTracings = this.copy( annotation = annotation.copy(annotationLayers = annotation.annotationLayers.filter(_.tracingId != a.tracingId)), tracingsById = tracingsById.removed(a.tracingId) @@ -157,7 +157,4 @@ case class AnnotationWithTracings( } yield () } - def skeletonStats: String = - f"skeleton with ${getSkeletons.map(_._2).map(_.trees.map(_.nodes.length).sum).mkString} nodes" - } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index bc9cdcd3eab..b2363902f77 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -136,7 +136,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case a: AddLayerAnnotationAction => addLayer(annotationId, annotationWithTracings, a, targetVersion) case a: DeleteLayerAnnotationAction => - Fox.successful(annotationWithTracings.deleteTracing(a)) + Fox.successful(annotationWithTracings.deleteLayer(a)) case a: UpdateLayerMetadataAnnotationAction => Fox.successful(annotationWithTracings.updateLayerMetadata(a)) case a: UpdateMetadataAnnotationAction => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala index 104b60c1784..68f866653ae 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TemporaryTracingService.scala @@ -12,8 +12,8 @@ import javax.inject.Inject import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt -// This temporary store is for temporary tracings only (e.g. compound projects) -// and cannot be used for download or versioning +// This service holds temporary stores, meant for temporary tracings only (e.g.
compound projects) +// They cannot be used for download or updating/versioning class TemporaryTracingService @Inject()( skeletonStore: TemporaryTracingStore[SkeletonTracing], volumeStore: TemporaryTracingStore[VolumeTracing], @@ -80,7 +80,7 @@ class TemporaryTracingService @Inject()( def isTemporaryTracing(tracingId: String): Fox[Boolean] = temporaryTracingIdStore.contains(temporaryTracingIdKey(tracingId)) - def assertTracingStillInCache(tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = + def assertTracingStillPresent(tracingId: String)(implicit ec: ExecutionContext): Fox[Unit] = for { _ <- bool2Fox(volumeStore.contains(tracingId)) ?~> "Temporary Volume Tracing expired" } yield () diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index c7d7e7aa874..f1e88112b3d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -49,7 +49,7 @@ class TemporaryVolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit v override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = for { - _ <- temporaryTracingService.assertTracingStillInCache(layer.name) + _ <- temporaryTracingService.assertTracingStillPresent(layer.name) data <- loadBucket(layer, readInstruction.bucket, readInstruction.version) } yield data diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 1024f58abb0..c98c3525a97 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -126,7 +126,6 @@ class VolumeTracingService @Inject()( Fox.failure("Cannot mutate volume data in annotation with editable mapping.") } else updateBucket(tracingId, tracing, a, segmentIndexBuffer, newVersion) ?~> "Failed to save volume data." - //case a: RevertToVersionVolumeAction => revertToVolumeVersion(tracingId, a.sourceVersion, updateGroup.version, tracing, userToken) case a: DeleteSegmentDataVolumeAction => if (!tracing.getHasSegmentIndex) { Fox.failure("Cannot delete segment data for annotations without segment index.") @@ -328,7 +327,7 @@ class VolumeTracingService @Inject()( } mappingName <- selectMappingName(tracing) mags <- - // if none of the tracings contained any volume data do not save buckets, use full resolution list, as already initialized on wk-side + // if none of the tracings contained any volume data do not save buckets, use full mag list, as already initialized on wk-side if (magSets.isEmpty) Fox.successful(tracing.mags.map(vec3IntFromProto).toSet) else { From d6a8e8e62eb8d77c1a1be102d4abae932afce171 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 18 Nov 2024 19:58:31 +0100 Subject: [PATCH 195/361] init fossildb migration --- .gitignore | 3 +- .../fossildbapi_pb2.py | 77 +++ .../fossildbapi_pb2_grpc.py | 462 ++++++++++++++++++ .../main.py | 50 ++ 4 files changed, 591 insertions(+), 1 deletion(-) create mode 100644 tools/migration-unified-annotation-versioning/fossildbapi_pb2.py create mode 100644 tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py create mode 100644 tools/migration-unified-annotation-versioning/main.py diff --git a/.gitignore b/.gitignore index 3d0c1ff67ad..a9d185a95c5 100755 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,7 @@ RUNNING_PID .bloop .metals metals.sbt 
+__pycache__/ # Webknossos @@ -109,4 +110,4 @@ metals.sbt !.yarn/releases !.yarn/sdks !.yarn/versions -tools/**/.yarn/* \ No newline at end of file +tools/**/.yarn/* diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py new file mode 100644 index 00000000000..6eafe73fb96 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py @@ -0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: fossildbapi.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x66ossildbapi.proto\x12 com.scalableminds.fossildb.proto\"\x0f\n\rHealthRequest\"4\n\x0bHealthReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"R\n\nGetRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\x12\n\nmayBeEmpty\x18\x04 \x01(\x08\"W\n\x08GetReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x02(\x0c\x12\x15\n\ractualVersion\x18\x04 \x02(\x04\"M\n\nPutRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\r\n\x05value\x18\x04 \x02(\x0c\"1\n\x08PutReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"A\n\rDeleteRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x02(\x04\"4\n\x0b\x44\x65leteReply\x12\x0f\n\x07success\x18\x01 
\x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"k\n\x1aGetMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"c\n\x18GetMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\x0c\x12\x10\n\x08versions\x18\x04 \x03(\x04\"s\n\x16GetMultipleKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x15\n\rstartAfterKey\x18\x02 \x01(\t\x12\x0e\n\x06prefix\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\x04\x12\r\n\x05limit\x18\x05 \x01(\r\"s\n\x14GetMultipleKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\x12\x0e\n\x06values\x18\x04 \x03(\x0c\x12\x16\n\x0e\x61\x63tualVersions\x18\x05 \x03(\x04\"n\n\x1d\x44\x65leteMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"D\n\x1b\x44\x65leteMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"K\n\x0fListKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\r\n\x05limit\x18\x02 \x01(\r\x12\x15\n\rstartAfterKey\x18\x03 \x01(\t\"D\n\rListKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\"U\n\x13ListVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\r\n\x05limit\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"L\n\x11ListVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\"\x0f\n\rBackupRequest\"a\n\x0b\x42\x61\x63kupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 
\x02(\r\x12\x11\n\ttimestamp\x18\x04 \x02(\x04\x12\x0c\n\x04size\x18\x05 \x02(\x04\"\x1a\n\x18RestoreFromBackupRequest\"?\n\x16RestoreFromBackupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"\x17\n\x15\x43ompactAllDataRequest\"<\n\x13\x43ompactAllDataReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\":\n\x0f\x45xportDBRequest\x12\x12\n\nnewDataDir\x18\x01 \x02(\t\x12\x13\n\x0boptionsFile\x18\x02 \x01(\t\"6\n\rExportDBReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t2\xc2\x0c\n\x08\x46ossilDB\x12j\n\x06Health\x12/.com.scalableminds.fossildb.proto.HealthRequest\x1a-.com.scalableminds.fossildb.proto.HealthReply\"\x00\x12\x61\n\x03Get\x12,.com.scalableminds.fossildb.proto.GetRequest\x1a*.com.scalableminds.fossildb.proto.GetReply\"\x00\x12\x91\x01\n\x13GetMultipleVersions\x12<.com.scalableminds.fossildb.proto.GetMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.GetMultipleVersionsReply\"\x00\x12\x85\x01\n\x0fGetMultipleKeys\x12\x38.com.scalableminds.fossildb.proto.GetMultipleKeysRequest\x1a\x36.com.scalableminds.fossildb.proto.GetMultipleKeysReply\"\x00\x12\x61\n\x03Put\x12,.com.scalableminds.fossildb.proto.PutRequest\x1a*.com.scalableminds.fossildb.proto.PutReply\"\x00\x12j\n\x06\x44\x65lete\x12/.com.scalableminds.fossildb.proto.DeleteRequest\x1a-.com.scalableminds.fossildb.proto.DeleteReply\"\x00\x12\x9a\x01\n\x16\x44\x65leteMultipleVersions\x12?.com.scalableminds.fossildb.proto.DeleteMultipleVersionsRequest\x1a=.com.scalableminds.fossildb.proto.DeleteMultipleVersionsReply\"\x00\x12p\n\x08ListKeys\x12\x31.com.scalableminds.fossildb.proto.ListKeysRequest\x1a/.com.scalableminds.fossildb.proto.ListKeysReply\"\x00\x12|\n\x0cListVersions\x12\x35.com.scalableminds.fossildb.proto.ListVersionsRequest\x1a\x33.com.scalableminds.fossildb.proto.ListVersionsReply\"\x00\x12j\n\x06\x42\x61\x63kup\x12/.com.scalableminds.fossildb.proto.BackupRequest\x1
a-.com.scalableminds.fossildb.proto.BackupReply\"\x00\x12\x8b\x01\n\x11RestoreFromBackup\x12:.com.scalableminds.fossildb.proto.RestoreFromBackupRequest\x1a\x38.com.scalableminds.fossildb.proto.RestoreFromBackupReply\"\x00\x12\x82\x01\n\x0e\x43ompactAllData\x12\x37.com.scalableminds.fossildb.proto.CompactAllDataRequest\x1a\x35.com.scalableminds.fossildb.proto.CompactAllDataReply\"\x00\x12p\n\x08\x45xportDB\x12\x31.com.scalableminds.fossildb.proto.ExportDBRequest\x1a/.com.scalableminds.fossildb.proto.ExportDBReply\"\x00') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'fossildbapi_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _HEALTHREQUEST._serialized_start=55 + _HEALTHREQUEST._serialized_end=70 + _HEALTHREPLY._serialized_start=72 + _HEALTHREPLY._serialized_end=124 + _GETREQUEST._serialized_start=126 + _GETREQUEST._serialized_end=208 + _GETREPLY._serialized_start=210 + _GETREPLY._serialized_end=297 + _PUTREQUEST._serialized_start=299 + _PUTREQUEST._serialized_end=376 + _PUTREPLY._serialized_start=378 + _PUTREPLY._serialized_end=427 + _DELETEREQUEST._serialized_start=429 + _DELETEREQUEST._serialized_end=494 + _DELETEREPLY._serialized_start=496 + _DELETEREPLY._serialized_end=548 + _GETMULTIPLEVERSIONSREQUEST._serialized_start=550 + _GETMULTIPLEVERSIONSREQUEST._serialized_end=657 + _GETMULTIPLEVERSIONSREPLY._serialized_start=659 + _GETMULTIPLEVERSIONSREPLY._serialized_end=758 + _GETMULTIPLEKEYSREQUEST._serialized_start=760 + _GETMULTIPLEKEYSREQUEST._serialized_end=875 + _GETMULTIPLEKEYSREPLY._serialized_start=877 + _GETMULTIPLEKEYSREPLY._serialized_end=992 + _DELETEMULTIPLEVERSIONSREQUEST._serialized_start=994 + _DELETEMULTIPLEVERSIONSREQUEST._serialized_end=1104 + _DELETEMULTIPLEVERSIONSREPLY._serialized_start=1106 + _DELETEMULTIPLEVERSIONSREPLY._serialized_end=1174 + _LISTKEYSREQUEST._serialized_start=1176 + _LISTKEYSREQUEST._serialized_end=1251 + 
_LISTKEYSREPLY._serialized_start=1253 + _LISTKEYSREPLY._serialized_end=1321 + _LISTVERSIONSREQUEST._serialized_start=1323 + _LISTVERSIONSREQUEST._serialized_end=1408 + _LISTVERSIONSREPLY._serialized_start=1410 + _LISTVERSIONSREPLY._serialized_end=1486 + _BACKUPREQUEST._serialized_start=1488 + _BACKUPREQUEST._serialized_end=1503 + _BACKUPREPLY._serialized_start=1505 + _BACKUPREPLY._serialized_end=1602 + _RESTOREFROMBACKUPREQUEST._serialized_start=1604 + _RESTOREFROMBACKUPREQUEST._serialized_end=1630 + _RESTOREFROMBACKUPREPLY._serialized_start=1632 + _RESTOREFROMBACKUPREPLY._serialized_end=1695 + _COMPACTALLDATAREQUEST._serialized_start=1697 + _COMPACTALLDATAREQUEST._serialized_end=1720 + _COMPACTALLDATAREPLY._serialized_start=1722 + _COMPACTALLDATAREPLY._serialized_end=1782 + _EXPORTDBREQUEST._serialized_start=1784 + _EXPORTDBREQUEST._serialized_end=1842 + _EXPORTDBREPLY._serialized_start=1844 + _EXPORTDBREPLY._serialized_end=1898 + _FOSSILDB._serialized_start=1901 + _FOSSILDB._serialized_end=3503 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py new file mode 100644 index 00000000000..7f738de9658 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py @@ -0,0 +1,462 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import fossildbapi_pb2 as fossildbapi__pb2 + + +class FossilDBStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.Health = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Health', + request_serializer=fossildbapi__pb2.HealthRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.HealthReply.FromString, + ) + self.Get = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Get', + request_serializer=fossildbapi__pb2.GetRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.GetReply.FromString, + ) + self.GetMultipleVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleVersions', + request_serializer=fossildbapi__pb2.GetMultipleVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.GetMultipleVersionsReply.FromString, + ) + self.GetMultipleKeys = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleKeys', + request_serializer=fossildbapi__pb2.GetMultipleKeysRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.GetMultipleKeysReply.FromString, + ) + self.Put = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Put', + request_serializer=fossildbapi__pb2.PutRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.PutReply.FromString, + ) + self.Delete = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Delete', + request_serializer=fossildbapi__pb2.DeleteRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.DeleteReply.FromString, + ) + self.DeleteMultipleVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/DeleteMultipleVersions', + request_serializer=fossildbapi__pb2.DeleteMultipleVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.DeleteMultipleVersionsReply.FromString, + ) + self.ListKeys = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/ListKeys', + request_serializer=fossildbapi__pb2.ListKeysRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.ListKeysReply.FromString, + ) + 
self.ListVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/ListVersions', + request_serializer=fossildbapi__pb2.ListVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.ListVersionsReply.FromString, + ) + self.Backup = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/Backup', + request_serializer=fossildbapi__pb2.BackupRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.BackupReply.FromString, + ) + self.RestoreFromBackup = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/RestoreFromBackup', + request_serializer=fossildbapi__pb2.RestoreFromBackupRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.RestoreFromBackupReply.FromString, + ) + self.CompactAllData = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/CompactAllData', + request_serializer=fossildbapi__pb2.CompactAllDataRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.CompactAllDataReply.FromString, + ) + self.ExportDB = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/ExportDB', + request_serializer=fossildbapi__pb2.ExportDBRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.ExportDBReply.FromString, + ) + + +class FossilDBServicer(object): + """Missing associated documentation comment in .proto file.""" + + def Health(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Get(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetMultipleVersions(self, request, context): + """Missing associated documentation comment in .proto 
file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetMultipleKeys(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Put(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Delete(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteMultipleVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListKeys(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Backup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not 
implemented!') + + def RestoreFromBackup(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CompactAllData(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ExportDB(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_FossilDBServicer_to_server(servicer, server): + rpc_method_handlers = { + 'Health': grpc.unary_unary_rpc_method_handler( + servicer.Health, + request_deserializer=fossildbapi__pb2.HealthRequest.FromString, + response_serializer=fossildbapi__pb2.HealthReply.SerializeToString, + ), + 'Get': grpc.unary_unary_rpc_method_handler( + servicer.Get, + request_deserializer=fossildbapi__pb2.GetRequest.FromString, + response_serializer=fossildbapi__pb2.GetReply.SerializeToString, + ), + 'GetMultipleVersions': grpc.unary_unary_rpc_method_handler( + servicer.GetMultipleVersions, + request_deserializer=fossildbapi__pb2.GetMultipleVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.GetMultipleVersionsReply.SerializeToString, + ), + 'GetMultipleKeys': grpc.unary_unary_rpc_method_handler( + servicer.GetMultipleKeys, + request_deserializer=fossildbapi__pb2.GetMultipleKeysRequest.FromString, + response_serializer=fossildbapi__pb2.GetMultipleKeysReply.SerializeToString, + ), + 'Put': grpc.unary_unary_rpc_method_handler( + servicer.Put, + request_deserializer=fossildbapi__pb2.PutRequest.FromString, + 
response_serializer=fossildbapi__pb2.PutReply.SerializeToString, + ), + 'Delete': grpc.unary_unary_rpc_method_handler( + servicer.Delete, + request_deserializer=fossildbapi__pb2.DeleteRequest.FromString, + response_serializer=fossildbapi__pb2.DeleteReply.SerializeToString, + ), + 'DeleteMultipleVersions': grpc.unary_unary_rpc_method_handler( + servicer.DeleteMultipleVersions, + request_deserializer=fossildbapi__pb2.DeleteMultipleVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.DeleteMultipleVersionsReply.SerializeToString, + ), + 'ListKeys': grpc.unary_unary_rpc_method_handler( + servicer.ListKeys, + request_deserializer=fossildbapi__pb2.ListKeysRequest.FromString, + response_serializer=fossildbapi__pb2.ListKeysReply.SerializeToString, + ), + 'ListVersions': grpc.unary_unary_rpc_method_handler( + servicer.ListVersions, + request_deserializer=fossildbapi__pb2.ListVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.ListVersionsReply.SerializeToString, + ), + 'Backup': grpc.unary_unary_rpc_method_handler( + servicer.Backup, + request_deserializer=fossildbapi__pb2.BackupRequest.FromString, + response_serializer=fossildbapi__pb2.BackupReply.SerializeToString, + ), + 'RestoreFromBackup': grpc.unary_unary_rpc_method_handler( + servicer.RestoreFromBackup, + request_deserializer=fossildbapi__pb2.RestoreFromBackupRequest.FromString, + response_serializer=fossildbapi__pb2.RestoreFromBackupReply.SerializeToString, + ), + 'CompactAllData': grpc.unary_unary_rpc_method_handler( + servicer.CompactAllData, + request_deserializer=fossildbapi__pb2.CompactAllDataRequest.FromString, + response_serializer=fossildbapi__pb2.CompactAllDataReply.SerializeToString, + ), + 'ExportDB': grpc.unary_unary_rpc_method_handler( + servicer.ExportDB, + request_deserializer=fossildbapi__pb2.ExportDBRequest.FromString, + response_serializer=fossildbapi__pb2.ExportDBReply.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 
'com.scalableminds.fossildb.proto.FossilDB', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. +class FossilDB(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def Health(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Health', + fossildbapi__pb2.HealthRequest.SerializeToString, + fossildbapi__pb2.HealthReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Get(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Get', + fossildbapi__pb2.GetRequest.SerializeToString, + fossildbapi__pb2.GetReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetMultipleVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleVersions', + fossildbapi__pb2.GetMultipleVersionsRequest.SerializeToString, + fossildbapi__pb2.GetMultipleVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetMultipleKeys(request, + target, + options=(), + channel_credentials=None, + 
call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleKeys', + fossildbapi__pb2.GetMultipleKeysRequest.SerializeToString, + fossildbapi__pb2.GetMultipleKeysReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Put(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Put', + fossildbapi__pb2.PutRequest.SerializeToString, + fossildbapi__pb2.PutReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Delete(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Delete', + fossildbapi__pb2.DeleteRequest.SerializeToString, + fossildbapi__pb2.DeleteReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def DeleteMultipleVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/DeleteMultipleVersions', + fossildbapi__pb2.DeleteMultipleVersionsRequest.SerializeToString, + fossildbapi__pb2.DeleteMultipleVersionsReply.FromString, + options, 
channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListKeys(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/ListKeys', + fossildbapi__pb2.ListKeysRequest.SerializeToString, + fossildbapi__pb2.ListKeysReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ListVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/ListVersions', + fossildbapi__pb2.ListVersionsRequest.SerializeToString, + fossildbapi__pb2.ListVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def Backup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/Backup', + fossildbapi__pb2.BackupRequest.SerializeToString, + fossildbapi__pb2.BackupReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def RestoreFromBackup(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, 
'/com.scalableminds.fossildb.proto.FossilDB/RestoreFromBackup', + fossildbapi__pb2.RestoreFromBackupRequest.SerializeToString, + fossildbapi__pb2.RestoreFromBackupReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def CompactAllData(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/CompactAllData', + fossildbapi__pb2.CompactAllDataRequest.SerializeToString, + fossildbapi__pb2.CompactAllDataReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def ExportDB(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/ExportDB', + fossildbapi__pb2.ExportDBRequest.SerializeToString, + fossildbapi__pb2.ExportDBReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py new file mode 100644 index 00000000000..fb9bc310053 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/main.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python3 + +import argparse +import grpc +import sys +import logging +import datetime + +import fossildbapi_pb2 as proto +import fossildbapi_pb2_grpc as proto_rpc + +MAX_MESSAGE_LENGTH = 2147483647 + +def main(): + + listKeysBatchSize = 300 + + src_stub = connect("localhost:2000") + dst_stub = connect("localhost:7199") + + 
test_health(src_stub, "source fossildb at localhost:2000") + test_health(dst_stub, "destination fossildb at localhost:7199") + + + + +def connect(host): + MAX_MESSAGE_LENGTH = 2147483647 + channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", MAX_MESSAGE_LENGTH), ("grpc.max_receive_message_length", MAX_MESSAGE_LENGTH)]) + stub = proto_rpc.FossilDBStub(channel) + test_health(stub, f"fossildb at {host}") + return stub + + +def test_health(stub, label): + try: + reply = stub.Health(proto.HealthRequest()) + assert_success(reply) + print('successfully connected to ' + label) + except Exception as e: + print('failed to connect to ' + label + ': ' + str(e)) + sys.exit(1) + +def assert_success(reply): + if not reply.success: + raise Exception("reply.success failed: " + reply.errorMessage) + + +if __name__ == '__main__': + main() From 14d44c8afa87f4478c70a165e9e4bb245c37d579 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 19 Nov 2024 10:13:41 +0100 Subject: [PATCH 196/361] update frontend to newest annotation stats schema and fix typing & linting --- frontend/javascripts/admin/admin_rest_api.ts | 2 +- .../statistic/time_tracking_detail_view.tsx | 5 +- .../explorative_annotations_view.tsx | 9 +-- frontend/javascripts/oxalis/default_state.ts | 1 + .../model/accessors/annotation_accessor.ts | 63 +++++-------------- .../oxalis/model/reducers/reducer_helpers.ts | 4 ++ .../oxalis/model/reducers/save_reducer.ts | 9 +-- .../oxalis/model/sagas/save_saga.ts | 9 +-- .../oxalis/model/sagas/update_actions.ts | 8 +-- .../oxalis/model_initialization.ts | 5 -- frontend/javascripts/oxalis/store.ts | 5 +- .../view/action-bar/download_modal_view.tsx | 2 +- .../dataset_info_tab_view.tsx | 45 ++++++++----- .../javascripts/oxalis/view/version_entry.tsx | 27 ++++++++ .../backend-snapshot-tests/annotations.e2e.ts | 24 ++++--- .../skeletontracing_server_objects.ts | 1 - .../fixtures/tasktracing_server_objects.ts | 11 +++
.../fixtures/volumetracing_server_objects.ts | 10 ++- .../test/sagas/saga_integration.spec.ts | 8 +-- frontend/javascripts/types/api_flow_types.ts | 10 +-- 20 files changed, 130 insertions(+), 128 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 01e1ca4a96e..6efac118e0e 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -820,7 +820,7 @@ export function createExplorational( export async function getTracingsForAnnotation( annotation: APIAnnotation, - version: number | null | undefined, + version?: number | null | undefined, ): Promise> { const skeletonLayers = annotation.annotationLayers.filter( (layer) => layer.typ === AnnotationLayerType.Skeleton, diff --git a/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx b/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx index b60bb4dbd7f..4dc0d246ead 100644 --- a/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx +++ b/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx @@ -6,7 +6,6 @@ import { formatMilliseconds } from "libs/format_utils"; import _ from "lodash"; import type { APITimeTrackingPerAnnotation } from "types/api_flow_types"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; -import { aggregateStatsForAllLayers } from "oxalis/model/accessors/annotation_accessor"; import type { AnnotationTypeFilterEnum, AnnotationStateFilterEnum } from "oxalis/constants"; type TimeTrackingDetailViewProps = { @@ -40,7 +39,7 @@ const renderRow = ( @@ -63,7 +62,7 @@ const renderRow = ( diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 4e377eb6151..0cc3192b062 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx 
@@ -66,7 +66,6 @@ import { getVolumeDescriptors } from "oxalis/model/accessors/volumetracing_acces import { RenderToPortal } from "oxalis/view/layouting/portal_utils"; import { ActiveTabContext, RenderingTabContext } from "./dashboard_contexts"; import type { SearchProps } from "antd/lib/input"; -import { getCombinedStatsFromServerAnnotation } from "oxalis/model/accessors/annotation_accessor"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; import { updateMetadataOfAnnotation } from "oxalis/model/sagas/update_actions"; @@ -697,7 +696,7 @@ class ExplorativeAnnotationsView extends React.PureComponent {
- {teamTags.length > 0 ? : null} + {teamTags.length > 0 ? : null}
{teamTags}
@@ -709,11 +708,7 @@ class ExplorativeAnnotationsView extends React.PureComponent { title: "Stats", width: 150, render: (__: any, annotation: APIAnnotationInfo) => ( - + ), }, { diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 08894a6b01f..02749ea70ec 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -178,6 +178,7 @@ const defaultState: OxalisState = { blockedByUser: null, annotationLayers: [], version: 0, + stats: {}, }, save: { queue: [], diff --git a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts index 0f85f28596f..aae3bc468b4 100644 --- a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts @@ -1,7 +1,5 @@ import _ from "lodash"; import type { OxalisState, Tracing } from "oxalis/store"; -import type { APIAnnotationInfo } from "types/api_flow_types"; -import type { EmptyObject } from "types/globals"; export function mayEditAnnotationProperties(state: OxalisState) { const { owner, restrictions } = state.tracing; @@ -34,31 +32,16 @@ export type VolumeTracingStats = { segmentCount: number; }; -export type TracingStats = SkeletonTracingStats | VolumeTracingStats; -type TracingStatsHelper = { - treeCount?: number; - nodeCount?: number; - edgeCount?: number; - branchPointCount?: number; - segmentCount?: number; -}; - -// biome-ignore lint/complexity/noBannedTypes: {} should be avoided actually -export type CombinedTracingStats = (SkeletonTracingStats | {}) & (VolumeTracingStats | {}); +export type TracingStats = Record; -export function getStats(tracing: Tracing): CombinedTracingStats { +export function getStats(tracing: Tracing): TracingStats { + const stats: TracingStats = {}; const { skeleton, volumes } = tracing; - let totalSegmentCount = 0; for (const volumeTracing of 
volumes) { - // TODOM: Update annotation stats according to the JSON and always send all layers - totalSegmentCount += volumeTracing.segments.size(); + stats[volumeTracing.tracingId] = { segmentCount: volumeTracing.segments.size() }; } - let stats: TracingStats = { - segmentCount: totalSegmentCount, - }; if (skeleton) { - stats = { - ...stats, + stats[skeleton.tracingId] = { treeCount: _.size(skeleton.trees), nodeCount: _.reduce(skeleton.trees, (sum, tree) => sum + tree.nodes.size(), 0), edgeCount: _.reduce(skeleton.trees, (sum, tree) => sum + tree.edges.size(), 0), @@ -78,34 +61,16 @@ export function getCreationTimestamp(tracing: Tracing) { return timestamp || 0; } -export function getCombinedStatsFromServerAnnotation( - annotation: APIAnnotationInfo, -): CombinedTracingStats { - return aggregateStatsForAllLayers( - annotation.annotationLayers.map((annotation) => annotation.stats), - ); -} - -export function aggregateStatsForAllLayers( - stats: Array, -): CombinedTracingStats { - const aggregatedStats: TracingStatsHelper = {}; - - for (const annotationLayerStats of stats) { - if ("treeCount" in annotationLayerStats) { - const { treeCount, nodeCount, edgeCount, branchPointCount } = annotationLayerStats; - aggregatedStats.treeCount = treeCount; - aggregatedStats.nodeCount = nodeCount; - aggregatedStats.edgeCount = edgeCount; - aggregatedStats.branchPointCount = branchPointCount; - } else if ("segmentCount" in annotationLayerStats) { - if (aggregatedStats.segmentCount == null) { - aggregatedStats.segmentCount = 0; - } - - aggregatedStats.segmentCount += annotationLayerStats.segmentCount; +export function getSkeletonStats(stats: TracingStats): SkeletonTracingStats | undefined { + for (const tracingId in stats) { + if ("treeCount" in stats[tracingId]) { + return stats[tracingId]; } } +} - return aggregatedStats; +export function getVolumeStats(stats: TracingStats): [string, VolumeTracingStats][] { + return Array.from(Object.entries(stats)).filter( + ([_tracingId, 
stat]) => "segmentCount" in stat, + ) as [string, VolumeTracingStats][]; } diff --git a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts index 409cbfde61e..837813f2e58 100644 --- a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts @@ -93,11 +93,13 @@ export function convertServerAnnotationToFrontendAnnotation(annotation: APIAnnot name, typ: annotationType, tracingStore, + stats, owner, contributors, othersMayEdit, isLockedByOwner, annotationLayers, + version, } = annotation; const restrictions = { ...annotation.restrictions, ...annotation.settings }; return { @@ -105,6 +107,8 @@ export function convertServerAnnotationToFrontendAnnotation(annotation: APIAnnot restrictions, visibility, tags, + version, + stats, description, name, annotationType, diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 2bfd14c7f49..61655226f1d 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -4,7 +4,7 @@ import type { Action } from "oxalis/model/actions/actions"; import type { OxalisState, SaveState } from "oxalis/store"; import type { SetVersionNumberAction } from "oxalis/model/actions/save_actions"; import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; -import { type CombinedTracingStats, getStats } from "oxalis/model/accessors/annotation_accessor"; +import { type TracingStats, getStats } from "oxalis/model/accessors/annotation_accessor"; import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; import Date from "libs/date"; @@ -39,12 +39,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { // update actions. 
const dispatchedAction = action; const { items, transactionId } = dispatchedAction; - const stats: CombinedTracingStats | null = _.some( - dispatchedAction.items, - (ua) => ua.name !== "updateSkeletonTracing" && ua.name !== "updateVolumeTracing", - ) - ? getStats(state.tracing) - : null; + const stats: TracingStats = getStats(state.tracing); const { activeUser } = state; if (activeUser == null) { throw new Error("Tried to save something even though user is not logged in."); diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 928816b7e89..0d543b93f70 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -38,7 +38,7 @@ import { SAVE_RETRY_WAITING_TIME, } from "oxalis/model/sagas/save_saga_constants"; import { diffSkeletonTracing } from "oxalis/model/sagas/skeletontracing_saga"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateAction, UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; import { diffVolumeTracing } from "oxalis/model/sagas/volumetracing_saga"; import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga"; import { Model } from "oxalis/singletons"; @@ -284,9 +284,10 @@ function* markBucketsAsNotDirty(saveQueue: Array) { for (const saveEntry of saveQueue) { for (const updateAction of saveEntry.actions) { if (updateAction.name === "updateBucket") { - // The ID must belong to a segmentation layer because we are handling - // an updateBucket action. - const { actionTracingId: tracingId } = updateAction.value; + // The ID must belong to a segmentation layer because we are handling an updateBucket + // action. Moreover, updateBucket is layer dependent and thus has an actionTracingId. 
+ const { actionTracingId: tracingId } = + updateAction.value as UpdateActionWithTracingId["value"]; const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); const segmentationResolutionInfo = yield* call(getMagInfo, segmentationLayer.resolutions); diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index cbfdc61f9e5..65e3fa2d863 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -55,10 +55,10 @@ export type RevertToVersionUpdateAction = ReturnType; export type RemoveFallbackLayerUpdateAction = ReturnType; export type UpdateTdCameraUpdateAction = ReturnType; export type UpdateMappingNameUpdateAction = ReturnType; -type AddLayerToAnnotationUpdateAction = ReturnType; -type DeleteAnnotationLayerUpdateAction = ReturnType; -type UpdateAnnotationLayerNameUpdateAction = ReturnType; -type UpdateMetadataOfAnnotationUpdateAction = ReturnType; +export type AddLayerToAnnotationUpdateAction = ReturnType; +export type DeleteAnnotationLayerUpdateAction = ReturnType; +export type UpdateAnnotationLayerNameUpdateAction = ReturnType; +export type UpdateMetadataOfAnnotationUpdateAction = ReturnType; export type SplitAgglomerateUpdateAction = ReturnType; export type MergeAgglomerateUpdateAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 7a022cc4933..34a0e6f478d 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -139,15 +139,10 @@ export async function initialize( maybeOutdatedAnnotation.id, ); const layersWithStats = annotationFromTracingStore.annotationLayers.map((layer) => { - const matchingLayer = maybeOutdatedAnnotation.annotationLayers.find( - (l) => l.tracingId === layer.tracingId, - ); - return { tracingId: layer.tracingId, name: 
layer.name, typ: layer.type, - stats: matchingLayer?.stats || {}, }; }); const completeAnnotation = { diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 8d37bab0105..e68fb9ee00c 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -28,7 +28,7 @@ import type { AdditionalAxis, MetadataEntryProto, } from "types/api_flow_types"; -import type { CombinedTracingStats } from "oxalis/model/accessors/annotation_accessor"; +import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; import type { Action } from "oxalis/model/actions/actions"; import type { BoundingBoxType, @@ -196,6 +196,7 @@ export type Annotation = { readonly visibility: AnnotationVisibility; readonly annotationLayers: Array; readonly tags: Array; + readonly stats: TracingStats | null | undefined; readonly description: string; readonly name: string; readonly tracingStore: APITracingStore; @@ -450,7 +451,7 @@ export type SaveQueueEntry = { transactionId: string; transactionGroupCount: number; transactionGroupIndex: number; - stats: CombinedTracingStats | null | undefined; + stats: TracingStats | null | undefined; info: string; }; export type ProgressInfo = { diff --git a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx index a0ef1d37e31..6cbd58e2b6b 100644 --- a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx @@ -355,7 +355,7 @@ function _DownloadModalView({ tracing.annotationId, tracing.annotationType, hasVolumeFallback, - {}, + undefined, fileFormatToDownload, includeVolumeData, ); diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index bf5b4f783cd..abb3e3d3656 100644 --- 
a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -13,7 +13,12 @@ import { getMagnificationUnion, } from "oxalis/model/accessors/dataset_accessor"; import { getActiveMagInfo } from "oxalis/model/accessors/flycam_accessor"; -import { getStats, type CombinedTracingStats } from "oxalis/model/accessors/annotation_accessor"; +import { + getSkeletonStats, + getStats, + getVolumeStats, + type TracingStats, +} from "oxalis/model/accessors/annotation_accessor"; import { setAnnotationNameAction, setAnnotationDescriptionAction, @@ -29,6 +34,7 @@ import { getOrganization } from "admin/admin_rest_api"; import { MarkdownModal } from "../components/markdown_modal"; import FastTooltip from "components/fast_tooltip"; import messages from "messages"; +import type { EmptyObject } from "types/globals"; type StateProps = { annotation: Tracing; @@ -179,14 +185,24 @@ export function AnnotationStats({ asInfoBlock, withMargin, }: { - stats: CombinedTracingStats; + stats: TracingStats | EmptyObject | null | undefined; asInfoBlock: boolean; withMargin?: boolean | null | undefined; }) { + if (!stats || Object.keys(stats).length === 0) return null; const formatLabel = (str: string) => (asInfoBlock ? str : ""); const useStyleWithMargin = withMargin != null ? withMargin : true; const styleWithLargeMarginBottom = { marginBottom: 14 }; const styleWithSmallMargin = { margin: 2 }; + const skeletonStats = getSkeletonStats(stats); + const volumeStats = getVolumeStats(stats); + const totalSegmentCount = volumeStats.reduce((sum, [_, volume]) => sum + volume.segmentCount, 0); + const segmentCountDetails = volumeStats + .map( + ([layerName, volume]) => + `

${layerName}: ${volume.segmentCount} ${pluralize("Segment", volume.segmentCount)}

`, + ) + .join(""); return (
Statistics

} - {"treeCount" in stats ? ( + {skeletonStats && "treeCount" in skeletonStats ? ( Trees: ${safeNumberToStr(stats.treeCount)}

-

Nodes: ${safeNumberToStr(stats.nodeCount)}

-

Edges: ${safeNumberToStr(stats.edgeCount)}

-

Branchpoints: ${safeNumberToStr(stats.branchPointCount)}

+

Trees: ${safeNumberToStr(skeletonStats.treeCount)}

+

Nodes: ${safeNumberToStr(skeletonStats.nodeCount)}

+

Edges: ${safeNumberToStr(skeletonStats.edgeCount)}

+

Branchpoints: ${safeNumberToStr(skeletonStats.branchPointCount)}

`} wrapper="tr" > @@ -215,17 +231,18 @@ export function AnnotationStats({ />
) : null} - {"segmentCount" in stats ? ( + {volumeStats.length > 0 ? ( ) : null} diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index e363c9060b1..f807529acdd 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -36,9 +36,13 @@ import type { DeleteSegmentUpdateAction, MoveTreeComponentUpdateAction, MergeTreeUpdateAction, + UpdateAnnotationLayerNameUpdateAction, UpdateMappingNameUpdateAction, DeleteSegmentDataUpdateAction, UpdateActionWithTracingId, + AddLayerToAnnotationUpdateAction, + DeleteAnnotationLayerUpdateAction, + UpdateMetadataOfAnnotationUpdateAction, } from "oxalis/model/sagas/update_actions"; import FormattedDate from "components/formatted_date"; import { MISSING_GROUP_ID } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; @@ -248,6 +252,29 @@ const descriptionFns: Record< }), updateSkeletonTracing: (): Description => updateTracingDescription, updateVolumeTracing: (): Description => updateTracingDescription, + addLayerToAnnotation: (action: AddLayerToAnnotationUpdateAction): Description => ({ + description: `Added the layer ${action.value.layerParameters.name} to the annotation.`, + icon: , + }), + deleteLayerFromAnnotation: (action: DeleteAnnotationLayerUpdateAction): Description => ({ + description: `Deleted the layer with id ${action.value.layerName} (${action.value.tracingId}) from the annotation.`, + icon: , + }), + updateLayerMetadata: (action: UpdateAnnotationLayerNameUpdateAction): Description => ({ + description: `Updated the name of the layer with id ${action.value.tracingId} to ${action.value.layerName}.`, + icon: , + }), + updateMetadataOfAnnotation: (action: UpdateMetadataOfAnnotationUpdateAction): Description => { + const updatedName = action.value.name != null; + const updatedDescription = action.value.description != null; + const updatedText = + updatedName && updatedDescription + ? 
"name and description" + : updatedName + ? "name" + : "description"; + return { description: `Updated the ${updatedText} of the annotation.`, icon: }; + }, } as const; function maybeGetReadableVolumeTracingName(tracing: HybridTracing, tracingId: string): string { diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index a1dc56c8bb0..d3a8b038100 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -6,13 +6,10 @@ import { writeTypeCheckingFile, } from "test/e2e-setup"; import type { APIAnnotation } from "types/api_flow_types"; -import { APIAnnotationTypeEnum } from "types/api_flow_types"; +import { AnnotationLayerType, APIAnnotationTypeEnum } from "types/api_flow_types"; import { createTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { diffTrees } from "oxalis/model/sagas/skeletontracing_saga"; -import { - getNullableSkeletonTracing, - getSkeletonDescriptor, -} from "oxalis/model/accessors/skeletontracing_accessor"; +import { getNullableSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; import { getServerVolumeTracings } from "oxalis/model/accessors/volumetracing_accessor"; import { sendRequestWithToken, addVersionNumbers } from "oxalis/model/sagas/save_saga"; import * as UpdateActions from "oxalis/model/sagas/update_actions"; @@ -79,27 +76,23 @@ test.serial("finishAnnotation() and reOpenAnnotation() for explorational", async test.serial("editAnnotation()", async (t) => { const annotationId = "68135c192faeb34c0081c05d"; const originalAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); - const { name, visibility, description } = originalAnnotation; + const { visibility } = originalAnnotation; const newName = "new name"; const newVisibility = "Public"; const newDescription = "new 
description"; await api.editAnnotation(annotationId, APIAnnotationTypeEnum.Explorational, { - name: newName, visibility: newVisibility, - description: newDescription, }); const editedAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); t.is(editedAnnotation.name, newName); t.is(editedAnnotation.visibility, newVisibility); t.is(editedAnnotation.description, newDescription); t.is(editedAnnotation.id, annotationId); - t.is(editedAnnotation.annotationLayers[0].typ, "Skeleton"); + t.is(editedAnnotation.annotationLayers[0].typ, AnnotationLayerType.Skeleton); t.is(editedAnnotation.annotationLayers[0].tracingId, "ae417175-f7bb-4a34-8187-d9c3b50143af"); t.snapshot(replaceVolatileValues(editedAnnotation)); await api.editAnnotation(annotationId, APIAnnotationTypeEnum.Explorational, { - name, visibility, - description, }); }); test.serial("finishAllAnnotations()", async (t) => { @@ -151,7 +144,6 @@ test.serial("getTracingsForAnnotation() for hybrid", async (t) => { }); async function sendUpdateActions(explorational: APIAnnotation, queue: SaveQueueEntry[]) { - console.log("explorational.annotationId:", explorational.annotationId); return sendRequestWithToken( `${explorational.tracingStore.url}/tracings/annotation/${explorational.id}/update?token=`, { @@ -162,6 +154,8 @@ async function sendUpdateActions(explorational: APIAnnotation, queue: SaveQueueE ); } +// TODOM: Add tests for new update actions added in this pr (including updateAnnotationMetadata as this part of testing was removed editAnnotation() test case) + test.serial("Send update actions and compare resulting tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); const initialSkeleton = { @@ -243,7 +237,11 @@ test("Update Metadata for Skeleton Tracing", async (t) => { }; const updateTreeAction = UpdateActions.updateTree(trees[1]); const [saveQueue] = addVersionNumbers( - createSaveQueueFromUpdateActions([createTreesUpdateActions, 
[updateTreeAction]], 123456789, createdExplorational.annotationLayers[0].tracingId), + createSaveQueueFromUpdateActions( + [createTreesUpdateActions, [updateTreeAction]], + 123456789, + createdExplorational.annotationLayers[0].tracingId, + ), 0, ); diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index a0a469e0864..5563394181c 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -183,7 +183,6 @@ export const annotation: APIAnnotation = { name: AnnotationLayerType.Skeleton, tracingId: "47e37793-d0be-4240-a371-87ce68561a13", typ: AnnotationLayerType.Skeleton, - stats: {}, }, ], dataSetName: "ROI2017_wkw", diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index cd1d0d08f28..94d97d7b621 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -1,4 +1,8 @@ -import type { ServerSkeletonTracing, APIAnnotation } from "types/api_flow_types"; +import { + type ServerSkeletonTracing, + type APIAnnotation, + AnnotationLayerType, +} from "types/api_flow_types"; export const tracing: ServerSkeletonTracing = { typ: "Skeleton", @@ -68,6 +72,8 @@ export const annotation: APIAnnotation = { id: "5b1fd1cf97000027049c67ee", name: "", description: "", + stats: {}, + version: 0, typ: "Task", task: { id: "5b1fd1cb97000027049c67ec", @@ -119,8 +125,7 @@ export const annotation: APIAnnotation = { { name: "Skeleton", tracingId: "e90133de-b2db-4912-8261-8b6f84f7edab", - typ: "Skeleton", - stats: {}, + typ: AnnotationLayerType.Skeleton, }, ], dataSetName: "ROI2017_wkw", diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts 
b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index 6915f6168e3..c6b13968fe3 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -1,4 +1,8 @@ -import type { ServerVolumeTracing, APIAnnotation } from "types/api_flow_types"; +import { + type ServerVolumeTracing, + type APIAnnotation, + AnnotationLayerType, +} from "types/api_flow_types"; export const tracing: ServerVolumeTracing = { typ: "Volume", activeSegmentId: 10000, @@ -69,6 +73,7 @@ export const tracing: ServerVolumeTracing = { export const annotation: APIAnnotation = { description: "", state: "Active", + version: 0, id: "598b52293c00009906f043e7", visibility: "Internal", modified: 1529066010230, @@ -86,8 +91,7 @@ export const annotation: APIAnnotation = { { name: "volume", tracingId: "tracingId-1234", - typ: "Volume", - stats: {}, + typ: AnnotationLayerType.Volume, }, ], dataSetName: "ROI2017_wkw", diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 62031c20d51..90f5de2a0ff 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -71,7 +71,7 @@ test.serial( ], TIMESTAMP, TaskTracing.id, - getStats(state.tracing, "skeleton", "irrelevant_in_skeleton_case") || undefined, + getStats(state.tracing) || undefined, ); // Reset the info field which is just for debugging purposes const actualSaveQueue = state.save.queue.map((entry) => { @@ -89,7 +89,7 @@ test.serial("Save actions should not be chunked below the chunk limit (1/3)", (t const trees = generateDummyTrees(1000, 1); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); t.is(Store.getState().save.queue.length, 1); - t.true(Store.getState().save.queue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); + 
t.true(Store.getState().save.queue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH); }); test.serial("Save actions should be chunked above the chunk limit (2/3)", (t) => { @@ -99,7 +99,7 @@ test.serial("Save actions should be chunked above the chunk limit (2/3)", (t) => Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); const state = Store.getState(); t.true(state.save.queue.length > 1); - t.is(state.save.queue[0].actions.length, MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); + t.is(state.save.queue[0].actions.length, MAXIMUM_ACTION_COUNT_PER_BATCH); }); test.serial("Save actions should be chunked after compacting (3/3)", (t) => { @@ -116,6 +116,6 @@ test.serial("Save actions should be chunked after compacting (3/3)", (t) => { const skeletonSaveQueue = Store.getState().save.queue; // There should only be one chunk t.is(skeletonSaveQueue.length, 1); - t.true(skeletonSaveQueue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH.skeleton); + t.true(skeletonSaveQueue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH); t.is(skeletonSaveQueue[0].actions[1].name, "moveTreeComponent"); }); diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index ec1a1a88437..390cbb5268a 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -9,10 +9,7 @@ import type { MeshInformation, } from "oxalis/store"; import type { ServerUpdateAction } from "oxalis/model/sagas/update_actions"; -import type { - SkeletonTracingStats, - TracingStats, -} from "oxalis/model/accessors/annotation_accessor"; +import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; import type { Vector3, Vector6, @@ -472,7 +469,6 @@ export type AnnotationLayerDescriptor = { name: string; tracingId: string; typ: AnnotationLayerType; - stats: TracingStats | EmptyObject; }; export type EditableLayerProperties = { name: string; @@ -487,7 +483,7 @@ export type 
APIAnnotationInfo = { readonly name: string; // Not used by the front-end anymore, but the // backend still serves this for backward-compatibility reasons. - readonly stats?: SkeletonTracingStats | EmptyObject; + readonly stats?: TracingStats | EmptyObject | null | undefined; readonly state: string; readonly isLockedByOwner: boolean; readonly tags: Array; @@ -573,7 +569,7 @@ export type APITimeTrackingPerAnnotation = { task: string | undefined; projectName: string | undefined; timeMillis: number; - annotationLayerStats: Array; + annotationLayerStats: TracingStats; }; type APITracingStoreAnnotationLayer = { tracingId: string; From cc8382bd05d032f7f398549f66aaca1aab7ed08b Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 19 Nov 2024 12:06:54 +0100 Subject: [PATCH 197/361] include annotation stats per layer in time tracking api response json --- app/models/user/time/TimeSpan.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/app/models/user/time/TimeSpan.scala b/app/models/user/time/TimeSpan.scala index fd5e831e3ae..4737363c4e6 100644 --- a/app/models/user/time/TimeSpan.scala +++ b/app/models/user/time/TimeSpan.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.schema.Tables._ import models.annotation.AnnotationState.AnnotationState import models.annotation.AnnotationType.AnnotationType -import play.api.libs.json.{JsArray, JsObject, JsValue, Json} +import play.api.libs.json.{JsObject, JsValue, Json} import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} import utils.ObjectId @@ -96,7 +96,7 @@ class TimeSpanDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) AND a.state IN ${SqlToken.tupleFromList(annotationStates)} GROUP BY a._id, t._id, p.name ) - SELECT ti._annotation, ti._task, ti.projectName, ti.timeSummed, JSON_AGG(al.statistics) AS layerStatistics + SELECT ti._annotation, ti._task, ti.projectName, ti.timeSummed, JSON_OBJECT_AGG(al.tracingId, al.statistics) 
AS layerStatistics FROM timeSummedPerAnnotation ti JOIN webknossos.annotation_layers al ON al._annotation = ti._annotation GROUP BY ti._annotation, ti._task, ti.projectName, ti.timeSummed @@ -104,7 +104,7 @@ class TimeSpanDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) """.as[(String, Option[String], Option[String], Long, String)] ) parsed = tuples.map { t => - val layerStats: JsArray = Json.parse(t._5).validate[JsArray].getOrElse(Json.arr()) + val layerStats: JsObject = Json.parse(t._5).validate[JsObject].getOrElse(Json.obj()) Json.obj( "annotation" -> t._1, "task" -> t._2, From aa02568dd9a81abd5e4d32cf60e0b56f13306c02 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 19 Nov 2024 12:11:03 +0100 Subject: [PATCH 198/361] =?UTF-8?q?continue=20if=20a=20layer=E2=80=99s=20s?= =?UTF-8?q?tats=20couldn=E2=80=99t=20be=20updated?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app/controllers/WKRemoteTracingStoreController.scala | 3 +-- app/models/annotation/AnnotationService.scala | 8 +++----- 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 32e88b8b9a8..5b2ada6304a 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -88,8 +88,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore annotation <- annotationDAO.findOne(annotationId) _ <- ensureAnnotationNotFinished(annotation) _ <- annotationDAO.updateModified(annotation._id, Instant.now) - _ <- Fox.runOptional(report.statistics)(statistics => - annotationService.updateStatistics(annotation._id, statistics)) + _ = report.statistics.map(statistics => annotationService.updateStatistics(annotation._id, statistics)) userBox <- bearerTokenService.userForTokenOpt(report.userToken).futureBox trackTime = report.significantChangesCount > 0 
|| !wkConf.WebKnossos.User.timeTrackingOnlyWithSignificantChanges _ <- Fox.runOptional(userBox)(user => diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index ebe3f8b1833..5e8cb80423d 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -872,13 +872,11 @@ class AnnotationService @Inject()( } } - def updateStatistics(annotationId: ObjectId, statistics: JsObject): Fox[Unit] = { - Fox.serialCombined(statistics.value.toSeq) { + def updateStatistics(annotationId: ObjectId, statistics: JsObject): Unit = + // Fail silently, because the layer may not (yet/anymore) be present in postgres at this time + statistics.value.toSeq.map { case (tracingId, statisticsForTracing) => annotationLayerDAO.updateStatistics(annotationId, tracingId, statisticsForTracing) } - // TODO test + remove this line once frontend is adapted - Fox.successful(()) - } } From 03de53e991419384f6fa23e94f3f751255451c6a Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 19 Nov 2024 13:37:09 +0100 Subject: [PATCH 199/361] add some function stubs to migration --- .../main.py | 48 ++++++++++++++++++- 1 file changed, 46 insertions(+), 2 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index fb9bc310053..b041859301e 100644 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -9,8 +9,6 @@ import fossildbapi_pb2 as proto import fossildbapi_pb2_grpc as proto_rpc -MAX_MESSAGE_LENGTH = 2147483647 - def main(): listKeysBatchSize = 300 @@ -21,7 +19,53 @@ def main(): test_health(src_stub, f"source fossildb at {src_host}") test_health(dst_stub, f"destination fossildb at {dst_host}") + annotations = read_annotation_list() + + for annotation in annotations: + migrate_annotation(annotation) + + +def migrate_annotation(annotation): + print(f"Migrating 
annotation {annotation}") + # layerId → {version_before → version_after} + layer_version_mapping = migrate_updates(annotation) + migrate_materialized_layers(annotation, layer_version_mapping) + +def migrate_updates(annotation): + layers = annotation.layers + +def migrate_materialized_layers(annotation): + for layer in annotation.layers: + migrate_materialized_layer(layer, layer_version_mapping) + +def migrate_materialized_layer(layer, layer_version_mapping): + if layer.type == "Skeleton": + migrate_skeleton_proto(layer, layer_version_mapping) + if layer_type == "Volume" + migrate_volume_proto(layer, layer_version_mapping) + migrate_volume_buckets(layer, layer_version_mapping) + migrate_segment_index(layer, layer_version_mapping) + migrate_editable_mapping(layer, layer_version_mapping) + +def migrate_editable_mapping(layer, layer_version_mapping): + migrate_editable_mapping_info(layer, layer_version_mapping) + migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) + migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping) + +def migrate_editable_mapping_info(layer, layer_version_mapping): + pass + +def migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping): + pass + +def migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping): + pass + +def insert_annotation_protos(annotation, layer_version_mapping): + pass +def read_annotation_list(): + return [] def connect(host): From 247fe9c732bfbcc34587d54602dd0d10923e9e5a Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 20 Nov 2024 11:48:54 +0100 Subject: [PATCH 200/361] wip migration: read annotations from postgres --- .editorconfig | 3 ++ .../main.py | 37 ++++++++++++++----- .../requirements.txt | 4 ++ 3 files changed, 34 insertions(+), 10 deletions(-) create mode 100644 tools/migration-unified-annotation-versioning/requirements.txt diff --git a/.editorconfig b/.editorconfig index 426b13cd4a2..4566e12eedd 100644 --- a/.editorconfig +++ 
b/.editorconfig @@ -9,6 +9,9 @@ charset = utf-8 trim_trailing_whitespace = true insert_final_newline = true +[*.py] +indent_size = 4 + [*.md] trim_trailing_whitespace = false diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index b041859301e..1a3ba77a0d0 100644 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -5,21 +5,21 @@ import sys import logging import datetime +import psycopg2 import fossildbapi_pb2 as proto import fossildbapi_pb2_grpc as proto_rpc + def main(): listKeysBatchSize = 300 - src_stub = connect("localhost:2000") - dst_stub = connect("localhost:7199") - - test_health(src_stub, f"source fossildb at {src_host}") - test_health(dst_stub, f"destination fossildb at {dst_host}") + # src_stub = connect("localhost:2000") + # dst_stub = connect("localhost:7199") annotations = read_annotation_list() + print(annotations) for annotation in annotations: migrate_annotation(annotation) @@ -34,19 +34,31 @@ def migrate_annotation(annotation): def migrate_updates(annotation): layers = annotation.layers -def migrate_materialized_layers(annotation): +def migrate_materialized_layers(annotation, layer_version_mapping): for layer in annotation.layers: migrate_materialized_layer(layer, layer_version_mapping) def migrate_materialized_layer(layer, layer_version_mapping): if layer.type == "Skeleton": migrate_skeleton_proto(layer, layer_version_mapping) - if layer_type == "Volume" + if layer.type == "Volume": migrate_volume_proto(layer, layer_version_mapping) migrate_volume_buckets(layer, layer_version_mapping) migrate_segment_index(layer, layer_version_mapping) migrate_editable_mapping(layer, layer_version_mapping) +def migrate_skeleton_proto(layer, layer_version_mapping): + pass + +def migrate_volume_proto(layer, layer_version_mapping): + pass + +def migrate_volume_buckets(layer, layer_version_mapping): + pass + +def 
migrate_segment_index(layer, layer_version_mapping): + pass + def migrate_editable_mapping(layer, layer_version_mapping): migrate_editable_mapping_info(layer, layer_version_mapping) migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) @@ -64,9 +76,14 @@ def migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping def insert_annotation_protos(annotation, layer_version_mapping): pass -def read_annotation_list(): - return [] +def read_annotation_list(): + connection = psycopg2.connect(host="localhost", port=5432, database="webknossos", user='postgres', password='postgres') + cursor = connection.cursor() + query = "SELECT _id FROM webknossos.annotations" + cursor.execute(query) + records = cursor.fetchall() + return records def connect(host): MAX_MESSAGE_LENGTH = 2147483647 @@ -79,7 +96,7 @@ def connect(host): def test_health(stub, label): try: reply = stub.Health(proto.HealthRequest()) - assertSuccess(reply) + assert_success(reply) print('successfully connected to ' + label) except Exception as e: print('failed to connect to ' + label + ': ' + str(e)) diff --git a/tools/migration-unified-annotation-versioning/requirements.txt b/tools/migration-unified-annotation-versioning/requirements.txt new file mode 100644 index 00000000000..1b2de89fb3b --- /dev/null +++ b/tools/migration-unified-annotation-versioning/requirements.txt @@ -0,0 +1,4 @@ +grpcio +argparse +psycopg2 +protobuf From 44bf6fc27fda9f9747f6c6c371ef03a11f3392a0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 20 Nov 2024 14:17:37 +0100 Subject: [PATCH 201/361] fetch annotation info from postgres --- .../main.py | 88 ++++++++++++++----- 1 file changed, 67 insertions(+), 21 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 1a3ba77a0d0..bef467c68c6 100644 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ 
-6,46 +6,74 @@ import logging import datetime import psycopg2 +import psycopg2.extras +import math +import logging +import datetime import fossildbapi_pb2 as proto import fossildbapi_pb2_grpc as proto_rpc +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) + def main(): + setup_logging() + logger.info("Hello from Unified Annotation Versioning Migration!") listKeysBatchSize = 300 # src_stub = connect("localhost:2000") # dst_stub = connect("localhost:7199") - annotations = read_annotation_list() - print(annotations) + start_time = datetime.datetime.now() + + logger.info(f"Using start time {start_time}") + + annotations = read_annotation_list(start_time) for annotation in annotations: migrate_annotation(annotation) +def setup_logging(): + root = logging.getLogger() + root.setLevel(logging.DEBUG) + + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(logging.DEBUG) + formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s") + handler.setFormatter(formatter) + root.addHandler(handler) + + def migrate_annotation(annotation): - print(f"Migrating annotation {annotation}") + print(f"Migrating annotation {annotation['_id']}...") # layerId → {version_before → version_after} layer_version_mapping = migrate_updates(annotation) migrate_materialized_layers(annotation, layer_version_mapping) + def migrate_updates(annotation): - layers = annotation.layers + print(annotation) + layers = annotation["layers"] + def migrate_materialized_layers(annotation, layer_version_mapping): - for layer in annotation.layers: - migrate_materialized_layer(layer, layer_version_mapping) - -def migrate_materialized_layer(layer, layer_version_mapping): - if layer.type == "Skeleton": - migrate_skeleton_proto(layer, layer_version_mapping) - if layer.type == "Volume": - migrate_volume_proto(layer, layer_version_mapping) - migrate_volume_buckets(layer, layer_version_mapping) - migrate_segment_index(layer, layer_version_mapping) - migrate_editable_mapping(layer, 
layer_version_mapping) + for tracing_id in annotation["layers"]: + migrate_materialized_layer(tracing_id, annotation["layers"][tracing_id], layer_version_mapping) + + +def migrate_materialized_layer(tracing_id, layer_type, layer_version_mapping): + if layer_type == "Skeleton": + migrate_skeleton_proto(tracing_id, layer_version_mapping) + if layer_type == "Volume": + migrate_volume_proto(tracing_id, layer_version_mapping) + migrate_volume_buckets(tracing_id, layer_version_mapping) + migrate_segment_index(tracing_id, layer_version_mapping) + migrate_editable_mapping(tracing_id, layer_version_mapping) + def migrate_skeleton_proto(layer, layer_version_mapping): pass @@ -77,13 +105,31 @@ def insert_annotation_protos(annotation, layer_version_mapping): pass -def read_annotation_list(): +def read_annotation_list(start_time: datetime): + logger.info("Determining annotation count from postgres...") + page_size = 1 connection = psycopg2.connect(host="localhost", port=5432, database="webknossos", user='postgres', password='postgres') - cursor = connection.cursor() - query = "SELECT _id FROM webknossos.annotations" - cursor.execute(query) - records = cursor.fetchall() - return records + cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + modified_str = start_time.strftime("'%Y-%m-%d %H:%M:%S'") + cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < {modified_str}") + annotation_count = cursor.fetchone()['count'] + logger.info(f"Loading infos of {annotation_count} annotations from postgres...") + annotations = [] + for page_num in range(math.ceil(annotation_count / page_size)): + query = f""" + SELECT a._id, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers + FROM webknossos.annotation_layers al + JOIN webknossos.annotations a on al._annotation = a._id + WHERE a.modified < {modified_str} + GROUP BY a._id + ORDER BY a._id + LIMIT {page_size} + OFFSET {page_size * page_num} + """ + cursor.execute(query) + 
annotations += cursor.fetchall() + return annotations + def connect(host): MAX_MESSAGE_LENGTH = 2147483647 From c7af0fba815ec4e47ab672213aa758b88f7aa7de Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 20 Nov 2024 15:57:38 +0100 Subject: [PATCH 202/361] iterate on migration --- .../main.py | 41 +++++++++++++------ .../requirements.txt | 1 + 2 files changed, 30 insertions(+), 12 deletions(-) mode change 100644 => 100755 tools/migration-unified-annotation-versioning/main.py diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py old mode 100644 new mode 100755 index bef467c68c6..ab481f55348 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -7,9 +7,13 @@ import datetime import psycopg2 import psycopg2.extras +from psycopg2.extras import RealDictRow import math import logging import datetime +import time +from typing import Dict +from rich.progress import track import fossildbapi_pb2 as proto import fossildbapi_pb2_grpc as proto_rpc @@ -24,8 +28,8 @@ def main(): listKeysBatchSize = 300 - # src_stub = connect("localhost:2000") - # dst_stub = connect("localhost:7199") + # src_stub = connect_to_fossildb("localhost:2000") + # dst_stub = connect_to_fossildb("localhost:7199") start_time = datetime.datetime.now() @@ -49,18 +53,19 @@ def setup_logging(): def migrate_annotation(annotation): - print(f"Migrating annotation {annotation['_id']}...") + print(f"Migrating annotation {annotation['_id']} ...") # layerId → {version_before → version_after} layer_version_mapping = migrate_updates(annotation) migrate_materialized_layers(annotation, layer_version_mapping) -def migrate_updates(annotation): - print(annotation) +def migrate_updates(annotation) -> Dict[str, Dict[int, int]]: layers = annotation["layers"] + # TODO + return {} -def migrate_materialized_layers(annotation, layer_version_mapping): +def migrate_materialized_layers(annotation: 
RealDictRow, layer_version_mapping): for tracing_id in annotation["layers"]: migrate_materialized_layer(tracing_id, annotation["layers"][tracing_id], layer_version_mapping) @@ -78,44 +83,54 @@ def migrate_materialized_layer(tracing_id, layer_type, layer_version_mapping): def migrate_skeleton_proto(layer, layer_version_mapping): pass + def migrate_volume_proto(layer, layer_version_mapping): pass + def migrate_volume_buckets(layer, layer_version_mapping): pass + def migrate_segment_index(layer, layer_version_mapping): pass + def migrate_editable_mapping(layer, layer_version_mapping): migrate_editable_mapping_info(layer, layer_version_mapping) migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping) + def migrate_editable_mapping_info(layer, layer_version_mapping): pass + def migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping): pass + def migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping): pass + def insert_annotation_protos(annotation, layer_version_mapping): pass def read_annotation_list(start_time: datetime): + before = time.time() logger.info("Determining annotation count from postgres...") - page_size = 1 + page_size = 100 connection = psycopg2.connect(host="localhost", port=5432, database="webknossos", user='postgres', password='postgres') cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) modified_str = start_time.strftime("'%Y-%m-%d %H:%M:%S'") cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < {modified_str}") annotation_count = cursor.fetchone()['count'] - logger.info(f"Loading infos of {annotation_count} annotations from postgres...") + logger.info(f"Loading infos of {annotation_count} annotations from postgres ...") annotations = [] - for page_num in range(math.ceil(annotation_count / page_size)): + page_count = math.ceil(annotation_count / page_size) + for 
page_num in track(range(page_count), total=page_count, description=f"Loading annotation infos ..."): query = f""" SELECT a._id, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers FROM webknossos.annotation_layers al @@ -128,12 +143,13 @@ def read_annotation_list(start_time: datetime): """ cursor.execute(query) annotations += cursor.fetchall() + logger.info(f"Loading annotations took {time.time() - before} s") return annotations -def connect(host): - MAX_MESSAGE_LENGTH = 2147483647 - channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", MAX_MESSAGE_LENGTH), ("grpc.max_receive_message_length", MAX_MESSAGE_LENGTH)]) +def connect_to_fossildb(host): + max_message_length = 2147483647 + channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", max_message_length), ("grpc.max_receive_message_length", max_message_length)]) stub = proto_rpc.FossilDBStub(channel) test_health(stub, f"fossildb at {host}") return stub @@ -148,6 +164,7 @@ def test_health(stub, label): print('failed to connect to ' + label + ': ' + str(e)) sys.exit(1) + def assert_success(reply): if not reply.success: raise Exception("reply.success failed: " + reply.errorMessage) diff --git a/tools/migration-unified-annotation-versioning/requirements.txt b/tools/migration-unified-annotation-versioning/requirements.txt index 1b2de89fb3b..65865cf0f9a 100644 --- a/tools/migration-unified-annotation-versioning/requirements.txt +++ b/tools/migration-unified-annotation-versioning/requirements.txt @@ -2,3 +2,4 @@ grpcio argparse psycopg2 protobuf +rich From 4cb9b75047d1c6db2d679496bb5bd3747838e1ff Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 21 Nov 2024 14:31:02 +0100 Subject: [PATCH 203/361] wip migrate updates --- .../main.py | 189 ++++++++++-------- 1 file changed, 104 insertions(+), 85 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 
ab481f55348..b1859b97ac8 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -3,8 +3,6 @@ import argparse import grpc import sys -import logging -import datetime import psycopg2 import psycopg2.extras from psycopg2.extras import RealDictRow @@ -19,27 +17,14 @@ import fossildbapi_pb2_grpc as proto_rpc logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) def main(): setup_logging() logger.info("Hello from Unified Annotation Versioning Migration!") - - listKeysBatchSize = 300 - - # src_stub = connect_to_fossildb("localhost:2000") - # dst_stub = connect_to_fossildb("localhost:7199") - - start_time = datetime.datetime.now() - - logger.info(f"Using start time {start_time}") - - annotations = read_annotation_list(start_time) - - for annotation in annotations: - migrate_annotation(annotation) - + args = {} + migration = Migration(args) + migration.run() def setup_logging(): root = logging.getLogger() @@ -51,100 +36,134 @@ def setup_logging(): handler.setFormatter(formatter) root.addHandler(handler) +class Migration: + + def __init__(self, args): + self.args = args + self.src_stub = connect_to_fossildb("localhost:7155") + # self.dst_stub = connect_to_fossildb("localhost:7199") + + def run(self): + start_time = datetime.datetime.now() + + logger.info(f"Using start time {start_time}") + + annotations = self.read_annotation_list(start_time) + + for annotation in annotations: + self.migrate_annotation(annotation) + + + + + def migrate_annotation(self, annotation): + print(f"Migrating annotation {annotation['_id']} ...") + # layerId → {version_before → version_after} + layer_version_mapping = self.migrate_updates(annotation) + self.migrate_materialized_layers(annotation, layer_version_mapping) -def migrate_annotation(annotation): - print(f"Migrating annotation {annotation['_id']} ...") - # layerId → {version_before → version_after} - layer_version_mapping = migrate_updates(annotation) - 
migrate_materialized_layers(annotation, layer_version_mapping) + def migrate_updates(self, annotation) -> Dict[str, Dict[int, int]]: + layers = annotation["layers"] + global_version = 0 + for tracing_id, tracing_type in annotation["layers"].items(): + old_newest_version = self.get_newest_version(tracing_id, tracing_type) + # TODO + return {} -def migrate_updates(annotation) -> Dict[str, Dict[int, int]]: - layers = annotation["layers"] - # TODO - return {} + def get_newest_version(self, tracing_id: str, layer_type: str) -> int: + collection = self.update_collection_for_layer_type(layer_type) + getReply = self.src_stub.Get( + proto.GetRequest(collection=collection, key=tracing_id, mayBeEmpty=True) + ) + assert_success(getReply) + return getReply.actualVersion + def update_collection_for_layer_type(self, layer_type): + if layer_type == "Skeleton": + return "skeletonUpdates" + return "volumeUpdates" -def migrate_materialized_layers(annotation: RealDictRow, layer_version_mapping): - for tracing_id in annotation["layers"]: - migrate_materialized_layer(tracing_id, annotation["layers"][tracing_id], layer_version_mapping) + def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping): + for tracing_id, tracing_type in annotation["layers"].items(): + self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping) -def migrate_materialized_layer(tracing_id, layer_type, layer_version_mapping): - if layer_type == "Skeleton": - migrate_skeleton_proto(tracing_id, layer_version_mapping) - if layer_type == "Volume": - migrate_volume_proto(tracing_id, layer_version_mapping) - migrate_volume_buckets(tracing_id, layer_version_mapping) - migrate_segment_index(tracing_id, layer_version_mapping) - migrate_editable_mapping(tracing_id, layer_version_mapping) + def migrate_materialized_layer(self, tracing_id, layer_type, layer_version_mapping): + if layer_type == "Skeleton": + self.migrate_skeleton_proto(tracing_id, layer_version_mapping) + if 
layer_type == "Volume": + self.migrate_volume_proto(tracing_id, layer_version_mapping) + self.migrate_volume_buckets(tracing_id, layer_version_mapping) + self.migrate_segment_index(tracing_id, layer_version_mapping) + self.migrate_editable_mapping(tracing_id, layer_version_mapping) -def migrate_skeleton_proto(layer, layer_version_mapping): - pass + def migrate_skeleton_proto(self, layer, layer_version_mapping): + pass -def migrate_volume_proto(layer, layer_version_mapping): - pass + def migrate_volume_proto(self, layer, layer_version_mapping): + pass -def migrate_volume_buckets(layer, layer_version_mapping): - pass + def migrate_volume_buckets(self, layer, layer_version_mapping): + pass -def migrate_segment_index(layer, layer_version_mapping): - pass + def migrate_segment_index(self, layer, layer_version_mapping): + pass -def migrate_editable_mapping(layer, layer_version_mapping): - migrate_editable_mapping_info(layer, layer_version_mapping) - migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) - migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping) + def migrate_editable_mapping(self, layer, layer_version_mapping): + self.migrate_editable_mapping_info(layer, layer_version_mapping) + self.migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) + self.migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping) -def migrate_editable_mapping_info(layer, layer_version_mapping): - pass + def migrate_editable_mapping_info(self, layer, layer_version_mapping): + pass -def migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping): - pass + def migrate_editable_mapping_agglomerate_to_graph(self, layer, layer_version_mapping): + pass -def migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping): - pass + def migrate_editable_mapping_segment_to_agglomerate(self, layer, layer_version_mapping): + pass -def insert_annotation_protos(annotation, 
layer_version_mapping): - pass + def insert_annotation_protos(self, annotation, layer_version_mapping): + pass -def read_annotation_list(start_time: datetime): - before = time.time() - logger.info("Determining annotation count from postgres...") - page_size = 100 - connection = psycopg2.connect(host="localhost", port=5432, database="webknossos", user='postgres', password='postgres') - cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) - modified_str = start_time.strftime("'%Y-%m-%d %H:%M:%S'") - cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < {modified_str}") - annotation_count = cursor.fetchone()['count'] - logger.info(f"Loading infos of {annotation_count} annotations from postgres ...") - annotations = [] - page_count = math.ceil(annotation_count / page_size) - for page_num in track(range(page_count), total=page_count, description=f"Loading annotation infos ..."): - query = f""" - SELECT a._id, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers - FROM webknossos.annotation_layers al - JOIN webknossos.annotations a on al._annotation = a._id - WHERE a.modified < {modified_str} - GROUP BY a._id - ORDER BY a._id - LIMIT {page_size} - OFFSET {page_size * page_num} - """ - cursor.execute(query) - annotations += cursor.fetchall() - logger.info(f"Loading annotations took {time.time() - before} s") - return annotations + def read_annotation_list(self, start_time: datetime): + before = time.time() + logger.info("Determining annotation count from postgres...") + page_size = 10000 + connection = psycopg2.connect(host="localhost", port=5432, database="webknossos", user='postgres', password='postgres') + cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + modified_str = start_time.strftime("'%Y-%m-%d %H:%M:%S'") + cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < {modified_str}") + annotation_count = cursor.fetchone()['count'] + logger.info(f"Loading 
infos of {annotation_count} annotations from postgres ...") + annotations = [] + page_count = math.ceil(annotation_count / page_size) + for page_num in track(range(page_count), total=page_count, description=f"Loading annotation infos ..."): + query = f""" + SELECT a._id, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers + FROM webknossos.annotation_layers al + JOIN webknossos.annotations a on al._annotation = a._id + WHERE a.modified < {modified_str} + GROUP BY a._id + ORDER BY a._id + LIMIT {page_size} + OFFSET {page_size * page_num} + """ + cursor.execute(query) + annotations += cursor.fetchall() + logger.info(f"Loading annotations took {time.time() - before} s") + return annotations def connect_to_fossildb(host): From aa86dada8898227a81c3994ead92e9675fc1879f Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 21 Nov 2024 15:20:31 +0100 Subject: [PATCH 204/361] migrate updates in stupid concat strategy --- .../main.py | 85 ++++++++++++++----- 1 file changed, 62 insertions(+), 23 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index b1859b97ac8..29662d68415 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -10,7 +10,7 @@ import logging import datetime import time -from typing import Dict +from typing import Dict, Iterator, Tuple, List from rich.progress import track import fossildbapi_pb2 as proto @@ -45,39 +45,63 @@ def __init__(self, args): def run(self): start_time = datetime.datetime.now() - logger.info(f"Using start time {start_time}") - annotations = self.read_annotation_list(start_time) - for annotation in annotations: self.migrate_annotation(annotation) - - - def migrate_annotation(self, annotation): - print(f"Migrating annotation {annotation['_id']} ...") + logger.info(f"Migrating annotation {annotation['_id']} ...") # layerId → {version_before → version_after} + before 
= time.time() layer_version_mapping = self.migrate_updates(annotation) self.migrate_materialized_layers(annotation, layer_version_mapping) - + logger.info(f"Took {time.time() - before} s") def migrate_updates(self, annotation) -> Dict[str, Dict[int, int]]: - layers = annotation["layers"] - global_version = 0 - for tracing_id, tracing_type in annotation["layers"].items(): - old_newest_version = self.get_newest_version(tracing_id, tracing_type) - # TODO - return {} - - def get_newest_version(self, tracing_id: str, layer_type: str) -> int: - collection = self.update_collection_for_layer_type(layer_type) + unified_version = 0 + version_mapping = {} + for tracing_id, layer_type in annotation["layers"].items(): + collection = self.update_collection_for_layer_type(layer_type) + version_mapping_for_layer = {0: 0} + newest_version = self.get_newest_version(tracing_id, collection) + for batch_start, batch_end in batch_range(newest_version, 1000): + update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) + for version, update_group in update_groups: + update_group = self.process_update_group(update_group) + unified_version += 1 + version_mapping_for_layer[version] = unified_version + self.save_update_group(unified_version, update_group) + version_mapping[tracing_id] = version_mapping_for_layer + + # TODO proofreading + # TODO interleave updates rather than concat + return version_mapping + + def process_update_group(self, update_group_raw: str) -> str: + # TODO renamings, add actionTracingId + return update_group_raw + + def save_update_group(self, version, update_group_raw: str) -> None: + # TODO save to dst_stub + return + + def get_newest_version(self, tracing_id: str, collection: str) -> int: getReply = self.src_stub.Get( proto.GetRequest(collection=collection, key=tracing_id, mayBeEmpty=True) ) - assert_success(getReply) - return getReply.actualVersion + if getReply.success: + return getReply.actualVersion + return 0 + + def 
get_update_batch(self, tracing_id: str, collection: str, batch_start: int, batch_end: int) -> List[Tuple[int, str]]: + reply = self.src_stub.GetMultipleVersions( + proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, oldestVersion=batch_start, newestVersion=batch_end-1) + ) + assert_success(reply) + reply.versions.reverse() + reply.values.reverse() + return list(zip(reply.versions, reply.values)) def update_collection_for_layer_type(self, layer_type): if layer_type == "Skeleton": @@ -88,7 +112,6 @@ def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_map for tracing_id, tracing_type in annotation["layers"].items(): self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping) - def migrate_materialized_layer(self, tracing_id, layer_type, layer_version_mapping): if layer_type == "Skeleton": self.migrate_skeleton_proto(tracing_id, layer_version_mapping) @@ -99,9 +122,14 @@ def migrate_materialized_layer(self, tracing_id, layer_type, layer_version_mappi self.migrate_editable_mapping(tracing_id, layer_version_mapping) - def migrate_skeleton_proto(self, layer, layer_version_mapping): - pass + def migrate_skeleton_proto(self, tracing_id, layer_version_mapping): + materialized_versions = self.list_versions("skeletons", tracing_id) + print(materialized_versions) + def list_versions(self, collection, key) -> List[int]: + reply = self.src_stub.ListVersions(proto.ListVersionsRequest(collection=collection, key=key)) + assert_success(reply) + return reply.versions def migrate_volume_proto(self, layer, layer_version_mapping): pass @@ -166,6 +194,17 @@ def read_annotation_list(self, start_time: datetime): return annotations +def batch_range( + limit: int, batch_size: int +) -> Iterator[Tuple[int, int]]: + full_range = range(limit) + + for i in range(full_range.start, full_range.stop, batch_size): + yield (i, min(i + batch_size, full_range.stop)) + + if i + batch_size >= full_range.stop: + return + def 
connect_to_fossildb(host): max_message_length = 2147483647 channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", max_message_length), ("grpc.max_receive_message_length", max_message_length)]) From fbd5ac8bc64333a21cfbe00dcfa67c1778017989 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 21 Nov 2024 15:46:36 +0100 Subject: [PATCH 205/361] migrate skeleton + volume proto --- .../main.py | 42 +++++++++++++------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 29662d68415..e4876e25306 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -36,6 +36,8 @@ def setup_logging(): handler.setFormatter(formatter) root.addHandler(handler) +LayerVersionMapping = Dict[str, Dict[int, int]] + class Migration: def __init__(self, args): @@ -56,9 +58,9 @@ def migrate_annotation(self, annotation): before = time.time() layer_version_mapping = self.migrate_updates(annotation) self.migrate_materialized_layers(annotation, layer_version_mapping) - logger.info(f"Took {time.time() - before} s") + log_since(before, "Migrating annotation") - def migrate_updates(self, annotation) -> Dict[str, Dict[int, int]]: + def migrate_updates(self, annotation) -> LayerVersionMapping: unified_version = 0 version_mapping = {} for tracing_id, layer_type in annotation["layers"].items(): @@ -94,9 +96,9 @@ def get_newest_version(self, tracing_id: str, collection: str) -> int: return getReply.actualVersion return 0 - def get_update_batch(self, tracing_id: str, collection: str, batch_start: int, batch_end: int) -> List[Tuple[int, str]]: + def get_update_batch(self, tracing_id: str, collection: str, batch_start: int, batch_end_inclusive: int) -> List[Tuple[int, str]]: reply = self.src_stub.GetMultipleVersions( - proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, 
oldestVersion=batch_start, newestVersion=batch_end-1) + proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, oldestVersion=batch_start, newestVersion=batch_end_inclusive) ) assert_success(reply) reply.versions.reverse() @@ -108,11 +110,11 @@ def update_collection_for_layer_type(self, layer_type): return "skeletonUpdates" return "volumeUpdates" - def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping): + def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping): for tracing_id, tracing_type in annotation["layers"].items(): self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping) - def migrate_materialized_layer(self, tracing_id, layer_type, layer_version_mapping): + def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping): if layer_type == "Skeleton": self.migrate_skeleton_proto(tracing_id, layer_version_mapping) if layer_type == "Volume": @@ -121,19 +123,31 @@ def migrate_materialized_layer(self, tracing_id, layer_type, layer_version_mappi self.migrate_segment_index(tracing_id, layer_version_mapping) self.migrate_editable_mapping(tracing_id, layer_version_mapping) + def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_versions_untouched("skeletons", tracing_id, layer_version_mapping) - def migrate_skeleton_proto(self, tracing_id, layer_version_mapping): - materialized_versions = self.list_versions("skeletons", tracing_id) - print(materialized_versions) + def migrate_versions_untouched(self, collection: str, key: str, layer_version_mapping: LayerVersionMapping): + materialized_versions = self.list_versions(collection, key) + for materialized_version in materialized_versions: + value_bytes = self.get_bytes(collection, key, materialized_version) + self.save_bytes(collection, key, layer_version_mapping[key][materialized_version], 
value_bytes) def list_versions(self, collection, key) -> List[int]: reply = self.src_stub.ListVersions(proto.ListVersionsRequest(collection=collection, key=key)) assert_success(reply) return reply.versions - def migrate_volume_proto(self, layer, layer_version_mapping): - pass + def get_bytes(self, collection: str, key: str, version: int) -> bytes: + reply = self.src_stub.Get(proto.GetRequest(collection=collection, key=key, version=version)) + assert_success(reply) + return reply.value + def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> None: + # TODO + pass + + def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_versions_untouched("volumes", tracing_id, layer_version_mapping) def migrate_volume_buckets(self, layer, layer_version_mapping): pass @@ -190,10 +204,14 @@ def read_annotation_list(self, start_time: datetime): """ cursor.execute(query) annotations += cursor.fetchall() - logger.info(f"Loading annotations took {time.time() - before} s") + log_since(before, "Loading annotations") return annotations +def log_since(before, label) -> None: + diff = time.time() - before + logger.info(f"{label} took {diff:.2f} s") + def batch_range( limit: int, batch_size: int ) -> Iterator[Tuple[int, int]]: From fce65767978490a8e0d9dda7c3b57721a9904989 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 21 Nov 2024 15:57:58 +0100 Subject: [PATCH 206/361] restructure migration code --- .../connections.py | 44 ++++ .../main.py | 235 +----------------- .../migration.py | 192 ++++++++++++++ .../utils.py | 34 +++ 4 files changed, 272 insertions(+), 233 deletions(-) create mode 100644 tools/migration-unified-annotation-versioning/connections.py create mode 100644 tools/migration-unified-annotation-versioning/migration.py create mode 100644 tools/migration-unified-annotation-versioning/utils.py diff --git a/tools/migration-unified-annotation-versioning/connections.py 
b/tools/migration-unified-annotation-versioning/connections.py new file mode 100644 index 00000000000..2c4d3f9a19e --- /dev/null +++ b/tools/migration-unified-annotation-versioning/connections.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 + +import argparse +import grpc +import sys +import psycopg2 +import psycopg2.extras +from psycopg2.extras import RealDictRow +import math +import logging +import datetime +import time +from typing import Dict, Iterator, Tuple, List +from rich.progress import track + +import fossildbapi_pb2 as proto +import fossildbapi_pb2_grpc as proto_rpc + + +def connect_to_fossildb(host): + max_message_length = 2147483647 + channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", max_message_length), ("grpc.max_receive_message_length", max_message_length)]) + stub = proto_rpc.FossilDBStub(channel) + test_fossildb_health(stub, f"fossildb at {host}") + return stub + + +def test_fossildb_health(stub, label): + try: + reply = stub.Health(proto.HealthRequest()) + assert_grpc_success(reply) + print('successfully connected to ' + label) + except Exception as e: + print('failed to connect to ' + label + ': ' + str(e)) + sys.exit(1) + + +def assert_grpc_success(reply): + if not reply.success: + raise Exception("reply.success failed: " + reply.errorMessage) + + +def connect_to_postgres(): + return psycopg2.connect(host="localhost", port=5432, database="webknossos", user='postgres', password='postgres') diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index e4876e25306..7d0d9c66341 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -1,20 +1,8 @@ #!/usr/bin/env python3 -import argparse -import grpc -import sys -import psycopg2 -import psycopg2.extras -from psycopg2.extras import RealDictRow -import math import logging -import datetime -import time -from typing import Dict, Iterator, 
Tuple, List -from rich.progress import track - -import fossildbapi_pb2 as proto -import fossildbapi_pb2_grpc as proto_rpc +from migration import Migration +from utils import setup_logging logger = logging.getLogger(__name__) @@ -26,225 +14,6 @@ def main(): migration = Migration(args) migration.run() -def setup_logging(): - root = logging.getLogger() - root.setLevel(logging.DEBUG) - - handler = logging.StreamHandler(sys.stdout) - handler.setLevel(logging.DEBUG) - formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s") - handler.setFormatter(formatter) - root.addHandler(handler) - -LayerVersionMapping = Dict[str, Dict[int, int]] - -class Migration: - - def __init__(self, args): - self.args = args - self.src_stub = connect_to_fossildb("localhost:7155") - # self.dst_stub = connect_to_fossildb("localhost:7199") - - def run(self): - start_time = datetime.datetime.now() - logger.info(f"Using start time {start_time}") - annotations = self.read_annotation_list(start_time) - for annotation in annotations: - self.migrate_annotation(annotation) - - def migrate_annotation(self, annotation): - logger.info(f"Migrating annotation {annotation['_id']} ...") - # layerId → {version_before → version_after} - before = time.time() - layer_version_mapping = self.migrate_updates(annotation) - self.migrate_materialized_layers(annotation, layer_version_mapping) - log_since(before, "Migrating annotation") - - def migrate_updates(self, annotation) -> LayerVersionMapping: - unified_version = 0 - version_mapping = {} - for tracing_id, layer_type in annotation["layers"].items(): - collection = self.update_collection_for_layer_type(layer_type) - version_mapping_for_layer = {0: 0} - newest_version = self.get_newest_version(tracing_id, collection) - for batch_start, batch_end in batch_range(newest_version, 1000): - update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) - for version, update_group in update_groups: - update_group = 
self.process_update_group(update_group) - unified_version += 1 - version_mapping_for_layer[version] = unified_version - self.save_update_group(unified_version, update_group) - version_mapping[tracing_id] = version_mapping_for_layer - - # TODO proofreading - # TODO interleave updates rather than concat - return version_mapping - - def process_update_group(self, update_group_raw: str) -> str: - # TODO renamings, add actionTracingId - return update_group_raw - - def save_update_group(self, version, update_group_raw: str) -> None: - # TODO save to dst_stub - return - - def get_newest_version(self, tracing_id: str, collection: str) -> int: - getReply = self.src_stub.Get( - proto.GetRequest(collection=collection, key=tracing_id, mayBeEmpty=True) - ) - if getReply.success: - return getReply.actualVersion - return 0 - - def get_update_batch(self, tracing_id: str, collection: str, batch_start: int, batch_end_inclusive: int) -> List[Tuple[int, str]]: - reply = self.src_stub.GetMultipleVersions( - proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, oldestVersion=batch_start, newestVersion=batch_end_inclusive) - ) - assert_success(reply) - reply.versions.reverse() - reply.values.reverse() - return list(zip(reply.versions, reply.values)) - - def update_collection_for_layer_type(self, layer_type): - if layer_type == "Skeleton": - return "skeletonUpdates" - return "volumeUpdates" - - def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping): - for tracing_id, tracing_type in annotation["layers"].items(): - self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping) - - def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping): - if layer_type == "Skeleton": - self.migrate_skeleton_proto(tracing_id, layer_version_mapping) - if layer_type == "Volume": - self.migrate_volume_proto(tracing_id, layer_version_mapping) - 
self.migrate_volume_buckets(tracing_id, layer_version_mapping) - self.migrate_segment_index(tracing_id, layer_version_mapping) - self.migrate_editable_mapping(tracing_id, layer_version_mapping) - - def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): - self.migrate_versions_untouched("skeletons", tracing_id, layer_version_mapping) - - def migrate_versions_untouched(self, collection: str, key: str, layer_version_mapping: LayerVersionMapping): - materialized_versions = self.list_versions(collection, key) - for materialized_version in materialized_versions: - value_bytes = self.get_bytes(collection, key, materialized_version) - self.save_bytes(collection, key, layer_version_mapping[key][materialized_version], value_bytes) - - def list_versions(self, collection, key) -> List[int]: - reply = self.src_stub.ListVersions(proto.ListVersionsRequest(collection=collection, key=key)) - assert_success(reply) - return reply.versions - - def get_bytes(self, collection: str, key: str, version: int) -> bytes: - reply = self.src_stub.Get(proto.GetRequest(collection=collection, key=key, version=version)) - assert_success(reply) - return reply.value - - def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> None: - # TODO - pass - - def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): - self.migrate_versions_untouched("volumes", tracing_id, layer_version_mapping) - - def migrate_volume_buckets(self, layer, layer_version_mapping): - pass - - - def migrate_segment_index(self, layer, layer_version_mapping): - pass - - - def migrate_editable_mapping(self, layer, layer_version_mapping): - self.migrate_editable_mapping_info(layer, layer_version_mapping) - self.migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) - self.migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping) - - - def migrate_editable_mapping_info(self, layer, 
layer_version_mapping): - pass - - - def migrate_editable_mapping_agglomerate_to_graph(self, layer, layer_version_mapping): - pass - - - def migrate_editable_mapping_segment_to_agglomerate(self, layer, layer_version_mapping): - pass - - - def insert_annotation_protos(self, annotation, layer_version_mapping): - pass - - - def read_annotation_list(self, start_time: datetime): - before = time.time() - logger.info("Determining annotation count from postgres...") - page_size = 10000 - connection = psycopg2.connect(host="localhost", port=5432, database="webknossos", user='postgres', password='postgres') - cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) - modified_str = start_time.strftime("'%Y-%m-%d %H:%M:%S'") - cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < {modified_str}") - annotation_count = cursor.fetchone()['count'] - logger.info(f"Loading infos of {annotation_count} annotations from postgres ...") - annotations = [] - page_count = math.ceil(annotation_count / page_size) - for page_num in track(range(page_count), total=page_count, description=f"Loading annotation infos ..."): - query = f""" - SELECT a._id, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers - FROM webknossos.annotation_layers al - JOIN webknossos.annotations a on al._annotation = a._id - WHERE a.modified < {modified_str} - GROUP BY a._id - ORDER BY a._id - LIMIT {page_size} - OFFSET {page_size * page_num} - """ - cursor.execute(query) - annotations += cursor.fetchall() - log_since(before, "Loading annotations") - return annotations - - -def log_since(before, label) -> None: - diff = time.time() - before - logger.info(f"{label} took {diff:.2f} s") - -def batch_range( - limit: int, batch_size: int -) -> Iterator[Tuple[int, int]]: - full_range = range(limit) - - for i in range(full_range.start, full_range.stop, batch_size): - yield (i, min(i + batch_size, full_range.stop)) - - if i + batch_size >= full_range.stop: - return - 
-def connect_to_fossildb(host): - max_message_length = 2147483647 - channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", max_message_length), ("grpc.max_receive_message_length", max_message_length)]) - stub = proto_rpc.FossilDBStub(channel) - test_health(stub, f"fossildb at {host}") - return stub - - -def test_health(stub, label): - try: - reply = stub.Health(proto.HealthRequest()) - assert_success(reply) - print('successfully connected to ' + label) - except Exception as e: - print('failed to connect to ' + label + ': ' + str(e)) - sys.exit(1) - - -def assert_success(reply): - if not reply.success: - raise Exception("reply.success failed: " + reply.errorMessage) - if __name__ == '__main__': main() diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py new file mode 100644 index 00000000000..f383ed9f7a8 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python3 + +import psycopg2 +import psycopg2.extras +from psycopg2.extras import RealDictRow +import math +import logging +import datetime +import time +from typing import Dict, Tuple, List +from rich.progress import track + +import fossildbapi_pb2 as proto +from utils import log_since, batch_range + +from connections import connect_to_fossildb, connect_to_postgres, assert_grpc_success + +logger = logging.getLogger(__name__) + +LayerVersionMapping = Dict[str, Dict[int, int]] + + +class Migration: + + def __init__(self, args): + self.args = args + self.src_stub = connect_to_fossildb("localhost:7155") + # self.dst_stub = connect_to_fossildb("localhost:7199") + + def run(self): + start_time = datetime.datetime.now() + before = time.time() + logger.info(f"Using start time {start_time}") + annotations = self.read_annotation_list(start_time) + for annotation in annotations: + self.migrate_annotation(annotation) + log_since(before, "Migrating all the things") 
+ + def migrate_annotation(self, annotation): + logger.info(f"Migrating annotation {annotation['_id']} ...") + # layerId → {version_before → version_after} + before = time.time() + layer_version_mapping = self.migrate_updates(annotation) + self.migrate_materialized_layers(annotation, layer_version_mapping) + log_since(before, "") + + def migrate_updates(self, annotation) -> LayerVersionMapping: + unified_version = 0 + version_mapping = {} + for tracing_id, layer_type in annotation["layers"].items(): + collection = self.update_collection_for_layer_type(layer_type) + version_mapping_for_layer = {0: 0} + newest_version = self.get_newest_version(tracing_id, collection) + for batch_start, batch_end in batch_range(newest_version, 1000): + update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) + for version, update_group in update_groups: + update_group = self.process_update_group(update_group) + unified_version += 1 + version_mapping_for_layer[version] = unified_version + self.save_update_group(unified_version, update_group) + version_mapping[tracing_id] = version_mapping_for_layer + + # TODO proofreading + # TODO interleave updates rather than concat + return version_mapping + + def process_update_group(self, update_group_raw: str) -> str: + # TODO renamings, add actionTracingId + return update_group_raw + + def save_update_group(self, version, update_group_raw: str) -> None: + # TODO save to dst_stub + return + + def get_newest_version(self, tracing_id: str, collection: str) -> int: + getReply = self.src_stub.Get( + proto.GetRequest(collection=collection, key=tracing_id, mayBeEmpty=True) + ) + if getReply.success: + return getReply.actualVersion + return 0 + + def get_update_batch(self, tracing_id: str, collection: str, batch_start: int, batch_end_inclusive: int) -> List[Tuple[int, str]]: + reply = self.src_stub.GetMultipleVersions( + proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, oldestVersion=batch_start, 
newestVersion=batch_end_inclusive) + ) + assert_grpc_success(reply) + reply.versions.reverse() + reply.values.reverse() + return list(zip(reply.versions, reply.values)) + + def update_collection_for_layer_type(self, layer_type): + if layer_type == "Skeleton": + return "skeletonUpdates" + return "volumeUpdates" + + def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping): + for tracing_id, tracing_type in annotation["layers"].items(): + self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping) + + def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping): + if layer_type == "Skeleton": + self.migrate_skeleton_proto(tracing_id, layer_version_mapping) + if layer_type == "Volume": + self.migrate_volume_proto(tracing_id, layer_version_mapping) + self.migrate_volume_buckets(tracing_id, layer_version_mapping) + self.migrate_segment_index(tracing_id, layer_version_mapping) + self.migrate_editable_mapping(tracing_id, layer_version_mapping) + + def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_versions_untouched("skeletons", tracing_id, layer_version_mapping) + + def migrate_versions_untouched(self, collection: str, key: str, layer_version_mapping: LayerVersionMapping): + materialized_versions = self.list_versions(collection, key) + for materialized_version in materialized_versions: + value_bytes = self.get_bytes(collection, key, materialized_version) + self.save_bytes(collection, key, layer_version_mapping[key][materialized_version], value_bytes) + + def list_versions(self, collection, key) -> List[int]: + reply = self.src_stub.ListVersions(proto.ListVersionsRequest(collection=collection, key=key)) + assert_grpc_success(reply) + return reply.versions + + def get_bytes(self, collection: str, key: str, version: int) -> bytes: + reply = 
self.src_stub.Get(proto.GetRequest(collection=collection, key=key, version=version)) + assert_grpc_success(reply) + return reply.value + + def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> None: + # TODO + pass + + def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_versions_untouched("volumes", tracing_id, layer_version_mapping) + + def migrate_volume_buckets(self, layer, layer_version_mapping): + pass + + + def migrate_segment_index(self, layer, layer_version_mapping): + pass + + + def migrate_editable_mapping(self, layer, layer_version_mapping): + self.migrate_editable_mapping_info(layer, layer_version_mapping) + self.migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) + self.migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping) + + + def migrate_editable_mapping_info(self, layer, layer_version_mapping): + pass + + + def migrate_editable_mapping_agglomerate_to_graph(self, layer, layer_version_mapping): + pass + + + def migrate_editable_mapping_segment_to_agglomerate(self, layer, layer_version_mapping): + pass + + + def insert_annotation_protos(self, annotation, layer_version_mapping): + pass + + + def read_annotation_list(self, start_time: datetime): + before = time.time() + logger.info("Determining annotation count from postgres...") + page_size = 10000 + connection = connect_to_postgres() + cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) + modified_str = start_time.strftime("'%Y-%m-%d %H:%M:%S'") + cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < {modified_str}") + annotation_count = cursor.fetchone()['count'] + logger.info(f"Loading infos of {annotation_count} annotations from postgres ...") + annotations = [] + page_count = math.ceil(annotation_count / page_size) + for page_num in track(range(page_count), total=page_count, description=f"Loading annotation infos ..."): + 
query = f""" + SELECT a._id, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers + FROM webknossos.annotation_layers al + JOIN webknossos.annotations a on al._annotation = a._id + WHERE a.modified < {modified_str} + GROUP BY a._id + ORDER BY a._id + LIMIT {page_size} + OFFSET {page_size * page_num} + """ + cursor.execute(query) + annotations += cursor.fetchall() + log_since(before, "Loading annotations") + return annotations diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py new file mode 100644 index 00000000000..3c7ff86026f --- /dev/null +++ b/tools/migration-unified-annotation-versioning/utils.py @@ -0,0 +1,34 @@ +import logging +import time +from typing import Iterator, Tuple +import sys + +logger = logging.getLogger(__name__) + + +def setup_logging(): + root = logging.getLogger() + root.setLevel(logging.DEBUG) + + handler = logging.StreamHandler(sys.stdout) + handler.setLevel(logging.DEBUG) + formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s") + handler.setFormatter(formatter) + root.addHandler(handler) + + +def log_since(before, label: str) -> None: + diff = time.time() - before + logger.info(f"{label} took {diff:.2f} s") + + +def batch_range( + limit: int, batch_size: int +) -> Iterator[Tuple[int, int]]: + full_range = range(limit) + + for i in range(full_range.start, full_range.stop, batch_size): + yield (i, min(i + batch_size, full_range.stop)) + + if i + batch_size >= full_range.stop: + return From 187eed079e42d0ba09a3c737b14fba8184fc9963 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 21 Nov 2024 16:26:29 +0100 Subject: [PATCH 207/361] cleanup --- .../connections.py | 24 +++++-------------- .../migration.py | 11 +-------- 2 files changed, 7 insertions(+), 28 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/connections.py b/tools/migration-unified-annotation-versioning/connections.py index 2c4d3f9a19e..084fe641262 
100644 --- a/tools/migration-unified-annotation-versioning/connections.py +++ b/tools/migration-unified-annotation-versioning/connections.py @@ -1,38 +1,26 @@ -#!/usr/bin/env python3 - -import argparse import grpc -import sys import psycopg2 import psycopg2.extras -from psycopg2.extras import RealDictRow -import math import logging -import datetime -import time -from typing import Dict, Iterator, Tuple, List -from rich.progress import track import fossildbapi_pb2 as proto import fossildbapi_pb2_grpc as proto_rpc +logger = logging.getLogger(__name__) + def connect_to_fossildb(host): max_message_length = 2147483647 channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", max_message_length), ("grpc.max_receive_message_length", max_message_length)]) stub = proto_rpc.FossilDBStub(channel) - test_fossildb_health(stub, f"fossildb at {host}") + test_fossildb_health(stub, f"Fossildb at {host}") return stub def test_fossildb_health(stub, label): - try: - reply = stub.Health(proto.HealthRequest()) - assert_grpc_success(reply) - print('successfully connected to ' + label) - except Exception as e: - print('failed to connect to ' + label + ': ' + str(e)) - sys.exit(1) + reply = stub.Health(proto.HealthRequest()) + assert_grpc_success(reply) + logger.info('Successfully connected to ' + label) def assert_grpc_success(reply): diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index f383ed9f7a8..bddfd1a46ff 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python3 - import psycopg2 import psycopg2.extras from psycopg2.extras import RealDictRow @@ -12,11 +10,11 @@ import fossildbapi_pb2 as proto from utils import log_since, batch_range - from connections import connect_to_fossildb, connect_to_postgres, assert_grpc_success logger = logging.getLogger(__name__) + 
LayerVersionMapping = Dict[str, Dict[int, int]] @@ -136,33 +134,26 @@ def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVers def migrate_volume_buckets(self, layer, layer_version_mapping): pass - def migrate_segment_index(self, layer, layer_version_mapping): pass - def migrate_editable_mapping(self, layer, layer_version_mapping): self.migrate_editable_mapping_info(layer, layer_version_mapping) self.migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) self.migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping) - def migrate_editable_mapping_info(self, layer, layer_version_mapping): pass - def migrate_editable_mapping_agglomerate_to_graph(self, layer, layer_version_mapping): pass - def migrate_editable_mapping_segment_to_agglomerate(self, layer, layer_version_mapping): pass - def insert_annotation_protos(self, annotation, layer_version_mapping): pass - def read_annotation_list(self, start_time: datetime): before = time.time() logger.info("Determining annotation count from postgres...") From 5dbaceba9025665da6c5714508aae83c01021c2b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 25 Nov 2024 11:45:32 +0100 Subject: [PATCH 208/361] add two frontend TODO comments --- frontend/javascripts/oxalis/model/reducers/save_reducer.ts | 1 + frontend/javascripts/oxalis/model_initialization.ts | 1 + 2 files changed, 2 insertions(+) diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 61655226f1d..f16d4133c0f 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -189,6 +189,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } const layerIndependentActions = new Set([ + // TODOM: sync this with the backend. The backend currently has only two such actions that have this requirement. 
"updateTdCamera", "revertToVersion", "addLayerToAnnotation", diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 34a0e6f478d..ab047deef58 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -138,6 +138,7 @@ export async function initialize( maybeOutdatedAnnotation.tracingStore.url, maybeOutdatedAnnotation.id, ); + // TODOP: potential updating the version of the annotation is needed. It is at least not done here. const layersWithStats = annotationFromTracingStore.annotationLayers.map((layer) => { return { tracingId: layer.tracingId, From 7f96b42cb37c505204a83ab770b079e4a037dc40 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 25 Nov 2024 11:53:18 +0100 Subject: [PATCH 209/361] migration: renamings for update actions, insert actionTracingId --- .../migration.py | 39 ++++++++++++++----- .../requirements.txt | 1 + 2 files changed, 31 insertions(+), 9 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index bddfd1a46ff..390548b3daf 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -5,8 +5,9 @@ import logging import datetime import time -from typing import Dict, Tuple, List +from typing import Dict, Tuple, List, Optional from rich.progress import track +import orjson import fossildbapi_pb2 as proto from utils import log_since, batch_range @@ -49,25 +50,45 @@ def migrate_updates(self, annotation) -> LayerVersionMapping: collection = self.update_collection_for_layer_type(layer_type) version_mapping_for_layer = {0: 0} newest_version = self.get_newest_version(tracing_id, collection) + editable_mapping_id_opt = None # TODO parse from newest_version (value needs to be parsed as proto) for batch_start, batch_end in batch_range(newest_version, 1000): 
update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) for version, update_group in update_groups: - update_group = self.process_update_group(update_group) + update_group = self.process_update_group(tracing_id, layer_type, update_group) unified_version += 1 version_mapping_for_layer[version] = unified_version self.save_update_group(unified_version, update_group) version_mapping[tracing_id] = version_mapping_for_layer + if editable_mapping_id_opt is not None: + # TODO migrate editable mapping updates + pass - # TODO proofreading # TODO interleave updates rather than concat + # TODO handle existing revertToVersion update actions return version_mapping - def process_update_group(self, update_group_raw: str) -> str: - # TODO renamings, add actionTracingId - return update_group_raw + def process_update_group(self, tracing_id: str, layer_type: str, update_group_raw: bytes) -> bytes: + update_group_parsed = orjson.loads(update_group_raw) - def save_update_group(self, version, update_group_raw: str) -> None: - # TODO save to dst_stub + # TODO handle existing revertToVersion update actions + + for update in update_group_parsed: + name = update["name"] + + if name == "updateTracing": + update["name"] = f"update{layer_type}Tracing" + elif name == "updateUserBoundingBoxes": + update["name"] = f"updateUserBoundingBoxesIn{layer_type}Tracing" + elif name == "updateUserBoundingBoxVisibility": + update["name"] = f"updateUserBoundingBoxVisibilityIn{layer_type}Tracing" + + if not name == "updateTdCamera": + update["value"]["actionTracingId"] = tracing_id + + return orjson.dumps(update_group_parsed) + + def save_update_group(self, version, update_group_raw: bytes) -> None: + print(f"saving update group: {update_group_raw}") return def get_newest_version(self, tracing_id: str, collection: str) -> int: @@ -78,7 +99,7 @@ def get_newest_version(self, tracing_id: str, collection: str) -> int: return getReply.actualVersion return 0 - def get_update_batch(self, 
tracing_id: str, collection: str, batch_start: int, batch_end_inclusive: int) -> List[Tuple[int, str]]: + def get_update_batch(self, tracing_id: str, collection: str, batch_start: int, batch_end_inclusive: int) -> List[Tuple[int, bytes]]: reply = self.src_stub.GetMultipleVersions( proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, oldestVersion=batch_start, newestVersion=batch_end_inclusive) ) diff --git a/tools/migration-unified-annotation-versioning/requirements.txt b/tools/migration-unified-annotation-versioning/requirements.txt index 65865cf0f9a..28223d2d908 100644 --- a/tools/migration-unified-annotation-versioning/requirements.txt +++ b/tools/migration-unified-annotation-versioning/requirements.txt @@ -3,3 +3,4 @@ argparse psycopg2 protobuf rich +orjson From 9b72b1557254c4b7b90f18e2f1a662be399ae21e Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 25 Nov 2024 12:00:53 +0100 Subject: [PATCH 210/361] migration: add generated code --- .../generated/Annotation_pb2.py | 29 ++++++++++++ .../generated/SkeletonTracing_pb2.py | 45 +++++++++++++++++++ .../generated/VolumeTracing_pb2.py | 37 +++++++++++++++ .../generated/__init__.py | 0 .../{ => generated}/fossildbapi_pb2.py (100%) .../{ => generated}/fossildbapi_pb2_grpc.py (100%) .../migration.py | 3 +- 7 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 tools/migration-unified-annotation-versioning/generated/Annotation_pb2.py create mode 100644 tools/migration-unified-annotation-versioning/generated/SkeletonTracing_pb2.py create mode 100644 tools/migration-unified-annotation-versioning/generated/VolumeTracing_pb2.py create mode 100644 tools/migration-unified-annotation-versioning/generated/__init__.py rename tools/migration-unified-annotation-versioning/{ => generated}/fossildbapi_pb2.py (100%) rename tools/migration-unified-annotation-versioning/{ => generated}/fossildbapi_pb2_grpc.py (100%) diff --git a/tools/migration-unified-annotation-versioning/generated/Annotation_pb2.py 
b/tools/migration-unified-annotation-versioning/generated/Annotation_pb2.py new file mode 100644 index 00000000000..35e9ad6cef0 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/generated/Annotation_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: Annotation.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\xc0\x01\n\x0f\x41nnotationProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x04 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x05 \x02(\x03\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x04 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'Annotation_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _ANNOTATIONLAYERTYPEPROTO._serialized_start=393 + _ANNOTATIONLAYERTYPEPROTO._serialized_end=445 + _ANNOTATIONPROTO._serialized_start=61 + _ANNOTATIONPROTO._serialized_end=253 + _ANNOTATIONLAYERPROTO._serialized_start=256 + _ANNOTATIONLAYERPROTO._serialized_end=391 +# 
@@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/generated/SkeletonTracing_pb2.py b/tools/migration-unified-annotation-versioning/generated/SkeletonTracing_pb2.py new file mode 100644 index 00000000000..daac9b940f1 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/generated/SkeletonTracing_pb2.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: SkeletonTracing.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import geometry_pb2 as geometry__pb2 +import MetadataEntry_pb2 as MetadataEntry__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15SkeletonTracing.proto\x12&com.scalableminds.webknossos.datastore\x1a\x0egeometry.proto\x1a\x13MetadataEntry.proto\"\xf9\x02\n\x04Node\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x46\n\x08position\x18\x02 \x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12I\n\x08rotation\x18\x03 \x02(\x0b\x32\x37.com.scalableminds.webknossos.datastore.Vec3DoubleProto\x12\x0e\n\x06radius\x18\x04 \x02(\x02\x12\x10\n\x08viewport\x18\x05 \x02(\x05\x12\x0b\n\x03mag\x18\x06 \x02(\x05\x12\x10\n\x08\x62itDepth\x18\x07 \x02(\x05\x12\x15\n\rinterpolation\x18\x08 \x02(\x08\x12\x18\n\x10\x63reatedTimestamp\x18\t \x02(\x03\x12`\n\x15\x61\x64\x64itionalCoordinates\x18\n \x03(\x0b\x32\x41.com.scalableminds.webknossos.datastore.AdditionalCoordinateProto\"&\n\x04\x45\x64ge\x12\x0e\n\x06source\x18\x01 \x02(\x05\x12\x0e\n\x06target\x18\x02 \x02(\x05\"*\n\x07\x43omment\x12\x0e\n\x06nodeId\x18\x01 \x02(\x05\x12\x0f\n\x07\x63ontent\x18\x02 
\x02(\t\"7\n\x0b\x42ranchPoint\x12\x0e\n\x06nodeId\x18\x01 \x02(\x05\x12\x18\n\x10\x63reatedTimestamp\x18\x02 \x02(\x03\"\xd9\x04\n\x04Tree\x12\x0e\n\x06treeId\x18\x01 \x02(\x05\x12;\n\x05nodes\x18\x02 \x03(\x0b\x32,.com.scalableminds.webknossos.datastore.Node\x12;\n\x05\x65\x64ges\x18\x03 \x03(\x0b\x32,.com.scalableminds.webknossos.datastore.Edge\x12\x41\n\x05\x63olor\x18\x04 \x01(\x0b\x32\x32.com.scalableminds.webknossos.datastore.ColorProto\x12I\n\x0c\x62ranchPoints\x18\x05 \x03(\x0b\x32\x33.com.scalableminds.webknossos.datastore.BranchPoint\x12\x41\n\x08\x63omments\x18\x06 \x03(\x0b\x32/.com.scalableminds.webknossos.datastore.Comment\x12\x0c\n\x04name\x18\x07 \x02(\t\x12\x18\n\x10\x63reatedTimestamp\x18\x08 \x02(\x03\x12\x0f\n\x07groupId\x18\t \x01(\x05\x12\x11\n\tisVisible\x18\n \x01(\x08\x12\x43\n\x04type\x18\x0b \x01(\x0e\x32\x35.com.scalableminds.webknossos.datastore.TreeTypeProto\x12\x17\n\x0f\x65\x64gesAreVisible\x18\x0c \x01(\x08\x12L\n\x08metadata\x18\r \x03(\x0b\x32:.com.scalableminds.webknossos.datastore.MetadataEntryProto\"\x83\x01\n\tTreeGroup\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0f\n\x07groupId\x18\x02 \x02(\x05\x12\x43\n\x08\x63hildren\x18\x03 \x03(\x0b\x32\x31.com.scalableminds.webknossos.datastore.TreeGroup\x12\x12\n\nisExpanded\x18\x04 \x01(\x08\"\xf0\x06\n\x0fSkeletonTracing\x12\x13\n\x0b\x64\x61tasetName\x18\x01 \x02(\t\x12;\n\x05trees\x18\x02 \x03(\x0b\x32,.com.scalableminds.webknossos.datastore.Tree\x12\x18\n\x10\x63reatedTimestamp\x18\x03 \x02(\x03\x12M\n\x0b\x62oundingBox\x18\x04 \x01(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\x12\x14\n\x0c\x61\x63tiveNodeId\x18\x05 \x01(\x05\x12J\n\x0c\x65\x64itPosition\x18\x06 \x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12M\n\x0c\x65\x64itRotation\x18\x07 \x02(\x0b\x32\x37.com.scalableminds.webknossos.datastore.Vec3DoubleProto\x12\x11\n\tzoomLevel\x18\x08 \x02(\x01\x12\x0f\n\x07version\x18\t \x02(\x03\x12Q\n\x0fuserBoundingBox\x18\n 
\x01(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\x12\x45\n\ntreeGroups\x18\x0b \x03(\x0b\x32\x31.com.scalableminds.webknossos.datastore.TreeGroup\x12X\n\x11userBoundingBoxes\x18\x0c \x03(\x0b\x32=.com.scalableminds.webknossos.datastore.NamedBoundingBoxProto\x12\x16\n\x0eorganizationId\x18\r \x01(\t\x12l\n!editPositionAdditionalCoordinates\x18\x15 \x03(\x0b\x32\x41.com.scalableminds.webknossos.datastore.AdditionalCoordinateProto\x12S\n\x0e\x61\x64\x64itionalAxes\x18\x16 \x03(\x0b\x32;.com.scalableminds.webknossos.datastore.AdditionalAxisProto\"^\n\x12SkeletonTracingOpt\x12H\n\x07tracing\x18\x01 \x01(\x0b\x32\x37.com.scalableminds.webknossos.datastore.SkeletonTracing\"`\n\x10SkeletonTracings\x12L\n\x08tracings\x18\x01 \x03(\x0b\x32:.com.scalableminds.webknossos.datastore.SkeletonTracingOpt*-\n\rTreeTypeProto\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x0f\n\x0b\x41GGLOMERATE\x10\x01') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'SkeletonTracing_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _TREETYPEPROTO._serialized_start=2438 + _TREETYPEPROTO._serialized_end=2483 + _NODE._serialized_start=103 + _NODE._serialized_end=480 + _EDGE._serialized_start=482 + _EDGE._serialized_end=520 + _COMMENT._serialized_start=522 + _COMMENT._serialized_end=564 + _BRANCHPOINT._serialized_start=566 + _BRANCHPOINT._serialized_end=621 + _TREE._serialized_start=624 + _TREE._serialized_end=1225 + _TREEGROUP._serialized_start=1228 + _TREEGROUP._serialized_end=1359 + _SKELETONTRACING._serialized_start=1362 + _SKELETONTRACING._serialized_end=2242 + _SKELETONTRACINGOPT._serialized_start=2244 + _SKELETONTRACINGOPT._serialized_end=2338 + _SKELETONTRACINGS._serialized_start=2340 + _SKELETONTRACINGS._serialized_end=2436 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/generated/VolumeTracing_pb2.py 
b/tools/migration-unified-annotation-versioning/generated/VolumeTracing_pb2.py new file mode 100644 index 00000000000..1c917e7cf49 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/generated/VolumeTracing_pb2.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: VolumeTracing.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import geometry_pb2 as geometry__pb2 +import MetadataEntry_pb2 as MetadataEntry__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13VolumeTracing.proto\x12&com.scalableminds.webknossos.datastore\x1a\x0egeometry.proto\x1a\x13MetadataEntry.proto\"\xa0\x03\n\x07Segment\x12\x11\n\tsegmentId\x18\x01 \x02(\x03\x12L\n\x0e\x61nchorPosition\x18\x02 \x01(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x14\n\x0c\x63reationTime\x18\x04 \x01(\x03\x12\x41\n\x05\x63olor\x18\x05 \x01(\x0b\x32\x32.com.scalableminds.webknossos.datastore.ColorProto\x12\x0f\n\x07groupId\x18\x06 \x01(\x05\x12n\n#anchorPositionAdditionalCoordinates\x18\x07 \x03(\x0b\x32\x41.com.scalableminds.webknossos.datastore.AdditionalCoordinateProto\x12L\n\x08metadata\x18\x0b \x03(\x0b\x32:.com.scalableminds.webknossos.datastore.MetadataEntryProto\"\x84\n\n\rVolumeTracing\x12\x17\n\x0f\x61\x63tiveSegmentId\x18\x01 \x01(\x03\x12M\n\x0b\x62oundingBox\x18\x02 \x02(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\x12\x18\n\x10\x63reatedTimestamp\x18\x03 \x02(\x03\x12\x13\n\x0b\x64\x61tasetName\x18\x04 \x02(\t\x12J\n\x0c\x65\x64itPosition\x18\x05 
\x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12M\n\x0c\x65\x64itRotation\x18\x06 \x02(\x0b\x32\x37.com.scalableminds.webknossos.datastore.Vec3DoubleProto\x12]\n\x0c\x65lementClass\x18\x07 \x02(\x0e\x32G.com.scalableminds.webknossos.datastore.VolumeTracing.ElementClassProto\x12\x15\n\rfallbackLayer\x18\x08 \x01(\t\x12\x18\n\x10largestSegmentId\x18\t \x01(\x03\x12\x0f\n\x07version\x18\n \x02(\x03\x12\x11\n\tzoomLevel\x18\x0b \x02(\x01\x12Q\n\x0fuserBoundingBox\x18\x0c \x01(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\x12X\n\x11userBoundingBoxes\x18\r \x03(\x0b\x32=.com.scalableminds.webknossos.datastore.NamedBoundingBoxProto\x12\x16\n\x0eorganizationId\x18\x0e \x01(\t\x12\x42\n\x04mags\x18\x0f \x03(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12\x41\n\x08segments\x18\x10 \x03(\x0b\x32/.com.scalableminds.webknossos.datastore.Segment\x12\x13\n\x0bmappingName\x18\x11 \x01(\t\x12\x1a\n\x12hasEditableMapping\x18\x12 \x01(\x08\x12K\n\rsegmentGroups\x18\x13 \x03(\x0b\x32\x34.com.scalableminds.webknossos.datastore.SegmentGroup\x12\x17\n\x0fhasSegmentIndex\x18\x14 \x01(\x08\x12l\n!editPositionAdditionalCoordinates\x18\x15 \x03(\x0b\x32\x41.com.scalableminds.webknossos.datastore.AdditionalCoordinateProto\x12S\n\x0e\x61\x64\x64itionalAxes\x18\x16 \x03(\x0b\x32;.com.scalableminds.webknossos.datastore.AdditionalAxisProto\x12\x17\n\x0fmappingIsLocked\x18\x17 \x01(\x08\"N\n\x11\x45lementClassProto\x12\t\n\x05uint8\x10\x01\x12\n\n\x06uint16\x10\x02\x12\n\n\x06uint24\x10\x03\x12\n\n\x06uint32\x10\x04\x12\n\n\x06uint64\x10\x08\"\x89\x01\n\x0cSegmentGroup\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0f\n\x07groupId\x18\x02 \x02(\x05\x12\x46\n\x08\x63hildren\x18\x03 \x03(\x0b\x32\x34.com.scalableminds.webknossos.datastore.SegmentGroup\x12\x12\n\nisExpanded\x18\x04 \x01(\x08\"Z\n\x10VolumeTracingOpt\x12\x46\n\x07tracing\x18\x01 
\x01(\x0b\x32\x35.com.scalableminds.webknossos.datastore.VolumeTracing\"\\\n\x0eVolumeTracings\x12J\n\x08tracings\x18\x01 \x03(\x0b\x32\x38.com.scalableminds.webknossos.datastore.VolumeTracingOpt') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'VolumeTracing_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _SEGMENT._serialized_start=101 + _SEGMENT._serialized_end=517 + _VOLUMETRACING._serialized_start=520 + _VOLUMETRACING._serialized_end=1804 + _VOLUMETRACING_ELEMENTCLASSPROTO._serialized_start=1726 + _VOLUMETRACING_ELEMENTCLASSPROTO._serialized_end=1804 + _SEGMENTGROUP._serialized_start=1807 + _SEGMENTGROUP._serialized_end=1944 + _VOLUMETRACINGOPT._serialized_start=1946 + _VOLUMETRACINGOPT._serialized_end=2036 + _VOLUMETRACINGS._serialized_start=2038 + _VOLUMETRACINGS._serialized_end=2130 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/generated/__init__.py b/tools/migration-unified-annotation-versioning/generated/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py b/tools/migration-unified-annotation-versioning/generated/fossildbapi_pb2.py similarity index 100% rename from tools/migration-unified-annotation-versioning/fossildbapi_pb2.py rename to tools/migration-unified-annotation-versioning/generated/fossildbapi_pb2.py diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py b/tools/migration-unified-annotation-versioning/generated/fossildbapi_pb2_grpc.py similarity index 100% rename from tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py rename to tools/migration-unified-annotation-versioning/generated/fossildbapi_pb2_grpc.py diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py 
index 390548b3daf..58c3483c5ed 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -9,7 +9,8 @@ from rich.progress import track import orjson -import fossildbapi_pb2 as proto +import generated.fossildbapi_pb2 as proto +import generated.SkeletonTracing_pb2 as skeleton from utils import log_since, batch_range from connections import connect_to_fossildb, connect_to_postgres, assert_grpc_success From 6c080322b75e141c963530b0dbf598c3e8db2506 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 25 Nov 2024 14:00:11 +0100 Subject: [PATCH 211/361] migration: set tracing proto fields, create annotationProto --- .../{generated => }/Annotation_pb2.py | 0 .../MetadataEntry_pb2.py | 25 ++++ .../{generated => }/SkeletonTracing_pb2.py | 0 .../{generated => }/VolumeTracing_pb2.py | 0 .../{generated => }/fossildbapi_pb2.py | 0 .../{generated => }/fossildbapi_pb2_grpc.py | 0 .../geometry_pb2.py | 41 +++++++ .../migration.py | 116 +++++++++++++----- 8 files changed, 152 insertions(+), 30 deletions(-) rename tools/migration-unified-annotation-versioning/{generated => }/Annotation_pb2.py (100%) create mode 100644 tools/migration-unified-annotation-versioning/MetadataEntry_pb2.py rename tools/migration-unified-annotation-versioning/{generated => }/SkeletonTracing_pb2.py (100%) rename tools/migration-unified-annotation-versioning/{generated => }/VolumeTracing_pb2.py (100%) rename tools/migration-unified-annotation-versioning/{generated => }/fossildbapi_pb2.py (100%) rename tools/migration-unified-annotation-versioning/{generated => }/fossildbapi_pb2_grpc.py (100%) create mode 100644 tools/migration-unified-annotation-versioning/geometry_pb2.py diff --git a/tools/migration-unified-annotation-versioning/generated/Annotation_pb2.py b/tools/migration-unified-annotation-versioning/Annotation_pb2.py similarity index 100% rename from tools/migration-unified-annotation-versioning/generated/Annotation_pb2.py 
rename to tools/migration-unified-annotation-versioning/Annotation_pb2.py diff --git a/tools/migration-unified-annotation-versioning/MetadataEntry_pb2.py b/tools/migration-unified-annotation-versioning/MetadataEntry_pb2.py new file mode 100644 index 00000000000..44b93186fa6 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/MetadataEntry_pb2.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: MetadataEntry.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13MetadataEntry.proto\x12&com.scalableminds.webknossos.datastore\"w\n\x12MetadataEntryProto\x12\x0b\n\x03key\x18\x01 \x02(\t\x12\x13\n\x0bstringValue\x18\x02 \x01(\t\x12\x11\n\tboolValue\x18\x03 \x01(\x08\x12\x13\n\x0bnumberValue\x18\x04 \x01(\x01\x12\x17\n\x0fstringListValue\x18\x05 \x03(\t') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'MetadataEntry_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _METADATAENTRYPROTO._serialized_start=63 + _METADATAENTRYPROTO._serialized_end=182 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/generated/SkeletonTracing_pb2.py b/tools/migration-unified-annotation-versioning/SkeletonTracing_pb2.py similarity index 100% rename from tools/migration-unified-annotation-versioning/generated/SkeletonTracing_pb2.py rename to tools/migration-unified-annotation-versioning/SkeletonTracing_pb2.py diff --git 
a/tools/migration-unified-annotation-versioning/generated/VolumeTracing_pb2.py b/tools/migration-unified-annotation-versioning/VolumeTracing_pb2.py similarity index 100% rename from tools/migration-unified-annotation-versioning/generated/VolumeTracing_pb2.py rename to tools/migration-unified-annotation-versioning/VolumeTracing_pb2.py diff --git a/tools/migration-unified-annotation-versioning/generated/fossildbapi_pb2.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py similarity index 100% rename from tools/migration-unified-annotation-versioning/generated/fossildbapi_pb2.py rename to tools/migration-unified-annotation-versioning/fossildbapi_pb2.py diff --git a/tools/migration-unified-annotation-versioning/generated/fossildbapi_pb2_grpc.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py similarity index 100% rename from tools/migration-unified-annotation-versioning/generated/fossildbapi_pb2_grpc.py rename to tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py diff --git a/tools/migration-unified-annotation-versioning/geometry_pb2.py b/tools/migration-unified-annotation-versioning/geometry_pb2.py new file mode 100644 index 00000000000..f0858e0a7a2 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/geometry_pb2.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: geometry.proto +"""Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0egeometry.proto\x12&com.scalableminds.webknossos.datastore\"/\n\x0cVec3IntProto\x12\t\n\x01x\x18\x01 \x02(\x05\x12\t\n\x01y\x18\x02 \x02(\x05\x12\t\n\x01z\x18\x03 \x02(\x05\"$\n\x0cVec2IntProto\x12\t\n\x01x\x18\x01 \x02(\x05\x12\t\n\x01y\x18\x02 \x02(\x05\"Z\n\x12ListOfVec3IntProto\x12\x44\n\x06values\x18\x01 \x03(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\"2\n\x0fVec3DoubleProto\x12\t\n\x01x\x18\x01 \x02(\x01\x12\t\n\x01y\x18\x02 \x02(\x01\x12\t\n\x01z\x18\x03 \x02(\x01\"8\n\nColorProto\x12\t\n\x01r\x18\x01 \x02(\x01\x12\t\n\x01g\x18\x02 \x02(\x01\x12\t\n\x01\x62\x18\x03 \x02(\x01\x12\t\n\x01\x61\x18\x04 \x02(\x01\"\x87\x01\n\x10\x42oundingBoxProto\x12\x45\n\x07topLeft\x18\x01 \x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec3IntProto\x12\r\n\x05width\x18\x02 \x02(\x05\x12\x0e\n\x06height\x18\x03 \x02(\x05\x12\r\n\x05\x64\x65pth\x18\x04 \x02(\x05\"\xd6\x01\n\x15NamedBoundingBoxProto\x12\n\n\x02id\x18\x01 \x02(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x11\n\tisVisible\x18\x03 \x01(\x08\x12\x41\n\x05\x63olor\x18\x04 \x01(\x0b\x32\x32.com.scalableminds.webknossos.datastore.ColorProto\x12M\n\x0b\x62oundingBox\x18\x05 \x02(\x0b\x32\x38.com.scalableminds.webknossos.datastore.BoundingBoxProto\"8\n\x19\x41\x64\x64itionalCoordinateProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05value\x18\x02 \x02(\x05\"x\n\x13\x41\x64\x64itionalAxisProto\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\r\n\x05index\x18\x02 \x02(\x05\x12\x44\n\x06\x62ounds\x18\x03 
\x02(\x0b\x32\x34.com.scalableminds.webknossos.datastore.Vec2IntProto') + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'geometry_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + + DESCRIPTOR._options = None + _VEC3INTPROTO._serialized_start=58 + _VEC3INTPROTO._serialized_end=105 + _VEC2INTPROTO._serialized_start=107 + _VEC2INTPROTO._serialized_end=143 + _LISTOFVEC3INTPROTO._serialized_start=145 + _LISTOFVEC3INTPROTO._serialized_end=235 + _VEC3DOUBLEPROTO._serialized_start=237 + _VEC3DOUBLEPROTO._serialized_end=287 + _COLORPROTO._serialized_start=289 + _COLORPROTO._serialized_end=345 + _BOUNDINGBOXPROTO._serialized_start=348 + _BOUNDINGBOXPROTO._serialized_end=483 + _NAMEDBOUNDINGBOXPROTO._serialized_start=486 + _NAMEDBOUNDINGBOXPROTO._serialized_end=700 + _ADDITIONALCOORDINATEPROTO._serialized_start=702 + _ADDITIONALCOORDINATEPROTO._serialized_end=758 + _ADDITIONALAXISPROTO._serialized_start=760 + _ADDITIONALAXISPROTO._serialized_end=880 +# @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 58c3483c5ed..5434878a257 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -9,8 +9,10 @@ from rich.progress import track import orjson -import generated.fossildbapi_pb2 as proto -import generated.SkeletonTracing_pb2 as skeleton +import fossildbapi_pb2 as proto +import VolumeTracing_pb2 as Volume +import SkeletonTracing_pb2 as Skeleton +import Annotation_pb2 as AnnotationProto from utils import log_since, batch_range from connections import connect_to_fossildb, connect_to_postgres, assert_grpc_success @@ -25,7 +27,7 @@ class Migration: def __init__(self, args): self.args = args self.src_stub = connect_to_fossildb("localhost:7155") - # self.dst_stub = 
connect_to_fossildb("localhost:7199") + self.dst_stub = None # TODO def run(self): start_time = datetime.datetime.now() @@ -40,33 +42,61 @@ def migrate_annotation(self, annotation): logger.info(f"Migrating annotation {annotation['_id']} ...") # layerId → {version_before → version_after} before = time.time() - layer_version_mapping = self.migrate_updates(annotation) + layer_version_mapping, latest_unified_version = self.migrate_updates(annotation) self.migrate_materialized_layers(annotation, layer_version_mapping) + self.create_and_save_annotation_proto(annotation, latest_unified_version) log_since(before, "") - def migrate_updates(self, annotation) -> LayerVersionMapping: + def migrate_updates(self, annotation) -> Tuple[LayerVersionMapping, int]: + batch_size = 1000 unified_version = 0 version_mapping = {} for tracing_id, layer_type in annotation["layers"].items(): collection = self.update_collection_for_layer_type(layer_type) version_mapping_for_layer = {0: 0} newest_version = self.get_newest_version(tracing_id, collection) - editable_mapping_id_opt = None # TODO parse from newest_version (value needs to be parsed as proto) - for batch_start, batch_end in batch_range(newest_version, 1000): + editable_mapping_id = self.get_editable_mapping_name(tracing_id, layer_type) + for batch_start, batch_end in batch_range(newest_version, batch_size): update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) for version, update_group in update_groups: update_group = self.process_update_group(tracing_id, layer_type, update_group) unified_version += 1 version_mapping_for_layer[version] = unified_version - self.save_update_group(unified_version, update_group) + self.save_update_group(annotation['_id'], unified_version, update_group) version_mapping[tracing_id] = version_mapping_for_layer - if editable_mapping_id_opt is not None: - # TODO migrate editable mapping updates - pass + if editable_mapping_id is not None: + version_mapping_for_mapping = {0: 0} 
+ for batch_start, batch_end in batch_range(newest_version, batch_size): + mapping_update_groups = self.get_update_batch(editable_mapping_id, "editableMappingUpdates", batch_start, batch_end) + for version, update_group in mapping_update_groups: + unified_version += 1 + version_mapping_for_mapping[version] = unified_version + self.save_update_group(annotation['_id'], unified_version, update_group) + version_mapping[editable_mapping_id] = version_mapping_for_mapping # TODO interleave updates rather than concat # TODO handle existing revertToVersion update actions - return version_mapping + return version_mapping, unified_version + + def get_editable_mapping_name(self, tracing_id: str, layer_type: str) -> Optional[str]: + if layer_type == "Skeleton": + return None + tracing_raw = self.get_newest_tracing_raw(tracing_id, "volumes") + if tracing_raw is None: + return None + volume = Volume.VolumeTracing() + volume.ParseFromString(tracing_raw) + if volume.hasEditableMapping: + return volume.mappingName + return None + + def get_newest_tracing_raw(self, tracing_id, collection) -> Optional[bytes]: + getReply = self.src_stub.Get( + proto.GetRequest(collection=collection, key=tracing_id, mayBeEmpty=True) + ) + if getReply.success: + return getReply.value + return None def process_update_group(self, tracing_id: str, layer_type: str, update_group_raw: bytes) -> bytes: update_group_parsed = orjson.loads(update_group_raw) @@ -88,9 +118,8 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra return orjson.dumps(update_group_parsed) - def save_update_group(self, version, update_group_raw: bytes) -> None: - print(f"saving update group: {update_group_raw}") - return + def save_update_group(self, annotation_id: str, version: int, update_group_raw: bytes) -> None: + self.save_bytes(collection="annotationUpdates", key=annotation_id, version=version, value=update_group_raw) def get_newest_version(self, tracing_id: str, collection: str) -> int: getReply = 
self.src_stub.Get( @@ -100,9 +129,9 @@ def get_newest_version(self, tracing_id: str, collection: str) -> int: return getReply.actualVersion return 0 - def get_update_batch(self, tracing_id: str, collection: str, batch_start: int, batch_end_inclusive: int) -> List[Tuple[int, bytes]]: + def get_update_batch(self, tracing_or_mapping_id: str, collection: str, batch_start: int, batch_end_inclusive: int) -> List[Tuple[int, bytes]]: reply = self.src_stub.GetMultipleVersions( - proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, oldestVersion=batch_start, newestVersion=batch_end_inclusive) + proto.GetMultipleVersionsRequest(collection=collection, key=tracing_or_mapping_id, oldestVersion=batch_start, newestVersion=batch_end_inclusive) ) assert_grpc_success(reply) reply.versions.reverse() @@ -128,13 +157,26 @@ def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_ver self.migrate_editable_mapping(tracing_id, layer_version_mapping) def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): - self.migrate_versions_untouched("skeletons", tracing_id, layer_version_mapping) + collection = "skeletons" + materialized_versions = self.list_versions(collection, tracing_id) + for materialized_version in materialized_versions: + value_bytes = self.get_bytes(collection, tracing_id, materialized_version) + skeleton = Skeleton.SkeletonTracing() + skeleton.ParseFromString(value_bytes) + new_version = layer_version_mapping[tracing_id][materialized_version] + skeleton.version = new_version + self.save_bytes(collection, tracing_id, new_version, skeleton.SerializeToString()) - def migrate_versions_untouched(self, collection: str, key: str, layer_version_mapping: LayerVersionMapping): - materialized_versions = self.list_versions(collection, key) + def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + collection = "volumes" + materialized_versions = 
self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: - value_bytes = self.get_bytes(collection, key, materialized_version) - self.save_bytes(collection, key, layer_version_mapping[key][materialized_version], value_bytes) + value_bytes = self.get_bytes(collection, tracing_id, materialized_version) + volume = Volume.VolumeTracing() + volume.ParseFromString(value_bytes) + new_version = layer_version_mapping[tracing_id][materialized_version] + volume.version = new_version + self.save_bytes(collection, tracing_id, new_version, volume.SerializeToString()) def list_versions(self, collection, key) -> List[int]: reply = self.src_stub.ListVersions(proto.ListVersionsRequest(collection=collection, key=key)) @@ -147,11 +189,9 @@ def get_bytes(self, collection: str, key: str, version: int) -> bytes: return reply.value def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> None: - # TODO - pass - - def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): - self.migrate_versions_untouched("volumes", tracing_id, layer_version_mapping) + if self.dst_stub is not None: + reply = self.dst.stub.Put(proto.PutRequest(collection=collection, key=key, version=version)) + assert_grpc_success(reply) def migrate_volume_buckets(self, layer, layer_version_mapping): pass @@ -173,8 +213,24 @@ def migrate_editable_mapping_agglomerate_to_graph(self, layer, layer_version_map def migrate_editable_mapping_segment_to_agglomerate(self, layer, layer_version_mapping): pass - def insert_annotation_protos(self, annotation, layer_version_mapping): - pass + def create_and_save_annotation_proto(self, annotation, latest_unified_version: int): + version_step = 1000 + for version in range(0, latest_unified_version, version_step): + annotationProto = AnnotationProto.AnnotationProto() + annotationProto.name = annotation["name"] + annotationProto.description = annotation["description"] + annotationProto.version = 
version + annotationProto.earliestAccessibleVersion = 0 + for tracing_id, tracing_type in annotation["layers"].items(): + layer_proto = AnnotationProto.AnnotationLayerProto() + layer_proto.tracingId = tracing_id + layer_proto.name = "TODO" # TODO fetch this from postgres also + layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Skeleton + if tracing_type == "Volume": + layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Volume + layer_proto.type = layer_type_proto + annotationProto.annotationLayers.append(layer_proto) + self.save_bytes(collection="annotations", key=annotation["_id"], version=version, value=annotationProto.SerializeToString()) def read_annotation_list(self, start_time: datetime): before = time.time() @@ -190,7 +246,7 @@ def read_annotation_list(self, start_time: datetime): page_count = math.ceil(annotation_count / page_size) for page_num in track(range(page_count), total=page_count, description=f"Loading annotation infos ..."): query = f""" - SELECT a._id, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers + SELECT a._id, a.name, a.description, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers FROM webknossos.annotation_layers al JOIN webknossos.annotations a on al._annotation = a._id WHERE a.modified < {modified_str} From 2772692f25784fa915b56d40e31532e83815989d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 25 Nov 2024 14:42:47 +0100 Subject: [PATCH 212/361] migrate volume buckets --- .../migration.py | 32 +++++++++++++++++-- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 5434878a257..c1eef0d5a3c 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -190,11 +190,33 @@ def get_bytes(self, collection: str, key: str, version: int) -> bytes: def save_bytes(self, 
collection: str, key: str, version: int, value: bytes) -> None: if self.dst_stub is not None: - reply = self.dst.stub.Put(proto.PutRequest(collection=collection, key=key, version=version)) + reply = self.dst_stub.Put(proto.PutRequest(collection=collection, key=key, version=version)) assert_grpc_success(reply) - def migrate_volume_buckets(self, layer, layer_version_mapping): - pass + def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + collection = "volumeData" + list_keys_page_size = 5000 + versions_page_size = 500 + current_start_after_key = tracing_id + "/0" # lexicographically before /1 (or any /mag) + while True: + list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key)) + assert_grpc_success(list_keys_reply) + if len(list_keys_reply.keys) == 0: + # We iterated towards the very end of the volumeData collection + return + for key in list_keys_reply.keys: + if key.startswith(tracing_id): + # TODO paginate versions + get_versions_reply = self.src_stub.GetMultipleVersions(proto.GetMultipleVersionsRequest(collection=collection, key=key)) + assert_grpc_success(get_versions_reply) + new_key = self.remove_morton_index(key) + for version, value in zip(get_versions_reply.versions, get_versions_reply.values): + new_version = layer_version_mapping[tracing_id][version] + self.save_bytes(collection, new_key, new_version, value) + current_start_after_key = key + else: + # We iterated past the buckets of the current tracing + return def migrate_segment_index(self, layer, layer_version_mapping): pass @@ -259,3 +281,7 @@ def read_annotation_list(self, start_time: datetime): annotations += cursor.fetchall() log_since(before, "Loading annotations") return annotations + + def remove_morton_index(self, bucket_key: str) -> str: + # TODO + return bucket_key From dcc08e60a64c99d6969297b67059c85bf3157ea5 Mon Sep 17 00:00:00 2001 From: Florian M 
Date: Mon, 25 Nov 2024 15:26:53 +0100 Subject: [PATCH 213/361] migration: annotation layer name, volume segment index --- .../migration.py | 34 +++++++++++-------- .../utils.py | 1 + 2 files changed, 21 insertions(+), 14 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index c1eef0d5a3c..980d82a0623 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -5,7 +5,7 @@ import logging import datetime import time -from typing import Dict, Tuple, List, Optional +from typing import Dict, Tuple, List, Optional, Callable from rich.progress import track import orjson @@ -194,10 +194,13 @@ def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> N assert_grpc_success(reply) def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): - collection = "volumeData" + self.migrate_all_versions_and_keys_with_prefix("volumeData", tracing_id, layer_version_mapping, transform_key=self.remove_morton_index) + + def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): list_keys_page_size = 5000 versions_page_size = 500 - current_start_after_key = tracing_id + "/0" # lexicographically before /1 (or any /mag) + current_start_after_key = tracing_id + "." # . 
is lexicographically before / + newest_tracing_version = max(layer_version_mapping[tracing_id].keys()) while True: list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key)) assert_grpc_success(list_keys_reply) @@ -206,20 +209,23 @@ def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVe return for key in list_keys_reply.keys: if key.startswith(tracing_id): - # TODO paginate versions - get_versions_reply = self.src_stub.GetMultipleVersions(proto.GetMultipleVersionsRequest(collection=collection, key=key)) - assert_grpc_success(get_versions_reply) - new_key = self.remove_morton_index(key) - for version, value in zip(get_versions_reply.versions, get_versions_reply.values): - new_version = layer_version_mapping[tracing_id][version] - self.save_bytes(collection, new_key, new_version, value) + for version_range_start, version_range_end in batch_range(newest_tracing_version, versions_page_size): + get_versions_reply = self.src_stub.GetMultipleVersions(proto.GetMultipleVersionsRequest(collection=collection, key=key, oldestVersion=version_range_start, newestVersion=version_range_end)) + assert_grpc_success(get_versions_reply) + new_key = key + if transform_key is not None: + new_key = transform_key(key) + for version, value in zip(get_versions_reply.versions, get_versions_reply.values): + new_version = layer_version_mapping[tracing_id][version] + self.save_bytes(collection, new_key, new_version, value) current_start_after_key = key else: # We iterated past the buckets of the current tracing return - def migrate_segment_index(self, layer, layer_version_mapping): - pass + def migrate_segment_index(self, tracing_id, layer_version_mapping): + self.migrate_all_versions_and_keys_with_prefix("volumeSegmentIndex", tracing_id, layer_version_mapping, transform_key=None) + def migrate_editable_mapping(self, layer, layer_version_mapping): 
self.migrate_editable_mapping_info(layer, layer_version_mapping) @@ -246,7 +252,7 @@ def create_and_save_annotation_proto(self, annotation, latest_unified_version: i for tracing_id, tracing_type in annotation["layers"].items(): layer_proto = AnnotationProto.AnnotationLayerProto() layer_proto.tracingId = tracing_id - layer_proto.name = "TODO" # TODO fetch this from postgres also + layer_proto.name = annotation["layernames"][tracing_id] layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Skeleton if tracing_type == "Volume": layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Volume @@ -268,7 +274,7 @@ def read_annotation_list(self, start_time: datetime): page_count = math.ceil(annotation_count / page_size) for page_num in track(range(page_count), total=page_count, description=f"Loading annotation infos ..."): query = f""" - SELECT a._id, a.name, a.description, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers + SELECT a._id, a.name, a.description, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers, JSON_OBJECT_AGG(al.tracingId, al.name) AS layerNames FROM webknossos.annotation_layers al JOIN webknossos.annotations a on al._annotation = a._id WHERE a.modified < {modified_str} diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py index 3c7ff86026f..68e788279e1 100644 --- a/tools/migration-unified-annotation-versioning/utils.py +++ b/tools/migration-unified-annotation-versioning/utils.py @@ -22,6 +22,7 @@ def log_since(before, label: str) -> None: logger.info(f"{label} took {diff:.2f} s") +# TODO should we go to limit + 1? 
def batch_range( limit: int, batch_size: int ) -> Iterator[Tuple[int, int]]: From b5d3b48e3a6cd805be6beedd185e989704463eb2 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 25 Nov 2024 15:42:57 +0100 Subject: [PATCH 214/361] migrate editable mapping distributed elements --- .../migration.py | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 980d82a0623..cde7d10de42 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -190,7 +190,7 @@ def get_bytes(self, collection: str, key: str, version: int) -> bytes: def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> None: if self.dst_stub is not None: - reply = self.dst_stub.Put(proto.PutRequest(collection=collection, key=key, version=version)) + reply = self.dst_stub.Put(proto.PutRequest(collection=collection, key=key, version=version, value=value)) assert_grpc_success(reply) def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): @@ -205,7 +205,7 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key)) assert_grpc_success(list_keys_reply) if len(list_keys_reply.keys) == 0: - # We iterated towards the very end of the volumeData collection + # We iterated towards the very end of the collection return for key in list_keys_reply.keys: if key.startswith(tracing_id): @@ -220,26 +220,35 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: self.save_bytes(collection, new_key, new_version, value) current_start_after_key = key else: - # We iterated past the buckets of the current tracing + # We iterated past the 
elements of the current tracing return def migrate_segment_index(self, tracing_id, layer_version_mapping): self.migrate_all_versions_and_keys_with_prefix("volumeSegmentIndex", tracing_id, layer_version_mapping, transform_key=None) - - def migrate_editable_mapping(self, layer, layer_version_mapping): - self.migrate_editable_mapping_info(layer, layer_version_mapping) - self.migrate_editable_mapping_agglomerate_to_graph(layer, layer_version_mapping) - self.migrate_editable_mapping_segment_to_agglomerate(layer, layer_version_mapping) + def migrate_editable_mapping(self, tracing_id, layer_version_mapping): + self.migrate_editable_mapping_info(tracing_id, layer_version_mapping) + self.migrate_editable_mapping_agglomerate_to_graph(tracing_id, layer_version_mapping) + self.migrate_editable_mapping_segment_to_agglomerate(tracing_id, layer_version_mapping) def migrate_editable_mapping_info(self, layer, layer_version_mapping): pass - def migrate_editable_mapping_agglomerate_to_graph(self, layer, layer_version_mapping): - pass + def migrate_editable_mapping_agglomerate_to_graph(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_all_versions_and_keys_with_prefix( + "editableMappingsAgglomerateToGraph", + tracing_id, + layer_version_mapping, + None + ) - def migrate_editable_mapping_segment_to_agglomerate(self, layer, layer_version_mapping): - pass + def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + self.migrate_all_versions_and_keys_with_prefix( + "editableMappingsSegmentToAgglomerate", + tracing_id, + layer_version_mapping, + None + ) def create_and_save_annotation_proto(self, annotation, latest_unified_version: int): version_step = 1000 @@ -260,7 +269,7 @@ def create_and_save_annotation_proto(self, annotation, latest_unified_version: i annotationProto.annotationLayers.append(layer_proto) self.save_bytes(collection="annotations", key=annotation["_id"], version=version, 
value=annotationProto.SerializeToString()) - def read_annotation_list(self, start_time: datetime): + def read_annotation_list(self, start_time): before = time.time() logger.info("Determining annotation count from postgres...") page_size = 10000 From ef191d1e17b692c73b8d96db34cee414a55c59bb Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 26 Nov 2024 13:33:26 +0100 Subject: [PATCH 215/361] migration: remove morton index, migrate editablemappinginfo objects --- .../migration.py | 58 +++++++++++++------ 1 file changed, 41 insertions(+), 17 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index cde7d10de42..1ab93dc5e44 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -19,7 +19,8 @@ logger = logging.getLogger(__name__) -LayerVersionMapping = Dict[str, Dict[int, int]] +LayerVersionMapping = Dict[str, Dict[int, int]] # tracing id to (old version to new version) +MappingIdMap = Dict[str, str] # tracing id to editable mapping id class Migration: @@ -42,12 +43,22 @@ def migrate_annotation(self, annotation): logger.info(f"Migrating annotation {annotation['_id']} ...") # layerId → {version_before → version_after} before = time.time() - layer_version_mapping, latest_unified_version = self.migrate_updates(annotation) - self.migrate_materialized_layers(annotation, layer_version_mapping) + mapping_id_map = self.build_mapping_id_map(annotation) + layer_version_mapping, latest_unified_version = self.migrate_updates(annotation, mapping_id_map) + self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) self.create_and_save_annotation_proto(annotation, latest_unified_version) log_since(before, "") - def migrate_updates(self, annotation) -> Tuple[LayerVersionMapping, int]: + def build_mapping_id_map(self, annotation) -> MappingIdMap: + mapping_id_map = {} + for tracing_id, 
layer_type in annotation["layers"].items(): + if layer_type == "Volume": + editable_mapping_id = self.get_editable_mapping_id(tracing_id, layer_type) + if editable_mapping_id is not None: + mapping_id_map[tracing_id] = editable_mapping_id + return mapping_id_map + + def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> Tuple[LayerVersionMapping, int]: batch_size = 1000 unified_version = 0 version_mapping = {} @@ -55,7 +66,6 @@ def migrate_updates(self, annotation) -> Tuple[LayerVersionMapping, int]: collection = self.update_collection_for_layer_type(layer_type) version_mapping_for_layer = {0: 0} newest_version = self.get_newest_version(tracing_id, collection) - editable_mapping_id = self.get_editable_mapping_name(tracing_id, layer_type) for batch_start, batch_end in batch_range(newest_version, batch_size): update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) for version, update_group in update_groups: @@ -64,7 +74,8 @@ def migrate_updates(self, annotation) -> Tuple[LayerVersionMapping, int]: version_mapping_for_layer[version] = unified_version self.save_update_group(annotation['_id'], unified_version, update_group) version_mapping[tracing_id] = version_mapping_for_layer - if editable_mapping_id is not None: + if tracing_id in mapping_id_map: + editable_mapping_id = mapping_id_map[tracing_id] version_mapping_for_mapping = {0: 0} for batch_start, batch_end in batch_range(newest_version, batch_size): mapping_update_groups = self.get_update_batch(editable_mapping_id, "editableMappingUpdates", batch_start, batch_end) @@ -78,7 +89,7 @@ def migrate_updates(self, annotation) -> Tuple[LayerVersionMapping, int]: # TODO handle existing revertToVersion update actions return version_mapping, unified_version - def get_editable_mapping_name(self, tracing_id: str, layer_type: str) -> Optional[str]: + def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> Optional[str]: if layer_type == "Skeleton": return None 
tracing_raw = self.get_newest_tracing_raw(tracing_id, "volumes") @@ -143,21 +154,22 @@ def update_collection_for_layer_type(self, layer_type): return "skeletonUpdates" return "volumeUpdates" - def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping): + def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): for tracing_id, tracing_type in annotation["layers"].items(): - self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping) + self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping, mapping_id_map) - def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping): + def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): if layer_type == "Skeleton": self.migrate_skeleton_proto(tracing_id, layer_version_mapping) if layer_type == "Volume": self.migrate_volume_proto(tracing_id, layer_version_mapping) self.migrate_volume_buckets(tracing_id, layer_version_mapping) self.migrate_segment_index(tracing_id, layer_version_mapping) - self.migrate_editable_mapping(tracing_id, layer_version_mapping) + self.migrate_editable_mapping(tracing_id, layer_version_mapping, mapping_id_map) def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): collection = "skeletons" + # todo paginate materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: value_bytes = self.get_bytes(collection, tracing_id, materialized_version) @@ -169,6 +181,7 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): collection = "volumes" + # todo paginate materialized_versions = 
self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: value_bytes = self.get_bytes(collection, tracing_id, materialized_version) @@ -226,13 +239,22 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: def migrate_segment_index(self, tracing_id, layer_version_mapping): self.migrate_all_versions_and_keys_with_prefix("volumeSegmentIndex", tracing_id, layer_version_mapping, transform_key=None) - def migrate_editable_mapping(self, tracing_id, layer_version_mapping): - self.migrate_editable_mapping_info(tracing_id, layer_version_mapping) + def migrate_editable_mapping(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): + self.migrate_editable_mapping_info(tracing_id, layer_version_mapping, mapping_id_map) self.migrate_editable_mapping_agglomerate_to_graph(tracing_id, layer_version_mapping) self.migrate_editable_mapping_segment_to_agglomerate(tracing_id, layer_version_mapping) - def migrate_editable_mapping_info(self, layer, layer_version_mapping): - pass + def migrate_editable_mapping_info(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): + if tracing_id not in mapping_id_map: + return + mapping_id = mapping_id_map[tracing_id] + collection = "editableMappingsInfo" + # todo paginate + materialized_versions = self.list_versions(collection, mapping_id) + for materialized_version in materialized_versions: + value_bytes = self.get_bytes(collection, mapping_id, materialized_version) + new_version = layer_version_mapping[mapping_id][materialized_version] + self.save_bytes(collection, mapping_id, new_version, value_bytes) def migrate_editable_mapping_agglomerate_to_graph(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): self.migrate_all_versions_and_keys_with_prefix( @@ -298,5 +320,7 @@ def read_annotation_list(self, start_time): return annotations def remove_morton_index(self, bucket_key: str) -> 
str: - # TODO - return bucket_key + first_slash_index = bucket_key.index('/') + second_slash_index = bucket_key.index('/', first_slash_index + 1) + first_bracket_index = bucket_key.index('[') + return bucket_key[:second_slash_index + 1] + bucket_key[first_bracket_index:] From ab5cddb775695966dd5163a935ffc12b5c649892 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 26 Nov 2024 14:02:53 +0100 Subject: [PATCH 216/361] migration: switch from orjson to msgspec --- .../migration.py | 11 +++++------ .../requirements.txt | 2 +- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 1ab93dc5e44..913d09ecd9c 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -7,7 +7,7 @@ import time from typing import Dict, Tuple, List, Optional, Callable from rich.progress import track -import orjson +import msgspec import fossildbapi_pb2 as proto import VolumeTracing_pb2 as Volume @@ -29,6 +29,8 @@ def __init__(self, args): self.args = args self.src_stub = connect_to_fossildb("localhost:7155") self.dst_stub = None # TODO + self.json_encoder = msgspec.json.Encoder() + self.json_decoder = msgspec.json.Decoder() def run(self): start_time = datetime.datetime.now() @@ -110,7 +112,7 @@ def get_newest_tracing_raw(self, tracing_id, collection) -> Optional[bytes]: return None def process_update_group(self, tracing_id: str, layer_type: str, update_group_raw: bytes) -> bytes: - update_group_parsed = orjson.loads(update_group_raw) + update_group_parsed = self.json_decoder.decode(update_group_raw) # TODO handle existing revertToVersion update actions @@ -127,7 +129,7 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra if not name == "updateTdCamera": update["value"]["actionTracinId"] = tracing_id - return orjson.dumps(update_group_parsed) + return 
self.json_encoder.encode(update_group_parsed) def save_update_group(self, annotation_id: str, version: int, update_group_raw: bytes) -> None: self.save_bytes(collection="annotationUpdates", key=annotation_id, version=version, value=update_group_raw) @@ -169,7 +171,6 @@ def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_ver def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): collection = "skeletons" - # todo paginate materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: value_bytes = self.get_bytes(collection, tracing_id, materialized_version) @@ -181,7 +182,6 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): collection = "volumes" - # todo paginate materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: value_bytes = self.get_bytes(collection, tracing_id, materialized_version) @@ -249,7 +249,6 @@ def migrate_editable_mapping_info(self, tracing_id: str, layer_version_mapping: return mapping_id = mapping_id_map[tracing_id] collection = "editableMappingsInfo" - # todo paginate materialized_versions = self.list_versions(collection, mapping_id) for materialized_version in materialized_versions: value_bytes = self.get_bytes(collection, mapping_id, materialized_version) diff --git a/tools/migration-unified-annotation-versioning/requirements.txt b/tools/migration-unified-annotation-versioning/requirements.txt index 28223d2d908..b9ccd512c41 100644 --- a/tools/migration-unified-annotation-versioning/requirements.txt +++ b/tools/migration-unified-annotation-versioning/requirements.txt @@ -3,4 +3,4 @@ argparse psycopg2 protobuf rich -orjson +msgspec From e2e513fd2e2214b6d3c98aaa14f0aa70ec3fee7f Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 26 Nov 2024 
14:54:11 +0100 Subject: [PATCH 217/361] migration: add cli args, actually write to target fossilDB --- .../connections.py | 4 ++-- tools/migration-unified-annotation-versioning/main.py | 10 +++++++++- .../migration.py | 10 ++++++---- 3 files changed, 17 insertions(+), 7 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/connections.py b/tools/migration-unified-annotation-versioning/connections.py index 084fe641262..9e705f50b87 100644 --- a/tools/migration-unified-annotation-versioning/connections.py +++ b/tools/migration-unified-annotation-versioning/connections.py @@ -9,11 +9,11 @@ logger = logging.getLogger(__name__) -def connect_to_fossildb(host): +def connect_to_fossildb(host: str, label: str): max_message_length = 2147483647 channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", max_message_length), ("grpc.max_receive_message_length", max_message_length)]) stub = proto_rpc.FossilDBStub(channel) - test_fossildb_health(stub, f"Fossildb at {host}") + test_fossildb_health(stub, f"{label} FossilDB at {host}") return stub diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 7d0d9c66341..8a67408749c 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -1,6 +1,8 @@ #!/usr/bin/env python3 import logging +import argparse + from migration import Migration from utils import setup_logging @@ -10,7 +12,13 @@ def main(): setup_logging() logger.info("Hello from Unified Annotation Versioning Migration!") - args = {} + parser = argparse.ArgumentParser() + parser.add_argument("--src", type=str, help="Source fossildb host and port. 
Example: localhost:7155", required=True) + parser.add_argument("--dst", type=str, help="Destination fossildb host and port", required=False) + parser.add_argument("--dry", help="Only read and process data, do not write out results", action="store_true") + args = parser.parse_args() + if args.dst is None and not args.dry: + parser.error("At least one of --dry or --dst is required") migration = Migration(args) migration.run() diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 913d09ecd9c..290a34ed8c6 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -26,9 +26,12 @@ class Migration: def __init__(self, args): + logger.info(f"Initializing migration with args {args} ...") self.args = args - self.src_stub = connect_to_fossildb("localhost:7155") - self.dst_stub = None # TODO + self.src_stub = connect_to_fossildb(args.src, "source") + self.dst_stub = None + if not args.dry: + self.dst_stub = connect_to_fossildb(args.dst, "destination") self.json_encoder = msgspec.json.Encoder() self.json_decoder = msgspec.json.Decoder() @@ -43,7 +46,6 @@ def run(self): def migrate_annotation(self, annotation): logger.info(f"Migrating annotation {annotation['_id']} ...") - # layerId → {version_before → version_after} before = time.time() mapping_id_map = self.build_mapping_id_map(annotation) layer_version_mapping, latest_unified_version = self.migrate_updates(annotation, mapping_id_map) @@ -127,7 +129,7 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra update["name"] = f"updateUserBoundingBoxVisibilityIn{layer_type}Tracing" if not name == "updateTdCamera": - update["value"]["actionTracinId"] = tracing_id + update["value"]["actionTracingId"] = tracing_id return self.json_encoder.encode(update_group_parsed) From d314f8a9ba93ee3c68ead820d533906450d07677 Mon Sep 17 00:00:00 2001 From: 
Florian M Date: Wed, 27 Nov 2024 10:17:29 +0100 Subject: [PATCH 218/361] parallelize, add postgres connection parameter --- .../connections.py | 26 ++++++- .../main.py | 2 + .../migration.py | 72 +++++++++++++------ .../utils.py | 6 +- 4 files changed, 79 insertions(+), 27 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/connections.py b/tools/migration-unified-annotation-versioning/connections.py index 9e705f50b87..d5b85704ac9 100644 --- a/tools/migration-unified-annotation-versioning/connections.py +++ b/tools/migration-unified-annotation-versioning/connections.py @@ -2,6 +2,9 @@ import psycopg2 import psycopg2.extras import logging +import re +from typing import Dict, Any +import os import fossildbapi_pb2 as proto import fossildbapi_pb2_grpc as proto_rpc @@ -20,7 +23,7 @@ def connect_to_fossildb(host: str, label: str): def test_fossildb_health(stub, label): reply = stub.Health(proto.HealthRequest()) assert_grpc_success(reply) - logger.info('Successfully connected to ' + label) + logger.info(f"Successfully connected to {label}") def assert_grpc_success(reply): @@ -28,5 +31,22 @@ def assert_grpc_success(reply): raise Exception("reply.success failed: " + reply.errorMessage) -def connect_to_postgres(): - return psycopg2.connect(host="localhost", port=5432, database="webknossos", user='postgres', password='postgres') +def connect_to_postgres(postgres_config: str): + parsed = parse_connection_string(postgres_config) + password = os.environ.get("PG_PASSWORD", "postgres") + return psycopg2.connect(host=parsed["host"], port=parsed["port"], database=parsed["database"], user=parsed["user"], password="postgres") + + +def parse_connection_string(connection_string: str) -> Dict[str, Any]: + pattern = r"^(?P\w+)@(?!.*@)(?P[^:/]+)(?::(?P\d+))?(?P/[^ ]*)?$" + + match = re.match(pattern, connection_string) + if match: + return { + "user": match.group("user"), + "host": match.group("host"), + "port": int(match.group("port")), + "database": 
match.group("database").lstrip("/") + } + else: + raise ValueError("Invalid postgres connection string, needs to be user@host:port/database.") diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 8a67408749c..eb49acb1dad 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -16,6 +16,8 @@ def main(): parser.add_argument("--src", type=str, help="Source fossildb host and port. Example: localhost:7155", required=True) parser.add_argument("--dst", type=str, help="Destination fossildb host and port", required=False) parser.add_argument("--dry", help="Only read and process data, do not write out results", action="store_true") + parser.add_argument("--num_threads", help="Number of threads to migrate the annotations in parallel", type=int, default=1) + parser.add_argument("--postgres", help="Postgres connection specifier.", type=str, default="postgres@localhost:5432/webknossos") args = parser.parse_args() if args.dst is None and not args.dry: parser.error("At least one of --dry or --dst is required") diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 290a34ed8c6..a4cf83eb4c9 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -8,6 +8,8 @@ from typing import Dict, Tuple, List, Optional, Callable from rich.progress import track import msgspec +import concurrent.futures +import threading import fossildbapi_pb2 as proto import VolumeTracing_pb2 as Volume @@ -34,14 +36,22 @@ def __init__(self, args): self.dst_stub = connect_to_fossildb(args.dst, "destination") self.json_encoder = msgspec.json.Encoder() self.json_decoder = msgspec.json.Decoder() + self.done_count = None + self.done_count_lock = threading.Lock() + self.total_count = None def run(self): 
start_time = datetime.datetime.now() before = time.time() logger.info(f"Using start time {start_time}") annotations = self.read_annotation_list(start_time) - for annotation in annotations: - self.migrate_annotation(annotation) + self.done_count = 0 + self.total_count = len(annotations) + + before_submit = time.time() + with concurrent.futures.ThreadPoolExecutor(max_workers=self.args.num_threads) as executor: + executor.map(self.migrate_annotation, annotations) + log_since(before_submit, f"Submitting {self.total_count} futures to ThreadPoolExecutor via map") log_since(before, "Migrating all the things") def migrate_annotation(self, annotation): @@ -51,7 +61,10 @@ def migrate_annotation(self, annotation): layer_version_mapping, latest_unified_version = self.migrate_updates(annotation, mapping_id_map) self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) self.create_and_save_annotation_proto(annotation, latest_unified_version) - log_since(before, "") + with self.done_count_lock: + self.done_count += 1 + log_since(before, "", self.get_progress()) + def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map = {} @@ -175,23 +188,27 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe collection = "skeletons" materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: - value_bytes = self.get_bytes(collection, tracing_id, materialized_version) - skeleton = Skeleton.SkeletonTracing() - skeleton.ParseFromString(value_bytes) new_version = layer_version_mapping[tracing_id][materialized_version] - skeleton.version = new_version - self.save_bytes(collection, tracing_id, new_version, skeleton.SerializeToString()) + value_bytes = self.get_bytes(collection, tracing_id, materialized_version) + if materialized_version != new_version: + skeleton = Skeleton.SkeletonTracing() + skeleton.ParseFromString(value_bytes) + skeleton.version = new_version + 
value_bytes = skeleton.SerializeToString() + self.save_bytes(collection, tracing_id, new_version, value_bytes) def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): collection = "volumes" materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: - value_bytes = self.get_bytes(collection, tracing_id, materialized_version) - volume = Volume.VolumeTracing() - volume.ParseFromString(value_bytes) new_version = layer_version_mapping[tracing_id][materialized_version] - volume.version = new_version - self.save_bytes(collection, tracing_id, new_version, volume.SerializeToString()) + value_bytes = self.get_bytes(collection, tracing_id, materialized_version) + if materialized_version != new_version: + volume = Volume.VolumeTracing() + volume.ParseFromString(value_bytes) + volume.version = new_version + value_bytes = volume.SerializeToString() + self.save_bytes(collection, tracing_id, new_version, value_bytes) def list_versions(self, collection, key) -> List[int]: reply = self.src_stub.ListVersions(proto.ListVersionsRequest(collection=collection, key=key)) @@ -296,7 +313,7 @@ def read_annotation_list(self, start_time): before = time.time() logger.info("Determining annotation count from postgres...") page_size = 10000 - connection = connect_to_postgres() + connection = connect_to_postgres(self.args.postgres) cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) modified_str = start_time.strftime("'%Y-%m-%d %H:%M:%S'") cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < {modified_str}") @@ -306,14 +323,21 @@ def read_annotation_list(self, start_time): page_count = math.ceil(annotation_count / page_size) for page_num in track(range(page_count), total=page_count, description=f"Loading annotation infos ..."): query = f""" - SELECT a._id, a.name, a.description, a.created, a.modified, JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers, 
JSON_OBJECT_AGG(al.tracingId, al.name) AS layerNames + WITH annotations AS ( + SELECT _id, name, description, created, modified FROM webknossos.annotations + WHERE modified < {modified_str} + ORDER BY _id + LIMIT {page_size} + OFFSET {page_size * page_num} + ) + + SELECT + a._id, a.name, a.description, a.created, a.modified, + JSON_OBJECT_AGG(al.tracingId, al.typ) AS layers, + JSON_OBJECT_AGG(al.tracingId, al.name) AS layerNames FROM webknossos.annotation_layers al - JOIN webknossos.annotations a on al._annotation = a._id - WHERE a.modified < {modified_str} - GROUP BY a._id - ORDER BY a._id - LIMIT {page_size} - OFFSET {page_size * page_num} + JOIN annotations a on al._annotation = a._id + GROUP BY a._id, a.name, a.description, a.created, a.modified """ cursor.execute(query) annotations += cursor.fetchall() @@ -325,3 +349,9 @@ def remove_morton_index(self, bucket_key: str) -> str: second_slash_index = bucket_key.index('/', first_slash_index + 1) first_bracket_index = bucket_key.index('[') return bucket_key[:second_slash_index + 1] + bucket_key[first_bracket_index:] + + def get_progress(self) -> str: + with self.done_count_lock: + done_count = self.done_count + percentage = 100.0 * done_count / self.total_count + return f". 
({done_count}/{self.total_count}={percentage:.1f}% are done.)" diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py index 68e788279e1..c9e6b62e7ce 100644 --- a/tools/migration-unified-annotation-versioning/utils.py +++ b/tools/migration-unified-annotation-versioning/utils.py @@ -12,14 +12,14 @@ def setup_logging(): handler = logging.StreamHandler(sys.stdout) handler.setLevel(logging.DEBUG) - formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s") + formatter = logging.Formatter("%(asctime)s %(levelname)-8s [%(threadName)s] %(message)s") handler.setFormatter(formatter) root.addHandler(handler) -def log_since(before, label: str) -> None: +def log_since(before, label: str, postfix: str = "") -> None: diff = time.time() - before - logger.info(f"{label} took {diff:.2f} s") + logger.info(f"{label} took {diff:.2f} s{postfix}") # TODO should we go to limit + 1? From 35a0e48091d756fc4aca6bf912b08e0555221750 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 27 Nov 2024 10:19:06 +0100 Subject: [PATCH 219/361] avoid redundant calls in task creation (regression introduced in cba70af92fd0f02d08ee250341461fa985142324) --- .../admin/task/task_create_form_view.tsx | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/frontend/javascripts/admin/task/task_create_form_view.tsx b/frontend/javascripts/admin/task/task_create_form_view.tsx index 8b90128c6dc..8ced78e01a8 100644 --- a/frontend/javascripts/admin/task/task_create_form_view.tsx +++ b/frontend/javascripts/admin/task/task_create_form_view.tsx @@ -479,17 +479,11 @@ function TaskCreateFormView({ taskId, history }: Props) { return Promise.resolve(); } - const annotationResponse = - (await tryToAwaitPromise( - getMaybeOutdatedAnnotationInformation(value, { - showErrorToast: false, - }), - )) || - (await tryToAwaitPromise( - getMaybeOutdatedAnnotationInformation(value, { - showErrorToast: false, - }), - )); + 
const annotationResponse = await tryToAwaitPromise( + getMaybeOutdatedAnnotationInformation(value, { + showErrorToast: false, + }), + ); if (annotationResponse?.dataSetName != null) { form.setFieldsValue({ From 7036656a7f27966069cd34161fef086bf4976243 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 27 Nov 2024 10:54:07 +0100 Subject: [PATCH 220/361] migration: humanize time diff add previous-start argument --- .../main.py | 1 + .../migration.py | 25 +++++++------ .../utils.py | 36 ++++++++++++++++++- 3 files changed, 50 insertions(+), 12 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index eb49acb1dad..12e988fe2e9 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -18,6 +18,7 @@ def main(): parser.add_argument("--dry", help="Only read and process data, do not write out results", action="store_true") parser.add_argument("--num_threads", help="Number of threads to migrate the annotations in parallel", type=int, default=1) parser.add_argument("--postgres", help="Postgres connection specifier.", type=str, default="postgres@localhost:5432/webknossos") + parser.add_argument("--previous_start", help="Previous run start time. 
Example: 2024-11-27 10:37:30.171083", type=str) args = parser.parse_args() if args.dst is None and not args.dry: parser.error("At least one of --dry or --dst is required") diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index a4cf83eb4c9..29009cd738c 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -41,17 +41,13 @@ def __init__(self, args): self.total_count = None def run(self): - start_time = datetime.datetime.now() before = time.time() - logger.info(f"Using start time {start_time}") - annotations = self.read_annotation_list(start_time) + annotations = self.read_annotation_list() self.done_count = 0 self.total_count = len(annotations) - before_submit = time.time() with concurrent.futures.ThreadPoolExecutor(max_workers=self.args.num_threads) as executor: executor.map(self.migrate_annotation, annotations) - log_since(before_submit, f"Submitting {self.total_count} futures to ThreadPoolExecutor via map") log_since(before, "Migrating all the things") def migrate_annotation(self, annotation): @@ -63,8 +59,7 @@ def migrate_annotation(self, annotation): self.create_and_save_annotation_proto(annotation, latest_unified_version) with self.done_count_lock: self.done_count += 1 - log_since(before, "", self.get_progress()) - + log_since(before, f"Migrating annotation {annotation['_id']}", self.get_progress()) def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map = {} @@ -309,14 +304,21 @@ def create_and_save_annotation_proto(self, annotation, latest_unified_version: i annotationProto.annotationLayers.append(layer_proto) self.save_bytes(collection="annotations", key=annotation["_id"], version=version, value=annotationProto.SerializeToString()) - def read_annotation_list(self, start_time): + def read_annotation_list(self): before = time.time() + start_time = datetime.datetime.now() + 
previous_start_label = "" + previous_start_query = "" + if self.args.previous_start is not None: + previous_start_label = f"and after previous start time {self.args.previous_start}" + previous_start_query = f" AND modified > '{self.args.previous_start}'" + logger.info(f"Looking only for annotations last modified before start time {start_time}{previous_start_label}.") logger.info("Determining annotation count from postgres...") page_size = 10000 connection = connect_to_postgres(self.args.postgres) cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) - modified_str = start_time.strftime("'%Y-%m-%d %H:%M:%S'") - cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < {modified_str}") + + cursor.execute(f"SELECT COUNT(*) FROM webknossos.annotations WHERE modified < '{start_time}'{previous_start_query}") annotation_count = cursor.fetchone()['count'] logger.info(f"Loading infos of {annotation_count} annotations from postgres ...") annotations = [] @@ -325,7 +327,8 @@ def read_annotation_list(self, start_time): query = f""" WITH annotations AS ( SELECT _id, name, description, created, modified FROM webknossos.annotations - WHERE modified < {modified_str} + WHERE modified < '{start_time}' + {previous_start_query} ORDER BY _id LIMIT {page_size} OFFSET {page_size * page_num} diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py index c9e6b62e7ce..fa288e56c80 100644 --- a/tools/migration-unified-annotation-versioning/utils.py +++ b/tools/migration-unified-annotation-versioning/utils.py @@ -2,6 +2,7 @@ import time from typing import Iterator, Tuple import sys +from math import floor, ceil logger = logging.getLogger(__name__) @@ -19,7 +20,7 @@ def setup_logging(): def log_since(before, label: str, postfix: str = "") -> None: diff = time.time() - before - logger.info(f"{label} took {diff:.2f} s{postfix}") + logger.info(f"{label} took 
{humanize_time_diff(diff)}{postfix}") # TODO should we go to limit + 1? @@ -33,3 +34,36 @@ def batch_range( if i + batch_size >= full_range.stop: return + + +def humanize_time_diff(seconds: float) -> str: + def pluralize(string: str, amount: int) -> str: + return string if amount == 1 else string + "s" + + max_elements = 3 + + label_elements = [] + + days = floor(seconds / 3600 / 24) + if days > 0 and len(label_elements) < max_elements: + label_elements.append(pluralize(f"{days} day", days)) + seconds -= days * 24 * 3600 + + hours = floor(seconds / 3600) + if hours > 0 and len(label_elements) < max_elements: + label_elements.append(f"{hours}h") + seconds -= hours * 3600 + + minutes = floor(seconds / 60) + if minutes > 0 and len(label_elements) < max_elements: + label_elements.append(f"{minutes}m") + seconds -= minutes * 60 + + whole_seconds = ceil(seconds) + if seconds >= 0 and len(label_elements) < max_elements: + if len(label_elements) < 1: + label_elements.append(f"{seconds:.2f}s") + else: + label_elements.append(f"{whole_seconds}s") + + return " ".join(label_elements) From 45850438f8dc6ce1694be63503ab07b54cf22fcc Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 27 Nov 2024 12:44:07 +0100 Subject: [PATCH 221/361] migration: wip handle compacted update actions, add error logging + counting --- .../migration.py | 60 +++++++--- .../annotation/TSAnnotationService.scala | 2 + .../annotation/UpdateActions.scala | 112 ++++++++++-------- .../tracings/volume/VolumeUpdateActions.scala | 34 ++++++ 4 files changed, 139 insertions(+), 69 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 29009cd738c..ba9cfb11dc7 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -34,32 +34,41 @@ def __init__(self, args): self.dst_stub = None if not args.dry: self.dst_stub = 
connect_to_fossildb(args.dst, "destination") - self.json_encoder = msgspec.json.Encoder() - self.json_decoder = msgspec.json.Decoder() self.done_count = None self.done_count_lock = threading.Lock() + self.failure_count = 0 + self.failure_count_lock = threading.Lock() self.total_count = None def run(self): before = time.time() annotations = self.read_annotation_list() self.done_count = 0 + self.failure_count = 0 self.total_count = len(annotations) with concurrent.futures.ThreadPoolExecutor(max_workers=self.args.num_threads) as executor: executor.map(self.migrate_annotation, annotations) + if self.failure_count > 0: + logger.info(f"There were failures for {self.failure_count} annotations. See logs for details.") log_since(before, "Migrating all the things") def migrate_annotation(self, annotation): - logger.info(f"Migrating annotation {annotation['_id']} ...") + logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") before = time.time() - mapping_id_map = self.build_mapping_id_map(annotation) - layer_version_mapping, latest_unified_version = self.migrate_updates(annotation, mapping_id_map) - self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) - self.create_and_save_annotation_proto(annotation, latest_unified_version) - with self.done_count_lock: - self.done_count += 1 - log_since(before, f"Migrating annotation {annotation['_id']}", self.get_progress()) + try: + mapping_id_map = self.build_mapping_id_map(annotation) + layer_version_mapping, latest_unified_version = self.migrate_updates(annotation, mapping_id_map) + self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) + self.create_and_save_annotation_proto(annotation, latest_unified_version) + log_since(before, f"Migrating annotation {annotation['_id']}", self.get_progress()) + except Exception: + logger.exception(f"Exception while migrating annotation {annotation['_id']}:") + with self.failure_count_lock: + self.failure_count 
+= 1 + finally: + with self.done_count_lock: + self.done_count += 1 def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map = {} @@ -71,6 +80,8 @@ def build_mapping_id_map(self, annotation) -> MappingIdMap: return mapping_id_map def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> Tuple[LayerVersionMapping, int]: + json_encoder = msgspec.json.Encoder() + json_decoder = msgspec.json.Decoder() batch_size = 1000 unified_version = 0 version_mapping = {} @@ -81,7 +92,7 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> Tuple[Lay for batch_start, batch_end in batch_range(newest_version, batch_size): update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) for version, update_group in update_groups: - update_group = self.process_update_group(tracing_id, layer_type, update_group) + update_group = self.process_update_group(tracing_id, layer_type, update_group, json_encoder, json_decoder) unified_version += 1 version_mapping_for_layer[version] = unified_version self.save_update_group(annotation['_id'], unified_version, update_group) @@ -121,14 +132,15 @@ def get_newest_tracing_raw(self, tracing_id, collection) -> Optional[bytes]: return getReply.value return None - def process_update_group(self, tracing_id: str, layer_type: str, update_group_raw: bytes) -> bytes: - update_group_parsed = self.json_decoder.decode(update_group_raw) + def process_update_group(self, tracing_id: str, layer_type: str, update_group_raw: bytes, json_encoder, json_decoder) -> bytes: + update_group_parsed = json_decoder.decode(update_group_raw) # TODO handle existing revertToVersion update actions for update in update_group_parsed: name = update["name"] + # renamings if name == "updateTracing": update["name"] = f"update{layer_type}Tracing" elif name == "updateUserBoundingBoxes": @@ -136,10 +148,24 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra elif name == 
"updateUserBoundingBoxVisibility": update["name"] = f"updateUserBoundingBoxVisibilityIn{layer_type}Tracing" + name = update["name"] + + # add actionTracingId if not name == "updateTdCamera": update["value"]["actionTracingId"] = tracing_id - return self.json_encoder.encode(update_group_parsed) + # identify compact update actions, and mark them + # Note: cannot identify compacted actions of the classes + # CreateSegmentVolumeActions UpdateSegmentVolumeAction, UpdateMappingNameAction + # as all their fields are optional + if (name == "updateBucket" and "position" not in update) \ + or (name == "updateVolumeTracing" and "activeSegmentId" not in update["value"]) \ + or (name == "updateUserBoundingBoxesInVolumeTracing" and "boundingBoxes" not in update) \ + or (name == "updateUserBoundingBoxVisibilityInVolumeTracing" and "boundingBoxId" not in update) \ + or (name == "deleteSegmentData" and "id" not in update): + update["isCompacted"] = True + + return json_encoder.encode(update_group_parsed) def save_update_group(self, annotation_id: str, version: int, update_group_raw: bytes) -> None: self.save_bytes(collection="annotationUpdates", key=annotation_id, version=version, value=update_group_raw) @@ -310,7 +336,7 @@ def read_annotation_list(self): previous_start_label = "" previous_start_query = "" if self.args.previous_start is not None: - previous_start_label = f"and after previous start time {self.args.previous_start}" + previous_start_label = f" and after previous start time {self.args.previous_start}" previous_start_query = f" AND modified > '{self.args.previous_start}'" logger.info(f"Looking only for annotations last modified before start time {start_time}{previous_start_label}.") logger.info("Determining annotation count from postgres...") @@ -344,7 +370,7 @@ def read_annotation_list(self): """ cursor.execute(query) annotations += cursor.fetchall() - log_since(before, "Loading annotations") + log_since(before, "Loading annotation infos from postgres") return 
annotations def remove_morton_index(self, bucket_key: str) -> str: @@ -357,4 +383,4 @@ def get_progress(self) -> str: with self.done_count_lock: done_count = self.done_count percentage = 100.0 * done_count / self.total_count - return f". ({done_count}/{self.total_count}={percentage:.1f}% are done.)" + return f". ({done_count}/{self.total_count} = {percentage:.1f}% are done)" diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index b2363902f77..78752d93ce4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -158,6 +158,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss resetToBase(annotationId, annotationWithTracings, targetVersion) case _: BucketMutatingVolumeUpdateAction => Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. 
+ case _: CompactVolumeUpdateAction => + Fox.successful(annotationWithTracings) // No-op, as legacy compacted update actions cannot be applied case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") } } yield updated diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index b4001397916..fe6389b810f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -1,6 +1,9 @@ package com.scalableminds.webknossos.tracingstore.annotation -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{MergeAgglomerateUpdateAction, SplitAgglomerateUpdateAction} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ + MergeAgglomerateUpdateAction, + SplitAgglomerateUpdateAction +} import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating._ import com.scalableminds.webknossos.tracingstore.tracings.volume._ import play.api.libs.json._ @@ -29,57 +32,62 @@ object UpdateAction { implicit object updateActionFormat extends Format[UpdateAction] { override def reads(json: JsValue): JsResult[UpdateAction] = { val jsonValue = (json \ "value").as[JsObject] - (json \ "name").as[String] match { - // Skeleton - case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) - case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) - case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) - case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) - case "moveTreeComponent" => deserialize[MoveTreeComponentSkeletonAction](jsonValue) - case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, 
shouldTransformPositions = true) - case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) - case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) - case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) - case "deleteEdge" => deserialize[DeleteEdgeSkeletonAction](jsonValue) - case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) - case "updateSkeletonTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) - case "updateTreeVisibility" => deserialize[UpdateTreeVisibilitySkeletonAction](jsonValue) - case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibilitySkeletonAction](jsonValue) - case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibilitySkeletonAction](jsonValue) - case "updateUserBoundingBoxesInSkeletonTracing" => deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) - case "updateUserBoundingBoxVisibilityInSkeletonTracing" => - deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) - - // Volume - case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) - case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) - case "updateUserBoundingBoxesInVolumeTracing" => - deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) - case "updateUserBoundingBoxVisibilityInVolumeTracing" => - deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) - case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) - case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) - case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) - case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) - case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) - case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) - case "deleteSegmentData" => 
deserialize[DeleteSegmentDataVolumeAction](jsonValue) - case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) - - // Editable Mapping - case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) - case "splitAgglomerate" => deserialize[SplitAgglomerateUpdateAction](jsonValue) - - // Annotation - case "addLayerToAnnotation" => deserialize[AddLayerAnnotationAction](jsonValue) - case "deleteLayerFromAnnotation" => deserialize[DeleteLayerAnnotationAction](jsonValue) - case "updateLayerMetadata" => deserialize[UpdateLayerMetadataAnnotationAction](jsonValue) - case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationAction](jsonValue) - case "revertToVersion" => deserialize[RevertToVersionAnnotationAction](jsonValue) - case "resetToBase" => deserialize[ResetToBaseAnnotationAction](jsonValue) - case "updateTdCamera" => deserialize[UpdateTdCameraAnnotationAction](jsonValue) - - case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") + if ((json \ "isCompacted").asOpt[Boolean].getOrElse(false)) { + deserialize[CompactVolumeUpdateAction](jsonValue) + } else { + (json \ "name").as[String] match { + // Skeleton + case "createTree" => deserialize[CreateTreeSkeletonAction](jsonValue) + case "deleteTree" => deserialize[DeleteTreeSkeletonAction](jsonValue) + case "updateTree" => deserialize[UpdateTreeSkeletonAction](jsonValue) + case "mergeTree" => deserialize[MergeTreeSkeletonAction](jsonValue) + case "moveTreeComponent" => deserialize[MoveTreeComponentSkeletonAction](jsonValue) + case "createNode" => deserialize[CreateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "deleteNode" => deserialize[DeleteNodeSkeletonAction](jsonValue) + case "updateNode" => deserialize[UpdateNodeSkeletonAction](jsonValue, shouldTransformPositions = true) + case "createEdge" => deserialize[CreateEdgeSkeletonAction](jsonValue) + case "deleteEdge" => 
deserialize[DeleteEdgeSkeletonAction](jsonValue) + case "updateTreeGroups" => deserialize[UpdateTreeGroupsSkeletonAction](jsonValue) + case "updateSkeletonTracing" => deserialize[UpdateTracingSkeletonAction](jsonValue) + case "updateTreeVisibility" => deserialize[UpdateTreeVisibilitySkeletonAction](jsonValue) + case "updateTreeGroupVisibility" => deserialize[UpdateTreeGroupVisibilitySkeletonAction](jsonValue) + case "updateTreeEdgesVisibility" => deserialize[UpdateTreeEdgesVisibilitySkeletonAction](jsonValue) + case "updateUserBoundingBoxesInSkeletonTracing" => + deserialize[UpdateUserBoundingBoxesSkeletonAction](jsonValue) + case "updateUserBoundingBoxVisibilityInSkeletonTracing" => + deserialize[UpdateUserBoundingBoxVisibilitySkeletonAction](jsonValue) + + // Volume + case "updateBucket" => deserialize[UpdateBucketVolumeAction](jsonValue) + case "updateVolumeTracing" => deserialize[UpdateTracingVolumeAction](jsonValue) + case "updateUserBoundingBoxesInVolumeTracing" => + deserialize[UpdateUserBoundingBoxesVolumeAction](jsonValue) + case "updateUserBoundingBoxVisibilityInVolumeTracing" => + deserialize[UpdateUserBoundingBoxVisibilityVolumeAction](jsonValue) + case "removeFallbackLayer" => deserialize[RemoveFallbackLayerVolumeAction](jsonValue) + case "importVolumeTracing" => deserialize[ImportVolumeDataVolumeAction](jsonValue) + case "createSegment" => deserialize[CreateSegmentVolumeAction](jsonValue) + case "updateSegment" => deserialize[UpdateSegmentVolumeAction](jsonValue) + case "updateSegmentGroups" => deserialize[UpdateSegmentGroupsVolumeAction](jsonValue) + case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) + case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) + case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) + + // Editable Mapping + case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) + case "splitAgglomerate" => 
deserialize[SplitAgglomerateUpdateAction](jsonValue) + + // Annotation + case "addLayerToAnnotation" => deserialize[AddLayerAnnotationAction](jsonValue) + case "deleteLayerFromAnnotation" => deserialize[DeleteLayerAnnotationAction](jsonValue) + case "updateLayerMetadata" => deserialize[UpdateLayerMetadataAnnotationAction](jsonValue) + case "updateMetadataOfAnnotation" => deserialize[UpdateMetadataAnnotationAction](jsonValue) + case "revertToVersion" => deserialize[RevertToVersionAnnotationAction](jsonValue) + case "resetToBase" => deserialize[ResetToBaseAnnotationAction](jsonValue) + case "updateTdCamera" => deserialize[UpdateTdCameraAnnotationAction](jsonValue) + + case unknownAction: String => JsError(s"Invalid update action s'$unknownAction'") + } } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 61014e9632b..d977e0cd4f0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -338,6 +338,40 @@ case class UpdateSegmentGroupsVolumeAction(segmentGroups: List[UpdateActionSegme override def withActionTracingId(newTracingId: String): LayerUpdateAction = this.copy(actionTracingId = newTracingId) } + +// Only used to represent legacy update actions from the db where not all fields are set +// This is from a time when volume actions were not applied lazily +// (Before https://github.com/scalableminds/webknossos/pull/7917) +case class CompactVolumeUpdateAction(name: String, + actionTracingId: String, + actionTimestamp: Option[Long], + actionAuthorId: Option[String] = None, + value: JsObject) + extends VolumeUpdateAction { + override def addTimestamp(timestamp: Long): VolumeUpdateAction = 
this.copy(actionTimestamp = Some(timestamp)) + override def addAuthorId(authorId: Option[String]): VolumeUpdateAction = + this.copy(actionAuthorId = authorId) + override def addInfo(info: Option[String]): UpdateAction = this + override def withActionTracingId(newTracingId: String): LayerUpdateAction = + this.copy(actionTracingId = newTracingId) +} + +object CompactVolumeUpdateAction { + implicit object compactVolumeUpdateActionFormat extends Format[CompactVolumeUpdateAction] { + override def reads(json: JsValue): JsResult[CompactVolumeUpdateAction] = + for { + name <- (json \ "name").validate[String] + actionTracingId <- (json \ "value" \ "actionTracingId").validate[String] + actionTimestamp <- (json \ "value" \ "actionTimestamp").validateOpt[Long] + actionAuthorId <- (json \ "value" \ "actionAuthorId").validateOpt[String] + value <- (json \ "value").validate[JsObject].map(_ - "actionTracingId" - "actionTimestamp" - "actionAuthorId") + } yield CompactVolumeUpdateAction(name, actionTracingId, actionTimestamp, actionAuthorId, value) + + override def writes(o: CompactVolumeUpdateAction): JsValue = + Json.obj("name" -> o.name, "value" -> (Json.obj("actionTimestamp" -> o.actionTimestamp) ++ o.value)) + } +} + object UpdateBucketVolumeAction { implicit val jsonFormat: OFormat[UpdateBucketVolumeAction] = Json.format[UpdateBucketVolumeAction] } From 171d853d2376f236f2ff60cdbe8653d2fcf18108 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 27 Nov 2024 13:36:48 +0100 Subject: [PATCH 222/361] fix removeAllButLastUpdateTracingAction --- .../helpers/compaction/compact_save_queue.ts | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts index 710ce9f273a..7504a03eded 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts +++ 
b/frontend/javascripts/oxalis/model/helpers/compaction/compact_save_queue.ts @@ -3,14 +3,18 @@ import type { SaveQueueEntry } from "oxalis/store"; function removeAllButLastUpdateTracingAction(updateActionsBatches: Array) { // This part of the code removes all entries from the save queue that consist only of - // one updateTracing update action, except for the last one - const updateTracingOnlyBatches = updateActionsBatches.filter( - (batch) => - batch.actions.length === 1 && - (batch.actions[0].name === "updateSkeletonTracing" || - batch.actions[0].name === "updateVolumeTracing"), + // one update{Skeleton,Volume}Tracing update action, except for the last one + const updateSkeletonTracingOnlyBatches = updateActionsBatches.filter( + (batch) => batch.actions.length === 1 && batch.actions[0].name === "updateSkeletonTracing", + ); + const updateVolumeTracingOnlyBatches = updateActionsBatches.filter( + (batch) => batch.actions.length === 1 && batch.actions[0].name === "updateVolumeTracing", + ); + return _.without( + updateActionsBatches, + ...updateSkeletonTracingOnlyBatches.slice(0, -1), + ...updateVolumeTracingOnlyBatches.slice(0, -1), ); - return _.without(updateActionsBatches, ...updateTracingOnlyBatches.slice(0, -1)); } function removeAllButLastUpdateTdCameraAction(updateActionsBatches: Array) { From 97e02e8a32878c07d49e35b3131b13f3e24a6f82 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 27 Nov 2024 13:37:27 +0100 Subject: [PATCH 223/361] adapt some todo comments --- frontend/javascripts/admin/admin_rest_api.ts | 8 +++++--- .../dashboard/explorative_annotations_view.tsx | 1 + .../oxalis/model/accessors/annotation_accessor.ts | 1 + .../javascripts/oxalis/model/reducers/save_reducer.ts | 2 +- .../test/backend-snapshot-tests/annotations.e2e.ts | 2 +- .../javascripts/test/sagas/skeletontracing_saga.spec.ts | 2 +- 6 files changed, 10 insertions(+), 6 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts 
b/frontend/javascripts/admin/admin_rest_api.ts index a7c3234ca3a..abd7f08ba99 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -730,12 +730,14 @@ export async function getMaybeOutdatedAnnotationInformation( return annotation; } +// todop: not used anywhere yet export async function getNewestAnnotationInformation( annotationId: string, tracingstoreUrl: string, ): Promise { const infoUrl = `${tracingstoreUrl}/tracings/annotation/${annotationId}`; - const annotationWithMessages = await Request.receiveJSON(infoUrl); // TODO adjust return type and implement proto type in frontend + // TODOp adjust return type and implement proto type in frontend + const annotationWithMessages = await Request.receiveJSON(infoUrl); // Extract the potential messages property before returning the task to avoid // failing e2e tests in annotations.e2e.ts @@ -855,7 +857,7 @@ export async function acquireAnnotationMutex( export async function getTracingForAnnotationType( annotation: APIAnnotation, annotationLayerDescriptor: AnnotationLayerDescriptor, - version?: number | null | undefined, // TODOM: Use this parameter + version?: number | null | undefined, // TODOp: Use this parameter ): Promise { const { tracingId, typ } = annotationLayerDescriptor; const tracingType = typ.toLowerCase() as "skeleton" | "volume"; @@ -1019,7 +1021,7 @@ export async function downloadAnnotation( includeVolumeData: boolean = true, ) { const searchParams = new URLSearchParams(); - // TODO: Use the version parameter + // TODOp: Use the version parameter /*Object.entries(versions).forEach(([key, val]) => { if (val != null) { searchParams.append(`${key}Version`, val.toString()); diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index e6a85d0f3c4..0a61e9cbf04 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ 
b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -383,6 +383,7 @@ class ExplorativeAnnotationsView extends React.PureComponent { }; renameTracing(tracing: APIAnnotationInfo, name: string) { + // todop: this does not work because there is no save saga in the dashboard Store.dispatch( pushSaveQueueTransaction([updateMetadataOfAnnotation(name)], "unused-tracing-id"), ); diff --git a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts index aae3bc468b4..1ca128dcd71 100644 --- a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts @@ -67,6 +67,7 @@ export function getSkeletonStats(stats: TracingStats): SkeletonTracingStats | un return stats[tracingId]; } } + return undefined; } export function getVolumeStats(stats: TracingStats): [string, VolumeTracingStats][] { diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index f16d4133c0f..9ebda96139a 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -189,7 +189,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } const layerIndependentActions = new Set([ - // TODOM: sync this with the backend. The backend currently has only two such actions that have this requirement. + // TODOp: sync this with the backend. The backend currently has only two such actions that have this requirement. 
"updateTdCamera", "revertToVersion", "addLayerToAnnotation", diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index d3a8b038100..aa81e98d6d2 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -154,7 +154,7 @@ async function sendUpdateActions(explorational: APIAnnotation, queue: SaveQueueE ); } -// TODOM: Add tests for new update actions added in this pr (including updateAnnotationMetadata as this part of testing was removed editAnnotation() test case) +// TODOp: Add tests for new update actions added in this pr (including updateAnnotationMetadata as this part of testing was removed editAnnotation() test case) test.serial("Send update actions and compare resulting tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index 1e1011bba7c..82bb9e6f06a 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -79,7 +79,7 @@ function testDiffing( ); } -// TODOM +// TODOp not used? 
// biome-ignore lint/correctness/noUnusedVariables: function compactSaveQueueWithUpdateActions( queue: Array, From 7897f25554fe48aa5c76d07e1a70a3a9b5f39543 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 27 Nov 2024 14:18:31 +0100 Subject: [PATCH 224/361] change renameTracing to use old editAnnotation route --- frontend/javascripts/admin/admin_rest_api.ts | 1 + .../dashboard/explorative_annotations_view.tsx | 13 ++++++++----- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index abd7f08ba99..76ecba2f42f 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -602,6 +602,7 @@ export type EditableAnnotation = { viewConfiguration?: AnnotationViewConfiguration; }; +// todop: does this still work? discussion at https://github.com/scalableminds/webknossos/pull/7917#discussion_r1860594474 export function editAnnotation( annotationId: string, annotationType: APIAnnotationType, diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 0a61e9cbf04..e26cb80fc4b 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -383,11 +383,14 @@ class ExplorativeAnnotationsView extends React.PureComponent { }; renameTracing(tracing: APIAnnotationInfo, name: string) { - // todop: this does not work because there is no save saga in the dashboard - Store.dispatch( - pushSaveQueueTransaction([updateMetadataOfAnnotation(name)], "unused-tracing-id"), - ); - this.updateTracingInLocalState(tracing, (t) => update(t, { name: { $set: name } })); + editAnnotation(tracing.id, tracing.typ, { name }) + .then(() => { + Toast.success(messages["annotation.was_edited"]); + this.updateTracingInLocalState(tracing, (t) => update(t, { name: { $set: name } })); + 
}) + .catch((error) => { + handleGenericError(error as Error, "Could not update the annotation name."); + }); } archiveAll = () => { From 5fc5813214809c50e24e195c4f35159d0c6756fe Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 27 Nov 2024 15:25:24 +0100 Subject: [PATCH 225/361] more comments --- frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx | 1 + frontend/javascripts/oxalis/model/sagas/proofread_saga.ts | 2 +- frontend/javascripts/oxalis/view/version_list.tsx | 5 +++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index 5bb550e9eb0..0f3bfa6134e 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -100,6 +100,7 @@ export function* pushAnnotationUpdateAsync(action: Action) { yield* retry( SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, + // todop: shouldn't this work via the save queue now? editAnnotation, tracing.annotationId, tracing.annotationType, diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 7aa295aa711..13b61cdc7d1 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -276,7 +276,7 @@ function* createEditableMapping(): Saga { const baseMappingName = volumeTracing.mappingName; yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); yield* put(setHasEditableMappingAction()); - // Ensure the backend receives the correct mapping name, the fact that the mapping is locked and editable before doing the first proofreading operation. + // Ensure a saved state so that the mapping is locked and editable before doing the first proofreading operation. 
yield* call([Model, Model.ensureSavedState]); const editableMapping: ServerEditableMapping = { baseMappingName: baseMappingName, diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index 20928c8345a..b709d8f4d91 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -53,7 +53,7 @@ export async function previewVersion(version?: number) { Store.dispatch(setAnnotationAllowUpdateAction(false)); const segmentationLayersToReload = []; - // TODO: properly determine which layers to reload. + // TODOp: properly determine which layers to reload. // No versions were passed which means that the newest annotation should be // shown. Therefore, reload all segmentation layers. segmentationLayersToReload.push(...Model.getSegmentationTracingLayers()); @@ -75,6 +75,7 @@ async function handleRestoreVersion( Store.dispatch( pushSaveQueueTransaction( [revertToVersion(version)], + // todop "experimental; leaving out tracingId as this should not be required", ), ); @@ -191,7 +192,7 @@ function InnerVersionList(props: Props & { newestVersion: number }) { const [initialVersion] = useState(tracing.version); function fetchPaginatedVersions({ pageParam }: { pageParam?: number }) { - // TODO: maybe refactor this so that this method is not calculated very rendering cycle + // TODOp: maybe refactor this so that this method is not calculated very rendering cycle if (pageParam == null) { pageParam = Math.floor((newestVersion - initialVersion) / ENTRIES_PER_PAGE); } From 7734391c71ac67738609f709302d9504eba0c342 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 28 Nov 2024 09:44:22 +0100 Subject: [PATCH 226/361] fix merge conflicts --- frontend/javascripts/admin/admin_rest_api.ts | 134 +----------------- .../oxalis/model_initialization.ts | 30 +--- .../oxalis/view/jobs/train_ai_model.tsx | 14 -- frontend/javascripts/router.tsx | 12 -- 
.../backend-snapshot-tests/annotations.e2e.ts | 13 +- 5 files changed, 3 insertions(+), 200 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 512d7febf86..dc8b22ba9ac 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1,20 +1,17 @@ import ResumableJS from "resumablejs"; import _ from "lodash"; import dayjs from "dayjs"; -<<<<<<< HEAD import { - type APIActiveUser, type APIAnnotation, type APIAnnotationInfo, type APIAnnotationType, type APIAnnotationVisibility, - type APIAnnotationWithTask, type APIBuildInfo, type APIConnectomeFile, type APIDataSource, type APIDataStore, type APIDataset, - type APIDatasetId, + type APIDataSourceId, type APIFeatureToggles, type APIHistogramData, type APIMapping, @@ -33,7 +30,6 @@ import { type APIScript, type APIScriptCreator, type APIScriptUpdater, - type APITask, type APITaskType, type APITeam, type APITimeInterval, @@ -67,134 +63,6 @@ import { type APITimeTrackingPerUser, AnnotationLayerType, type APITracingStoreAnnotation, -||||||| 5d3d66d2ae -import type { - APIActiveUser, - APIAnnotation, - APIAnnotationInfo, - APIAnnotationType, - APIAnnotationVisibility, - APIAnnotationWithTask, - APIBuildInfo, - APIConnectomeFile, - APIDataSource, - APIDataStore, - APIDataset, - APIDatasetId, - APIFeatureToggles, - APIHistogramData, - APIMapping, - APIMaybeUnimportedDataset, - APIMeshFile, - APIAvailableTasksReport, - APIOrganization, - APIOrganizationCompact, - APIProject, - APIProjectCreator, - APIProjectProgressReport, - APIProjectUpdater, - APIProjectWithStatus, - APIPublication, - APIMagRestrictions, - APIScript, - APIScriptCreator, - APIScriptUpdater, - APITask, - APITaskType, - APITeam, - APITimeInterval, - APITimeTrackingPerAnnotation, - APITimeTrackingSpan, - APITracingStore, - APIUpdateActionBatch, - APIUser, - APIUserLoggedTime, - APIUserTheme, - AnnotationLayerDescriptor, - 
AnnotationViewConfiguration, - EditableLayerProperties, - ExperienceDomainList, - ServerTracing, - TracingType, - ServerEditableMapping, - APICompoundType, - ZarrPrivateLink, - VoxelyticsWorkflowReport, - VoxelyticsChunkStatistics, - ShortLink, - VoxelyticsWorkflowListing, - APIPricingPlanStatus, - VoxelyticsLogLine, - APIUserCompact, - APIDatasetCompact, - MaintenanceInfo, - AdditionalCoordinate, - LayerLink, - VoxelSize, - APITimeTrackingPerUser, -======= -import type { - APIAnnotation, - APIAnnotationInfo, - APIAnnotationType, - APIAnnotationVisibility, - APIBuildInfo, - APIConnectomeFile, - APIDataSource, - APIDataStore, - APIDataset, - APIDataSourceId, - APIFeatureToggles, - APIHistogramData, - APIMapping, - APIMaybeUnimportedDataset, - APIMeshFile, - APIAvailableTasksReport, - APIOrganization, - APIOrganizationCompact, - APIProject, - APIProjectCreator, - APIProjectProgressReport, - APIProjectUpdater, - APIProjectWithStatus, - APIPublication, - APIMagRestrictions, - APIScript, - APIScriptCreator, - APIScriptUpdater, - APITaskType, - APITeam, - APITimeInterval, - APITimeTrackingPerAnnotation, - APITimeTrackingSpan, - APITracingStore, - APIUpdateActionBatch, - APIUser, - APIUserLoggedTime, - APIUserTheme, - AnnotationLayerDescriptor, - AnnotationViewConfiguration, - EditableLayerProperties, - ExperienceDomainList, - ServerTracing, - TracingType, - ServerEditableMapping, - APICompoundType, - ZarrPrivateLink, - VoxelyticsWorkflowReport, - VoxelyticsChunkStatistics, - ShortLink, - VoxelyticsWorkflowListing, - APIPricingPlanStatus, - VoxelyticsLogLine, - APIUserCompact, - APIDatasetCompact, - MaintenanceInfo, - AdditionalCoordinate, - LayerLink, - VoxelSize, - APITimeTrackingPerUser, ->>>>>>> master } from "types/api_flow_types"; import type { AnnotationTypeFilterEnum, LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; import Constants, { ControlModeEnum, AnnotationStateFilterEnum } from "oxalis/constants"; diff --git 
a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index b9760b35db2..f27753a70ff 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -129,7 +129,6 @@ export async function initialize( if (initialCommandType.type === ControlModeEnum.TRACE) { const { annotationId } = initialCommandType; -<<<<<<< HEAD if (initialMaybeCompoundType != null) { annotation = await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType); } else { @@ -154,26 +153,7 @@ export async function initialize( }; annotation = completeAnnotation; } - datasetId = { - name: annotation.dataSetName, - owningOrganization: annotation.organization, - }; -||||||| 5d3d66d2ae - annotation = - initialMaybeCompoundType != null - ? await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType) - : await getAnnotationInformation(annotationId); - datasetId = { - name: annotation.dataSetName, - owningOrganization: annotation.organization, - }; -======= - annotation = - initialMaybeCompoundType != null - ? 
await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType) - : await getAnnotationInformation(annotationId); datasetId = annotation.datasetId; ->>>>>>> master if (!annotation.restrictions.allowAccess) { Toast.error(messages["tracing.no_access"]); @@ -265,16 +245,8 @@ export async function initialize( async function fetchParallel( annotation: APIAnnotation | null | undefined, -<<<<<<< HEAD - datasetId: APIDatasetId, - version: number | undefined | null, -||||||| 5d3d66d2ae - datasetId: APIDatasetId, - versions?: Versions, -======= datasetId: string, - versions?: Versions, ->>>>>>> master + version: number | undefined | null, ): Promise<[APIDataset, UserConfiguration, Array]> { return Promise.all([ getDataset(datasetId, getSharingTokenFromUrlParameters()), diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index 7d5605676cf..06f1a9bade5 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -477,22 +477,8 @@ function AnnotationsCsvInput({ const newAnnotationsWithDatasets = await Promise.all( newItems.map(async (item) => { -<<<<<<< HEAD const annotation = await getMaybeOutdatedAnnotationInformation(item.annotationId); - const dataset = await getDataset({ - owningOrganization: annotation.organization, - name: annotation.dataSetName, - }); -||||||| 5d3d66d2ae - const annotation = await getAnnotationInformation(item.annotationId); - const dataset = await getDataset({ - owningOrganization: annotation.organization, - name: annotation.dataSetName, - }); -======= - const annotation = await getAnnotationInformation(item.annotationId); const dataset = await getDataset(annotation.datasetId); ->>>>>>> master const volumeServerTracings: ServerVolumeTracing[] = await Promise.all( annotation.annotationLayers diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index 
ae62ac37d80..7b15564126e 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -1,20 +1,8 @@ -<<<<<<< HEAD import { createExplorational, getMaybeOutdatedAnnotationInformation, - getOrganizationForDataset, - getShortLink, -} from "admin/admin_rest_api"; -||||||| 5d3d66d2ae -import { - createExplorational, - getAnnotationInformation, - getOrganizationForDataset, getShortLink, } from "admin/admin_rest_api"; -======= -import { createExplorational, getAnnotationInformation, getShortLink } from "admin/admin_rest_api"; ->>>>>>> master import AcceptInviteView from "admin/auth/accept_invite_view"; import AuthTokenView from "admin/auth/auth_token_view"; import ChangePasswordView from "admin/auth/change_password_view"; diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index decef73f588..2364cd6ff07 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -17,21 +17,10 @@ import * as api from "admin/admin_rest_api"; import generateDummyTrees from "oxalis/model/helpers/generate_dummy_trees"; import test from "ava"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; -<<<<<<< HEAD import type { SaveQueueEntry } from "oxalis/store"; -const datasetId = { - name: "confocal-multi_knossos", - owningOrganization: "Organization_X", -}; -||||||| 5d3d66d2ae -const datasetId = { - name: "confocal-multi_knossos", - owningOrganization: "Organization_X", -}; -======= const datasetId = "59e9cfbdba632ac2ab8b23b3"; ->>>>>>> master + process.on("unhandledRejection", (err, promise) => { console.error("Unhandled rejection (promise: ", promise, ", reason: ", err, ")."); }); From a9fdce3364af2f2b2aef558fa7b5c895f9a7f49f Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 28 Nov 2024 09:45:41 +0100 Subject: [PATCH 227/361] fix types for 
editAnnotation --- frontend/javascripts/admin/admin_rest_api.ts | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index dc8b22ba9ac..e3113814c73 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -486,6 +486,8 @@ export function reOpenAnnotation( } export type EditableAnnotation = { + name: string; + description: string; visibility: APIAnnotationVisibility; tags: Array; viewConfiguration?: AnnotationViewConfiguration; From 62dc2832df1583692c936ff7858ae19fe9951000 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 28 Nov 2024 10:29:36 +0100 Subject: [PATCH 228/361] handle UpdatTdCamera updates, migration: mark more as compacted --- .../migration.py | 17 +++++++++-------- .../annotation/TSAnnotationService.scala | 2 ++ 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index ba9cfb11dc7..f4c8ed6f2a3 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -139,6 +139,7 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra for update in update_group_parsed: name = update["name"] + update_value = update["value"] # renamings if name == "updateTracing": @@ -155,14 +156,14 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra update["value"]["actionTracingId"] = tracing_id # identify compact update actions, and mark them - # Note: cannot identify compacted actions of the classes - # CreateSegmentVolumeActions UpdateSegmentVolumeAction, UpdateMappingNameAction - # as all their fields are optional - if (name == "updateBucket" and "position" not in update) \ - or (name == "updateVolumeTracing" and "activeSegmentId" not in update["value"]) \ - or (name 
== "updateUserBoundingBoxesInVolumeTracing" and "boundingBoxes" not in update) \ - or (name == "updateUserBoundingBoxVisibilityInVolumeTracing" and "boundingBoxId" not in update) \ - or (name == "deleteSegmentData" and "id" not in update): + if (name == "updateBucket" and "position" not in update_value) \ + or (name == "updateVolumeTracing" and "activeSegmentId" not in update_value) \ + or (name == "updateUserBoundingBoxesInVolumeTracing" and "boundingBoxes" not in update_value) \ + or (name == "updateUserBoundingBoxVisibilityInVolumeTracing" and "boundingBoxId" not in update_value) \ + or (name == "deleteSegmentData" and "id" not in update_value) \ + or (name == "createSegment" and "name" not in update_value) \ + or (name == "updateSegment" and "name" not in update_value) \ + or (name == "updateMappingName" and "mappingName" not in update_value): update["isCompacted"] = True return json_encoder.encode(update_group_parsed) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 78752d93ce4..c2f24a7366f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -160,6 +160,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. 
case _: CompactVolumeUpdateAction => Fox.successful(annotationWithTracings) // No-op, as legacy compacted update actions cannot be applied + case _: UpdateTdCameraAnnotationAction => + Fox.successful(annotationWithTracings) // No-op, exists just to mark these updates in the history / count times case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") } } yield updated From 9c1373c4fd92ab554fab67e4519567b4d1d64708 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 28 Nov 2024 10:56:28 +0100 Subject: [PATCH 229/361] fix parsing CompactVolumeUpdateActions --- .../webknossos/tracingstore/annotation/UpdateActions.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index fe6389b810f..e0aa026db5b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -33,7 +33,7 @@ object UpdateAction { override def reads(json: JsValue): JsResult[UpdateAction] = { val jsonValue = (json \ "value").as[JsObject] if ((json \ "isCompacted").asOpt[Boolean].getOrElse(false)) { - deserialize[CompactVolumeUpdateAction](jsonValue) + deserialize[CompactVolumeUpdateAction](json) } else { (json \ "name").as[String] match { // Skeleton From bf918328346338734307675cb162cd65b355d2ab Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 28 Nov 2024 11:51:01 +0100 Subject: [PATCH 230/361] fix migration for proofreading, add some more error messages to backend --- .../migration.py | 86 ++++++++++++------- .../annotation/TSAnnotationService.scala | 11 +-- .../EditableMappingController.scala | 3 +- 3 files changed, 63 insertions(+), 37 deletions(-) diff --git 
a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index f4c8ed6f2a3..9aad4282193 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -10,6 +10,7 @@ import msgspec import concurrent.futures import threading +from functools import partial import fossildbapi_pb2 as proto import VolumeTracing_pb2 as Volume @@ -58,9 +59,9 @@ def migrate_annotation(self, annotation): before = time.time() try: mapping_id_map = self.build_mapping_id_map(annotation) - layer_version_mapping, latest_unified_version = self.migrate_updates(annotation, mapping_id_map) - self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) - self.create_and_save_annotation_proto(annotation, latest_unified_version) + layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) + materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) + self.create_and_save_annotation_proto(annotation, materialized_versions) log_since(before, f"Migrating annotation {annotation['_id']}", self.get_progress()) except Exception: logger.exception(f"Exception while migrating annotation {annotation['_id']}:") @@ -79,7 +80,7 @@ def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map[tracing_id] = editable_mapping_id return mapping_id_map - def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> Tuple[LayerVersionMapping, int]: + def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() batch_size = 1000 @@ -110,7 +111,7 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> Tuple[Lay # TODO interleave updates rather than concat # TODO handle existing revertToVersion update actions - return version_mapping, unified_version + 
return version_mapping def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> Optional[str]: if layer_type == "Skeleton": @@ -193,21 +194,27 @@ def update_collection_for_layer_type(self, layer_type): return "skeletonUpdates" return "volumeUpdates" - def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): + def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: + materialized_versions = [] for tracing_id, tracing_type in annotation["layers"].items(): - self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping, mapping_id_map) + materialized_versions_of_layer = \ + self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping, mapping_id_map) + materialized_versions += materialized_versions_of_layer + return materialized_versions - def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): + def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: if layer_type == "Skeleton": - self.migrate_skeleton_proto(tracing_id, layer_version_mapping) + return self.migrate_skeleton_proto(tracing_id, layer_version_mapping) if layer_type == "Volume": - self.migrate_volume_proto(tracing_id, layer_version_mapping) + materialized_volume_versions = self.migrate_volume_proto(tracing_id, layer_version_mapping, mapping_id_map) self.migrate_volume_buckets(tracing_id, layer_version_mapping) self.migrate_segment_index(tracing_id, layer_version_mapping) - self.migrate_editable_mapping(tracing_id, layer_version_mapping, mapping_id_map) + materialized_mapping_versions = self.migrate_editable_mapping(tracing_id, layer_version_mapping, mapping_id_map) + return 
materialized_volume_versions + materialized_mapping_versions - def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping) -> List[int]: collection = "skeletons" + materialized_versions_unified = [] materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: new_version = layer_version_mapping[tracing_id][materialized_version] @@ -217,20 +224,28 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe skeleton.ParseFromString(value_bytes) skeleton.version = new_version value_bytes = skeleton.SerializeToString() + materialized_versions_unified.append(new_version) self.save_bytes(collection, tracing_id, new_version, value_bytes) + return materialized_versions_unified - def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): collection = "volumes" + materialized_versions_unified = [] materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: new_version = layer_version_mapping[tracing_id][materialized_version] value_bytes = self.get_bytes(collection, tracing_id, materialized_version) - if materialized_version != new_version: + if materialized_version != new_version or tracing_id in mapping_id_map: volume = Volume.VolumeTracing() volume.ParseFromString(value_bytes) volume.version = new_version + if tracing_id in mapping_id_map: + print(f"setting mappingName to tracing id {tracing_id}") + volume.mappingName = tracing_id value_bytes = volume.SerializeToString() + materialized_versions_unified.append(new_version) self.save_bytes(collection, tracing_id, new_version, value_bytes) + return materialized_versions_unified def list_versions(self, 
collection, key) -> List[int]: reply = self.src_stub.ListVersions(proto.ListVersionsRequest(collection=collection, key=key)) @@ -269,6 +284,7 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: new_key = key if transform_key is not None: new_key = transform_key(key) + print(f"transformed key {key} to {new_key}") for version, value in zip(get_versions_reply.versions, get_versions_reply.values): new_version = layer_version_mapping[tracing_id][version] self.save_bytes(collection, new_key, new_version, value) @@ -280,41 +296,45 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: def migrate_segment_index(self, tracing_id, layer_version_mapping): self.migrate_all_versions_and_keys_with_prefix("volumeSegmentIndex", tracing_id, layer_version_mapping, transform_key=None) - def migrate_editable_mapping(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): - self.migrate_editable_mapping_info(tracing_id, layer_version_mapping, mapping_id_map) - self.migrate_editable_mapping_agglomerate_to_graph(tracing_id, layer_version_mapping) - self.migrate_editable_mapping_segment_to_agglomerate(tracing_id, layer_version_mapping) - - def migrate_editable_mapping_info(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): + def migrate_editable_mapping(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: if tracing_id not in mapping_id_map: - return + return [] mapping_id = mapping_id_map[tracing_id] + materialized_versions = self.migrate_editable_mapping_info(tracing_id, mapping_id, layer_version_mapping) + self.migrate_editable_mapping_agglomerate_to_graph(tracing_id, mapping_id, layer_version_mapping) + self.migrate_editable_mapping_segment_to_agglomerate(tracing_id, mapping_id, layer_version_mapping) + return materialized_versions + + def migrate_editable_mapping_info(self, 
tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping) -> List[int]: collection = "editableMappingsInfo" materialized_versions = self.list_versions(collection, mapping_id) + materialized_versions_unified = [] for materialized_version in materialized_versions: value_bytes = self.get_bytes(collection, mapping_id, materialized_version) new_version = layer_version_mapping[mapping_id][materialized_version] - self.save_bytes(collection, mapping_id, new_version, value_bytes) + materialized_versions_unified.append(new_version) + self.save_bytes(collection, tracing_id, new_version, value_bytes) + return materialized_versions_unified - def migrate_editable_mapping_agglomerate_to_graph(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + def migrate_editable_mapping_agglomerate_to_graph(self, tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping): + print(f"migrate_editable_mapping_agglomerate_to_graph, traicng id {tracing_id} mapping id {mapping_id}") self.migrate_all_versions_and_keys_with_prefix( "editableMappingsAgglomerateToGraph", - tracing_id, + mapping_id, layer_version_mapping, - None + transform_key=partial(self.replace_before_first_slash, tracing_id) ) - def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping): self.migrate_all_versions_and_keys_with_prefix( "editableMappingsSegmentToAgglomerate", - tracing_id, + mapping_id, layer_version_mapping, - None + transform_key=partial(self.replace_before_first_slash, tracing_id) ) - def create_and_save_annotation_proto(self, annotation, latest_unified_version: int): - version_step = 1000 - for version in range(0, latest_unified_version, version_step): + def create_and_save_annotation_proto(self, annotation, materialized_versions: List[int]): + for version in 
materialized_versions: annotationProto = AnnotationProto.AnnotationProto() annotationProto.name = annotation["name"] annotationProto.description = annotation["description"] @@ -380,6 +400,10 @@ def remove_morton_index(self, bucket_key: str) -> str: first_bracket_index = bucket_key.index('[') return bucket_key[:second_slash_index + 1] + bucket_key[first_bracket_index:] + def replace_before_first_slash(self, replacement_prefix: str, key) -> str: + slash_pos = key.find('/') + return replacement_prefix + key[slash_pos:] + def get_progress(self) -> str: with self.done_count_lock: done_count = self.done_count diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c2f24a7366f..10430fe52d0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -71,7 +71,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotation <- if (isTemporaryTracing) temporaryTracingService.getAnnotation(annotationId) else for { - withTracings <- getWithTracings(annotationId, version) + withTracings <- getWithTracings(annotationId, version) ?~> "annotation.notFound" } yield withTracings.annotation } yield annotation @@ -85,7 +85,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - newestMaterialized <- getNewestMaterialized(annotationId) + newestMaterialized <- getNewestMaterialized(annotationId) ?~> "getNewestMaterialized.failed" targetVersion <- determineTargetVersion(annotationId, newestMaterialized, version) ?~> "determineTargetVersion.failed" // When requesting any other than the newest 
version, do not consider the changes final reportChangesToWk = version.isEmpty || version.contains(targetVersion) @@ -109,7 +109,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId, annotationWithTracings, annotation.version, - version) // Note: this targetVersion is used for the updater buffers, and is overwritten for each update group, see annotation.withNewUpdaters + version // Note: this targetVersion is used for the updater buffers, and is overwritten for each update group, see annotation.withNewUpdaters + ) ?~> "findEditableMappingsForAnnotation.failed" updated <- applyPendingUpdates(annotationWithTracingsAndMappings, annotationId, version, reportChangesToWk) ?~> "applyUpdates.failed" } yield updated @@ -326,13 +327,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss idInfoUpdaterTuples <- Fox.serialCombined(volumeWithEditableMapping) { case (volumeTracing, volumeTracingId) => for { - editableMappingInfo <- getEditableMappingInfoRaw(volumeTracingId, annotationWithTracings.version) + editableMappingInfo <- getEditableMappingInfoRaw(volumeTracingId, annotationWithTracings.version) ?~> "getEditableMappingInfoRaw.failed" updater <- editableMappingUpdaterFor(annotationId, volumeTracingId, volumeTracing, editableMappingInfo.value, currentMaterializedVersion, - targetVersion) + targetVersion) ?~> "EditableMappingUpdater.initialize.failed" } yield (editableMappingInfo.key, (editableMappingInfo.value, updater)) } } yield annotationWithTracings.copy(editableMappingsByTracingId = idInfoUpdaterTuples.toMap) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 7ac752929fa..656eca8407d 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -72,13 +72,14 @@ class EditableMappingController @Inject()( for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) + currentVersion <- annotationService.currentMaterializableVersion(annotationId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version = None) relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( request.body.items.toSet, editableMappingInfo, - tracing.version, + currentVersion, tracingId, remoteFallbackLayer) agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) From f3808076b849f2b1a5b7a6af7ae0d37dbf8f02d7 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 28 Nov 2024 12:03:54 +0100 Subject: [PATCH 231/361] when initializing an annotation in the front-end use annotation.version for that --- .../javascripts/oxalis/model/reducers/save_reducer.ts | 11 +++-------- frontend/javascripts/oxalis/model_initialization.ts | 2 +- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 9ebda96139a..07a5e40e5ea 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -2,7 +2,6 @@ import _ from "lodash"; import update from "immutability-helper"; import type { Action } from "oxalis/model/actions/actions"; import type { OxalisState, SaveState } from 
"oxalis/store"; -import type { SetVersionNumberAction } from "oxalis/model/actions/save_actions"; import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; import { type TracingStats, getStats } from "oxalis/model/accessors/annotation_accessor"; import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; @@ -26,12 +25,6 @@ export function getTotalSaveQueueLength(queueObj: SaveState["queue"]) { return queueObj.length; } -function updateVersion(state: OxalisState, action: SetVersionNumberAction) { - return updateKey(state, "tracing", { - version: action.version, - }); -} - function SaveReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { case "PUSH_SAVE_QUEUE_TRANSACTION": { @@ -169,7 +162,9 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } case "SET_VERSION_NUMBER": { - return updateVersion(state, action); + return updateKey(state, "tracing", { + version: action.version, + }); } case "DISABLE_SAVING": { diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index f27753a70ff..b672d02d5df 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -303,7 +303,7 @@ function initializeTracing( // This method is not called for the View mode const { dataset } = Store.getState(); let annotation = _annotation; - let version = 0; + let version = annotation.version; const { allowedModes, preferredMode } = determineAllowedModes(annotation.settings); _.extend(annotation.settings, { From fe1ce0b5ace1e5135a045a81eaec1b2d144bef74 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 28 Nov 2024 12:12:54 +0100 Subject: [PATCH 232/361] remove max handling of versions --- frontend/javascripts/oxalis/model/sagas/save_saga.ts | 6 +----- frontend/javascripts/oxalis/model_initialization.ts | 4 +--- frontend/javascripts/test/sagas/save_saga.spec.ts | 8 
+------- 3 files changed, 3 insertions(+), 15 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 0d543b93f70..ec9fcb05c24 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -164,11 +164,7 @@ export function* sendSaveRequestToServer(): Saga { const fullSaveQueue = yield* select((state) => state.save.queue); const saveQueue = sliceAppropriateBatchCount(fullSaveQueue); let compactedSaveQueue = compactSaveQueue(saveQueue); - const tracing = yield* select((state) => state.tracing); - const tracings = yield* select((state) => - _.compact([state.tracing.skeleton, ...state.tracing.volumes]), - ); - const version = _.max(tracings.map((t) => t.version).concat([tracing.version])) || 0; + const version = yield* select((state) => state.tracing.version); const annotationId = yield* select((state) => state.tracing.annotationId); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); let versionIncrement; diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index b672d02d5df..03f4b2f7454 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -303,7 +303,7 @@ function initializeTracing( // This method is not called for the View mode const { dataset } = Store.getState(); let annotation = _annotation; - let version = annotation.version; + const version = annotation.version; const { allowedModes, preferredMode } = determineAllowedModes(annotation.settings); _.extend(annotation.settings, { @@ -335,7 +335,6 @@ function initializeTracing( getSegmentationLayers(dataset).length > 0, messages["tracing.volume_missing_segmentation"], ); - version = Math.max(version, volumeTracing.version); Store.dispatch(initializeVolumeTracingAction(volumeTracing)); }); @@ -347,7 
+346,6 @@ function initializeTracing( // To generate a huge amount of dummy trees, use: // import generateDummyTrees from "./model/helpers/generate_dummy_trees"; // tracing.trees = generateDummyTrees(1, 200000); - version = Math.max(version, skeletonTracing.version); Store.dispatch(initializeSkeletonTracingAction(skeletonTracing)); } Store.dispatch(setVersionNumberAction(version)); diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 90cf88d7579..963effa2e4a 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -295,13 +295,7 @@ test("SaveSaga should remove the correct update actions", (t) => { const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next([ - { - version: LAST_VERSION, - type: TRACING_TYPE, - tracingId, - }, - ]); + saga.next(LAST_VERSION); saga.next(annotationId); saga.next(TRACINGSTORE_URL); expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(3))); From 3fbdf013bbb5bbbc3e25f7b1ed7be20962b224f9 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 28 Nov 2024 12:42:32 +0100 Subject: [PATCH 233/361] wip: allow updating annotation name from dashboard --- app/controllers/AnnotationController.scala | 2 - .../WKRemoteTracingStoreController.scala | 47 ++++++++++--------- .../controllers/TSAnnotationController.scala | 39 +++++++++++++-- ...alableminds.webknossos.tracingstore.routes | 1 + 4 files changed, 62 insertions(+), 27 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 670d0ced13a..63b5ca0883f 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -16,7 +16,6 @@ import models.analytics.{AnalyticsService, CreateAnnotationEvent, OpenAnnotation import models.annotation.AnnotationState.Cancelled import models.annotation._ import 
models.dataset.{DatasetDAO, DatasetService} -import models.organization.OrganizationDAO import models.project.ProjectDAO import models.task.TaskDAO import models.team.{TeamDAO, TeamService} @@ -40,7 +39,6 @@ class AnnotationController @Inject()( annotationLayerDAO: AnnotationLayerDAO, taskDAO: TaskDAO, userDAO: UserDAO, - organizationDAO: OrganizationDAO, datasetDAO: DatasetDAO, datasetService: DatasetService, annotationService: AnnotationService, diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index cb47191a36d..2a1463ff952 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -55,28 +55,31 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore Action.async(validateProto[AnnotationProto]) { implicit request => // tracingstore only sends this request after ensuring write access implicit val ctx: DBAccessContext = GlobalAccessContext - for { - annotationIdValidated <- ObjectId.fromString(annotationId) - existingLayers <- annotationLayerDAO.findAnnotationLayersFor(annotationIdValidated) - newLayersProto = request.body.annotationLayers - existingLayerIds = existingLayers.map(_.tracingId).toSet - newLayerIds = newLayersProto.map(_.tracingId).toSet - layerIdsToDelete = existingLayerIds.diff(newLayerIds) - layerIdsToUpdate = existingLayerIds.intersect(newLayerIds) - layerIdsToInsert = newLayerIds.diff(existingLayerIds) - _ <- Fox.serialCombined(layerIdsToDelete.toList)( - annotationLayerDAO.deleteOneByTracingId(annotationIdValidated, _)) - _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToInsert.contains(l.tracingId))) { layerProto => - annotationLayerDAO.insertOne(annotationIdValidated, AnnotationLayer.fromProto(layerProto)) - } - _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToUpdate.contains(l.tracingId)))(l => - annotationLayerDAO.updateName(annotationIdValidated, 
l.tracingId, l.name)) - // Layer stats are ignored here, they are sent eagerly when saving updates - _ <- annotationDAO.updateName(annotationIdValidated, - request.body.name.getOrElse(AnnotationDefaults.defaultName)) - _ <- annotationDAO.updateDescription(annotationIdValidated, - request.body.description.getOrElse(AnnotationDefaults.defaultDescription)) - } yield Ok + tracingStoreService.validateAccess(name, key) { _ => + for { + annotationIdValidated <- ObjectId.fromString(annotationId) + existingLayers <- annotationLayerDAO.findAnnotationLayersFor(annotationIdValidated) + newLayersProto = request.body.annotationLayers + existingLayerIds = existingLayers.map(_.tracingId).toSet + newLayerIds = newLayersProto.map(_.tracingId).toSet + layerIdsToDelete = existingLayerIds.diff(newLayerIds) + layerIdsToUpdate = existingLayerIds.intersect(newLayerIds) + layerIdsToInsert = newLayerIds.diff(existingLayerIds) + _ <- Fox.serialCombined(layerIdsToDelete.toList)( + annotationLayerDAO.deleteOneByTracingId(annotationIdValidated, _)) + _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToInsert.contains(l.tracingId))) { layerProto => + annotationLayerDAO.insertOne(annotationIdValidated, AnnotationLayer.fromProto(layerProto)) + } + _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToUpdate.contains(l.tracingId)))(l => + annotationLayerDAO.updateName(annotationIdValidated, l.tracingId, l.name)) + // Layer stats are ignored here, they are sent eagerly when saving updates + _ <- annotationDAO.updateName(annotationIdValidated, + request.body.name.getOrElse(AnnotationDefaults.defaultName)) + _ <- annotationDAO.updateDescription( + annotationIdValidated, + request.body.description.getOrElse(AnnotationDefaults.defaultDescription)) + } yield Ok + } } def handleTracingUpdateReport(name: String, key: String): Action[AnnotationUpdatesReport] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index bebe002531c..7f5f7afc577 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -4,23 +4,39 @@ import collections.SequenceUtils import com.google.inject.Inject import com.scalableminds.util.geometry.BoundingBox import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.Annotation.{AnnotationLayerProto, AnnotationLayerTypeProto, AnnotationProto} +import com.scalableminds.webknossos.datastore.Annotation.{ + AnnotationLayerProto, + AnnotationLayerTypeProto, + AnnotationProto +} import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.TracingStoreAccessTokenService -import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, ResetToBaseAnnotationAction, TSAnnotationService, UpdateActionGroup} +import com.scalableminds.webknossos.tracingstore.annotation.{ + AnnotationTransactionService, + ResetToBaseAnnotationAction, + TSAnnotationService, + UpdateActionGroup, + UpdateMetadataAnnotationAction +} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import net.liftweb.common.{Empty, Failure, Full} import play.api.i18n.Messages -import play.api.libs.json.Json +import play.api.libs.json.{Json, OFormat} import play.api.mvc.{Action, 
AnyContent, PlayBodyParsers} import scala.concurrent.ExecutionContext +case class UpdateAnnotationMetadataParameters(name: Option[String], description: Option[String]) + +object UpdateAnnotationMetadataParameters { + implicit val jsonFormat: OFormat[UpdateAnnotationMetadataParameters] = Json.format[UpdateAnnotationMetadataParameters] +} + class TSAnnotationController @Inject()( accessTokenService: TracingStoreAccessTokenService, slackNotificationService: TSSlackNotificationService, @@ -55,6 +71,23 @@ class TSAnnotationController @Inject()( } } + def updateMetadata(annotationId: String): Action[UpdateAnnotationMetadataParameters] = + Action.async(validateJson[UpdateAnnotationMetadataParameters]) { implicit request => + log() { + logTime(slackNotificationService.noticeSlowRequest) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { + for { + currentVersion <- annotationService.currentMaterializableVersion(annotationId) + _ <- annotationTransactionService.handleSingleUpdateAction( + annotationId, + currentVersion, + UpdateMetadataAnnotationAction(name = request.body.name, description = request.body.description)) + } yield Ok + } + } + } + } + def updateActionLog(annotationId: String, newestVersion: Option[Long] = None, oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 93205b4153b..a7fca5c6c7c 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -8,6 +8,7 @@ GET /health POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(annotationId: String) GET /annotation/:annotationId 
@com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(annotationId: String, version: Option[Long]) POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(annotationId: String) +POST /annotation/:annotationId/updateMetadata @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateMetadata(annotationId: String) GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) From f48f080c83f1a0e73f5e4b539bbc36af29337c33 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 28 Nov 2024 15:21:54 +0100 Subject: [PATCH 234/361] migration use passed pg password, add ETR, remove debug logging --- .../connections.py | 2 +- .../migration.py | 19 +++++++++++-------- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/connections.py b/tools/migration-unified-annotation-versioning/connections.py index d5b85704ac9..367a70c7cec 100644 --- a/tools/migration-unified-annotation-versioning/connections.py +++ b/tools/migration-unified-annotation-versioning/connections.py @@ -34,7 +34,7 @@ def assert_grpc_success(reply): def connect_to_postgres(postgres_config: str): parsed = parse_connection_string(postgres_config) password = os.environ.get("PG_PASSWORD", "postgres") - return psycopg2.connect(host=parsed["host"], port=parsed["port"], database=parsed["database"], user=parsed["user"], password="postgres") + return 
psycopg2.connect(host=parsed["host"], port=parsed["port"], database=parsed["database"], user=parsed["user"], password=password) def parse_connection_string(connection_string: str) -> Dict[str, Any]: diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 9aad4282193..cd73d9e4428 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -16,7 +16,7 @@ import VolumeTracing_pb2 as Volume import SkeletonTracing_pb2 as Skeleton import Annotation_pb2 as AnnotationProto -from utils import log_since, batch_range +from utils import log_since, batch_range, humanize_time_diff from connections import connect_to_fossildb, connect_to_postgres, assert_grpc_success logger = logging.getLogger(__name__) @@ -42,7 +42,7 @@ def __init__(self, args): self.total_count = None def run(self): - before = time.time() + self.before = time.time() annotations = self.read_annotation_list() self.done_count = 0 self.failure_count = 0 @@ -52,7 +52,7 @@ def run(self): executor.map(self.migrate_annotation, annotations) if self.failure_count > 0: logger.info(f"There were failures for {self.failure_count} annotations. 
See logs for details.") - log_since(before, "Migrating all the things") + log_since(self.before, "Migrating all the things") def migrate_annotation(self, annotation): logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") @@ -62,7 +62,7 @@ def migrate_annotation(self, annotation): layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) self.create_and_save_annotation_proto(annotation, materialized_versions) - log_since(before, f"Migrating annotation {annotation['_id']}", self.get_progress()) + log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) except Exception: logger.exception(f"Exception while migrating annotation {annotation['_id']}:") with self.failure_count_lock: @@ -240,7 +240,6 @@ def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVers volume.ParseFromString(value_bytes) volume.version = new_version if tracing_id in mapping_id_map: - print(f"setting mappingName to tracing id {tracing_id}") volume.mappingName = tracing_id value_bytes = volume.SerializeToString() materialized_versions_unified.append(new_version) @@ -284,7 +283,6 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: new_key = key if transform_key is not None: new_key = transform_key(key) - print(f"transformed key {key} to {new_key}") for version, value in zip(get_versions_reply.versions, get_versions_reply.values): new_version = layer_version_mapping[tracing_id][version] self.save_bytes(collection, new_key, new_version, value) @@ -317,7 +315,6 @@ def migrate_editable_mapping_info(self, tracing_id: str, mapping_id: str, layer_ return materialized_versions_unified def migrate_editable_mapping_agglomerate_to_graph(self, tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping): - 
print(f"migrate_editable_mapping_agglomerate_to_graph, traicng id {tracing_id} mapping id {mapping_id}") self.migrate_all_versions_and_keys_with_prefix( "editableMappingsAgglomerateToGraph", mapping_id, @@ -408,4 +405,10 @@ def get_progress(self) -> str: with self.done_count_lock: done_count = self.done_count percentage = 100.0 * done_count / self.total_count - return f". ({done_count}/{self.total_count} = {percentage:.1f}% are done)" + duration = time.time() - self.before + if done_count > 0: + etr = duration / done_count * (self.total_count - done_count) + etr_formatted = f" ETR {humanize_time_diff(etr)})" + else: + etr_formatted = "" + return f". ({done_count}/{self.total_count} = {percentage:.1f}% done.{etr_formatted}" From 402403a7c618dd95a3ac8da5cc5a4b1b035c1d65 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 28 Nov 2024 19:10:12 +0100 Subject: [PATCH 235/361] fix version handling in model initialization etc --- frontend/javascripts/admin/admin_rest_api.ts | 29 +++++----------- .../model/actions/annotation_actions.ts | 3 +- .../oxalis/model/helpers/proto_helpers.ts | 2 +- .../model/reducers/annotation_reducer.ts | 4 +-- .../oxalis/model/reducers/reducer_helpers.ts | 6 ++-- .../oxalis/model/sagas/save_saga.ts | 8 +++-- .../oxalis/model_initialization.ts | 33 ++++++++++++------- .../javascripts/oxalis/view/version_list.tsx | 4 +-- .../skeletontracing_server_objects.ts | 1 - .../fixtures/tasktracing_server_objects.ts | 1 - .../fixtures/volumetracing_server_objects.ts | 1 - .../test/geometries/skeleton.spec.ts | 5 ++- frontend/javascripts/types/api_flow_types.ts | 3 +- 13 files changed, 52 insertions(+), 48 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index e3113814c73..bbeec5e7b74 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -80,9 +80,9 @@ import type { import { V3 } from "libs/mjs"; import { 
enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_view_configuration_defaults"; import { + parseProtoAnnotation, parseProtoListOfLong, parseProtoTracing, - parseProtoTracingStoreAnnotation, serializeProtoListOfLong, } from "oxalis/model/helpers/proto_helpers"; import type { RequestOptions } from "libs/request"; @@ -622,21 +622,6 @@ export async function getMaybeOutdatedAnnotationInformation( return annotation; } -// todop: not used anywhere yet -export async function getNewestAnnotationInformation( - annotationId: string, - tracingstoreUrl: string, -): Promise { - const infoUrl = `${tracingstoreUrl}/tracings/annotation/${annotationId}`; - // TODOp adjust return type and implement proto type in frontend - const annotationWithMessages = await Request.receiveJSON(infoUrl); - - // Extract the potential messages property before returning the task to avoid - // failing e2e tests in annotations.e2e.ts - const { messages: _messages, ...annotation } = annotationWithMessages; - return annotation; -} - export async function getAnnotationCompoundInformation( annotationId: string, annotationType: APICompoundType, @@ -749,7 +734,7 @@ export async function acquireAnnotationMutex( export async function getTracingForAnnotationType( annotation: APIAnnotation, annotationLayerDescriptor: AnnotationLayerDescriptor, - version?: number | null | undefined, // TODOp: Use this parameter + version?: number | null | undefined, ): Promise { const { tracingId, typ } = annotationLayerDescriptor; const tracingType = typ.toLowerCase() as "skeleton" | "volume"; @@ -806,7 +791,7 @@ export function getUpdateActionLog( }); } -export function getNewestVersionForTracing( +export function getNewestVersionForAnnotation( tracingStoreUrl: string, annotationId: string, ): Promise { @@ -817,13 +802,15 @@ export function getNewestVersionForTracing( ); } -export async function getNewestVersionOfTracing( +export async function getAnnotationProto( tracingStoreUrl: string, annotationId: 
string, + version?: number | null | undefined, ): Promise { + const possibleVersionString = version != null ? `&version=${version}` : ""; const annotationArrayBuffer = await doWithToken((token) => Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/annotation/${annotationId}?token=${token}`, + `${tracingStoreUrl}/tracings/annotation/${annotationId}?token=${token}${possibleVersionString}`, { headers: { Accept: "application/x-protobuf", @@ -831,7 +818,7 @@ export async function getNewestVersionOfTracing( }, ), ); - return parseProtoTracingStoreAnnotation(annotationArrayBuffer); + return parseProtoAnnotation(annotationArrayBuffer); } export function hasSegmentIndexInDataStore( diff --git a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts index a362151b945..68560d94b46 100644 --- a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts @@ -8,6 +8,7 @@ import type { EditableLayerProperties, } from "types/api_flow_types"; import type { + Annotation, MappingType, UserBoundingBox, UserBoundingBoxWithoutId, @@ -95,7 +96,7 @@ export const AllUserBoundingBoxActions = [ "DELETE_USER_BOUNDING_BOX", "ADD_USER_BOUNDING_BOXES", ]; -export const initializeAnnotationAction = (annotation: APIAnnotation) => +export const initializeAnnotationAction = (annotation: Annotation) => ({ type: "INITIALIZE_ANNOTATION", annotation, diff --git a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts index 3af4f4e3c13..0a92dc310f2 100644 --- a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts @@ -67,7 +67,7 @@ export function parseProtoListOfLong( }).items; } -export function parseProtoTracingStoreAnnotation(annotationArrayBuffer: ArrayBuffer): any { +export function 
parseProtoAnnotation(annotationArrayBuffer: ArrayBuffer): any { const protoRoot = Root.fromJSON(AnnotationProto); const messageType = protoRoot.lookupType(`${PROTO_PACKAGE}.AnnotationProto`); const message = messageType.decode(new Uint8Array(annotationArrayBuffer)); diff --git a/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts b/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts index 0bc85460aa5..11ea81b47e3 100644 --- a/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts @@ -6,7 +6,6 @@ import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; import { maybeGetSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import * as Utils from "libs/utils"; import { getDisplayedDataExtentInPlaneMode } from "oxalis/model/accessors/view_mode_accessor"; -import { convertServerAnnotationToFrontendAnnotation } from "oxalis/model/reducers/reducer_helpers"; import _ from "lodash"; import { getAdditionalCoordinatesAsString } from "../accessors/flycam_accessor"; import { getMeshesForAdditionalCoordinates } from "../accessors/volumetracing_accessor"; @@ -75,8 +74,7 @@ const maybeAddAdditionalCoordinatesToMeshState = ( function AnnotationReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { case "INITIALIZE_ANNOTATION": { - const annotationInfo = convertServerAnnotationToFrontendAnnotation(action.annotation); - return updateTracing(state, annotationInfo); + return updateTracing(state, action.annotation); } case "SET_ANNOTATION_NAME": { diff --git a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts index 837813f2e58..1a5f73098ba 100644 --- a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts @@ -84,7 +84,10 @@ export function 
convertPointToVecInBoundingBox(boundingBox: ServerBoundingBox): topLeft: Utils.point3ToVector3(boundingBox.topLeft), }; } -export function convertServerAnnotationToFrontendAnnotation(annotation: APIAnnotation): Annotation { +export function convertServerAnnotationToFrontendAnnotation( + annotation: APIAnnotation, + version: number, +): Annotation { const { id: annotationId, visibility, @@ -99,7 +102,6 @@ export function convertServerAnnotationToFrontendAnnotation(annotation: APIAnnot othersMayEdit, isLockedByOwner, annotationLayers, - version, } = annotation; const restrictions = { ...annotation.restrictions, ...annotation.settings }; return { diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index ec9fcb05c24..b32a123c865 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -1,4 +1,4 @@ -import { doWithToken, getNewestVersionForTracing } from "admin/admin_rest_api"; +import { doWithToken, getNewestVersionForAnnotation } from "admin/admin_rest_api"; import Date from "libs/date"; import ErrorHandling from "libs/error_handling"; import type { RequestOptionsWithData } from "libs/request"; @@ -488,7 +488,11 @@ function* watchForSaveConflicts() { maybeSkeletonTracing, ]); - const versionOnServer = yield* call(getNewestVersionForTracing, tracingStoreUrl, annotationId); + const versionOnServer = yield* call( + getNewestVersionForAnnotation, + tracingStoreUrl, + annotationId, + ); for (const tracing of tracings) { // Read the tracing version again from the store, since the diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 03f4b2f7454..d8dd1cf3606 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -9,6 +9,7 @@ import type { ServerEditableMapping, APICompoundType, 
APISegmentationLayer, + APITracingStoreAnnotation, } from "types/api_flow_types"; import { computeDataTexturesSetup, @@ -42,7 +43,7 @@ import { getDatasetViewConfiguration, getEditableMappingInfo, getAnnotationCompoundInformation, - getNewestVersionOfTracing, + getAnnotationProto, } from "admin/admin_rest_api"; import { dispatchMaybeFetchMeshFilesAsync, @@ -104,7 +105,10 @@ import { PricingPlanEnum, isFeatureAllowedByPricingPlan, } from "admin/organization/pricing_plan_utils"; -import { convertServerAdditionalAxesToFrontEnd } from "./model/reducers/reducer_helpers"; +import { + convertServerAdditionalAxesToFrontEnd, + convertServerAnnotationToFrontendAnnotation, +} from "./model/reducers/reducer_helpers"; import { setVersionNumberAction } from "./model/actions/save_actions"; export const HANDLED_ERROR = "error_was_handled"; @@ -125,6 +129,7 @@ export async function initialize( > { Store.dispatch(setControlModeAction(initialCommandType.type)); let annotation: APIAnnotation | null | undefined; + let annotationProto: APITracingStoreAnnotation | null | undefined; let datasetId: string; if (initialCommandType.type === ControlModeEnum.TRACE) { @@ -133,12 +138,12 @@ export async function initialize( annotation = await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType); } else { let maybeOutdatedAnnotation = await getMaybeOutdatedAnnotationInformation(annotationId); - const annotationFromTracingStore = await getNewestVersionOfTracing( + annotationProto = await getAnnotationProto( maybeOutdatedAnnotation.tracingStore.url, maybeOutdatedAnnotation.id, + version, ); - // TODOP: potential updating the version of the annotation is needed. It is at least not done here. 
- const layersWithStats = annotationFromTracingStore.annotationLayers.map((layer) => { + const layersWithStats = annotationProto.annotationLayers.map((layer) => { return { tracingId: layer.tracingId, name: layer.name, @@ -147,8 +152,8 @@ export async function initialize( }); const completeAnnotation = { ...maybeOutdatedAnnotation, - name: annotationFromTracingStore.name, - description: annotationFromTracingStore.description, + name: annotationProto.name, + description: annotationProto.description, annotationLayers: layersWithStats, }; annotation = completeAnnotation; @@ -224,7 +229,11 @@ export async function initialize( annotation.tracingStore.url, serverVolumeTracings, ); - initializeTracing(annotation, serverTracings, editableMappings); + if (annotationProto == null) { + // Satisfy TS. annotationProto should always exist if annotation exists. + throw new Error("Annotation protobuf should not be null."); + } + initializeAnnotation(annotation, annotationProto.version, serverTracings, editableMappings); } else { // In view only tracings we need to set the view mode too. 
const { allowedModes } = determineAllowedModes(); @@ -295,15 +304,15 @@ function maybeWarnAboutUnsupportedLayers(layers: Array): void { } } -function initializeTracing( +function initializeAnnotation( _annotation: APIAnnotation, + version: number, serverTracings: Array, editableMappings: Array, ) { // This method is not called for the View mode const { dataset } = Store.getState(); let annotation = _annotation; - const version = annotation.version; const { allowedModes, preferredMode } = determineAllowedModes(annotation.settings); _.extend(annotation.settings, { @@ -329,7 +338,9 @@ function initializeTracing( }; } - Store.dispatch(initializeAnnotationAction(annotation)); + Store.dispatch( + initializeAnnotationAction(convertServerAnnotationToFrontendAnnotation(annotation, version)), + ); getServerVolumeTracings(serverTracings).map((volumeTracing) => { ErrorHandling.assert( getSegmentationLayers(dataset).length > 0, diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index b709d8f4d91..78ad6efcac2 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -7,7 +7,7 @@ import { chunkIntoTimeWindows } from "libs/utils"; import { getUpdateActionLog, downloadAnnotation, - getNewestVersionForTracing, + getNewestVersionForAnnotation, } from "admin/admin_rest_api"; import { handleGenericError } from "libs/error_handling"; import { @@ -165,7 +165,7 @@ function VersionList(props: Props) { const annotationId = useSelector((state: OxalisState) => state.tracing.annotationId); const newestVersion = useFetch( - () => getNewestVersionForTracing(tracingStoreUrl, annotationId), + () => getNewestVersionForAnnotation(tracingStoreUrl, annotationId), null, [annotationId], ); diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index dab6077ad8f..37879615786 100644 
--- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -178,7 +178,6 @@ export const annotation: APIAnnotation = { allowDownload: true, allowSave: true, }, - version: 0, annotationLayers: [ { name: AnnotationLayerType.Skeleton, diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 41d707d649c..673b1780e7d 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -74,7 +74,6 @@ export const annotation: APIAnnotation = { name: "", description: "", stats: {}, - version: 0, typ: "Task", task: { id: "5b1fd1cb97000027049c67ec", diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index f9b489b0e68..c931ad7a9b6 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -74,7 +74,6 @@ export const annotation: APIAnnotation = { datasetId: "66f3c82966010034942e9740", description: "", state: "Active", - version: 0, id: "598b52293c00009906f043e7", visibility: "Internal", modified: 1529066010230, diff --git a/frontend/javascripts/test/geometries/skeleton.spec.ts b/frontend/javascripts/test/geometries/skeleton.spec.ts index f5fcdda4d96..bc81d95f537 100644 --- a/frontend/javascripts/test/geometries/skeleton.spec.ts +++ b/frontend/javascripts/test/geometries/skeleton.spec.ts @@ -10,6 +10,7 @@ import test from "ava"; import type { Vector3 } from "oxalis/constants"; import type { OxalisState } from "oxalis/store"; import { tracing, annotation } from "../fixtures/skeletontracing_server_objects"; +import { convertServerAnnotationToFrontendAnnotation } from "oxalis/model/reducers/reducer_helpers"; 
mockRequire("app", { currentUser: { @@ -38,7 +39,9 @@ test.before((t) => { const resolution = 0; tracing.trees = []; delete tracing.activeNodeId; - Store.dispatch(initializeAnnotationAction(annotation)); + Store.dispatch( + initializeAnnotationAction(convertServerAnnotationToFrontendAnnotation(annotation, 0)), + ); Store.dispatch(initializeSkeletonTracingAction(tracing)); // Create 20 trees with 100 nodes each diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 5ee43d2f84c..b3c0cd686bd 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -557,7 +557,6 @@ type APIAnnotationBase = APIAnnotationInfo & { readonly owner?: APIUserBase; // This `user` attribute is deprecated and should not be used, anymore. It only exists to satisfy e2e type checks readonly user?: APIUserBase; - readonly version: number; readonly contributors: APIUserBase[]; readonly othersMayEdit: boolean; }; @@ -587,6 +586,7 @@ export type APITracingStoreAnnotation = { name: string; description: string; version: number; + earliestAccessibleVersion: number; annotationLayers: APITracingStoreAnnotationLayer[]; }; @@ -897,6 +897,7 @@ export type ServerEditableMapping = { // The id of the volume tracing the editable mapping belongs to tracingId: string; }; + export type APIMeshFile = { meshFileName: string; mappingName?: string | null | undefined; From be8cdcdc2761584fc06738678e0152796d15c7b8 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 11:50:44 +0100 Subject: [PATCH 236/361] adapt the types so that update actions that need isolation cannot be grouped with others --- .../explorative_annotations_view.tsx | 2 - .../model/actions/annotation_actions.ts | 1 - .../oxalis/model/actions/save_actions.ts | 32 ++++++++++-- .../model/bucket_data_handling/pushqueue.ts | 4 +- .../bucket_data_handling/wkstore_adapter.ts | 4 +- .../compaction/compact_toggle_actions.ts | 8 +-- 
.../compaction/compact_update_actions.ts | 10 ++-- .../oxalis/model/reducers/save_reducer.ts | 7 +-- .../oxalis/model/sagas/proofread_saga.ts | 12 ++--- .../oxalis/model/sagas/save_saga.ts | 9 ++-- .../model/sagas/skeletontracing_saga.ts | 10 ++-- .../oxalis/model/sagas/update_actions.ts | 11 +++- .../oxalis/model/sagas/volumetracing_saga.tsx | 6 +-- .../left-border-tabs/layer_settings_tab.tsx | 16 +++--- .../modals/add_volume_layer_modal.tsx | 50 +++++++++---------- .../javascripts/oxalis/view/version_list.tsx | 7 ++- .../javascripts/test/helpers/saveHelpers.ts | 12 +++-- .../test/reducers/save_reducer.spec.ts | 4 +- .../test/sagas/skeletontracing_saga.spec.ts | 5 +- 19 files changed, 121 insertions(+), 89 deletions(-) diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index e26cb80fc4b..9576a2324a3 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -66,8 +66,6 @@ import { RenderToPortal } from "oxalis/view/layouting/portal_utils"; import { ActiveTabContext, RenderingTabContext } from "./dashboard_contexts"; import type { SearchProps } from "antd/lib/input"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; -import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; -import { updateMetadataOfAnnotation } from "oxalis/model/sagas/update_actions"; const { Search } = Input; const pageLength: number = 1000; diff --git a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts index 68560d94b46..7879b2f17de 100644 --- a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts @@ -1,5 +1,4 @@ import type { - APIAnnotation, APIAnnotationVisibility, APIDataLayer, APIDataset, diff 
--git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index 64f2c04eadc..f85453d9387 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -1,11 +1,20 @@ import type { Dispatch } from "redux"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { + UpdateAction, + UpdateActionWithoutIsolationRequirement, + UpdateActionWithIsolationRequirement, +} from "oxalis/model/sagas/update_actions"; import { getUid } from "libs/uid_generator"; import Date from "libs/date"; import Deferred from "libs/async/deferred"; export type SaveQueueType = "skeleton" | "volume" | "mapping"; -export type PushSaveQueueTransaction = ReturnType; +export type PushSaveQueueTransaction = { + type: "PUSH_SAVE_QUEUE_TRANSACTION"; + items: UpdateAction[]; + tracingId: string; + transactionId: string; +}; type SaveNowAction = ReturnType; export type ShiftSaveQueueAction = ReturnType; type DiscardSaveQueuesAction = ReturnType; @@ -28,11 +37,14 @@ export type SaveAction = | RedoAction | DisableSavingAction; +// The action creators pushSaveQueueTransaction and pushSaveQueueTransactionIsolated +// are typed so that update actions that need isolation are isolated in a group each. +// From this point on, we can assume that the groups fulfil the isolation requirement. 
export const pushSaveQueueTransaction = ( - items: Array, + items: Array, tracingId: string, transactionId: string = getUid(), -) => +): PushSaveQueueTransaction => ({ type: "PUSH_SAVE_QUEUE_TRANSACTION", items, @@ -40,6 +52,18 @@ export const pushSaveQueueTransaction = ( transactionId, }) as const; +export const pushSaveQueueTransactionIsolated = ( + item: UpdateActionWithIsolationRequirement, + tracingId: string, + transactionId: string = getUid(), +): PushSaveQueueTransaction => + ({ + type: "PUSH_SAVE_QUEUE_TRANSACTION", + items: [item], + tracingId, + transactionId, + }) as const; + export const saveNowAction = () => ({ type: "SAVE_NOW", diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts index c5e1294bf6e..9151402b388 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts @@ -5,7 +5,7 @@ import { createDebouncedAbortableParameterlessCallable } from "libs/async/deboun import { call } from "redux-saga/effects"; import Store from "oxalis/store"; import { pushSaveQueueTransaction } from "../actions/save_actions"; -import type { UpdateAction } from "../sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "../sagas/update_actions"; import { AsyncFifoResolver } from "libs/async/async_fifo_resolver"; import { escalateErrorAction } from "../actions/actions"; @@ -35,7 +35,7 @@ class PushQueue { // Helper to ensure the Store's save queue is filled in the correct // order. 
- private fifoResolver = new AsyncFifoResolver(); + private fifoResolver = new AsyncFifoResolver(); // If the timestamp is defined, it encodes when the first bucket // was added to the PushQueue that will be part of the next (to be created) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index 15a91c259b3..595f6d7d53c 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -13,7 +13,7 @@ import { needsLocalHdf5Mapping, } from "oxalis/model/accessors/volumetracing_accessor"; import { parseMaybe } from "libs/utils"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; import { updateBucket } from "oxalis/model/sagas/update_actions"; import ByteArraysToLz4Base64Worker from "oxalis/workers/byte_arrays_to_lz4_base64.worker"; import DecodeFourBitWorker from "oxalis/workers/decode_four_bit.worker"; @@ -276,7 +276,7 @@ function sliceBufferIntoPieces( export async function createCompressedUpdateBucketActions( batch: Array, -): Promise { +): Promise { return _.flatten( await Promise.all( _.chunk(batch, COMPRESSION_BATCH_SIZE).map(async (batchSubset) => { diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts index cc7ee5af199..159b6511c17 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts @@ -6,7 +6,7 @@ import _ from "lodash"; import type { SkeletonTracing, Tree, TreeGroup, TreeMap, VolumeTracing } from "oxalis/store"; import type { - UpdateAction, + UpdateActionWithoutIsolationRequirement, 
UpdateTreeVisibilityUpdateAction, } from "oxalis/model/sagas/update_actions"; import { updateTreeGroupVisibility, updateTreeVisibility } from "oxalis/model/sagas/update_actions"; @@ -137,9 +137,9 @@ function isCommonAncestorToggler( } export default function compactToggleActions( - updateActions: UpdateAction[], + updateActions: UpdateActionWithoutIsolationRequirement[], tracing: SkeletonTracing | VolumeTracing, -): UpdateAction[] { +): UpdateActionWithoutIsolationRequirement[] { if (tracing.type !== "skeleton") { // Don't do anything if this is not a skeleton tracing return updateActions; @@ -148,7 +148,7 @@ export default function compactToggleActions( const skeletonTracing = tracing; // Extract the toggleActions which we are interested in - const [toggleActions, remainingActions] = _.partition( + const [toggleActions, remainingActions] = _.partition( updateActions, (ua) => ua.name === "updateTreeVisibility", ); diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts index b16e490e5e8..886c384bde0 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts @@ -6,7 +6,7 @@ import type { DeleteEdgeUpdateAction, DeleteNodeUpdateAction, DeleteTreeUpdateAction, - UpdateAction, + UpdateActionWithoutIsolationRequirement, } from "oxalis/model/sagas/update_actions"; import { moveTreeComponent } from "oxalis/model/sagas/update_actions"; import compactToggleActions from "oxalis/model/helpers/compaction/compact_toggle_actions"; @@ -17,7 +17,7 @@ function cantor(a: number, b: number): number { return 0.5 * (a + b) * (a + b + 1) + b; } -function compactMovedNodesAndEdges(updateActions: Array) { +function compactMovedNodesAndEdges(updateActions: Array) { // This function detects tree merges and splits. 
// It does so by identifying nodes and edges that were deleted in one tree only to be created // in another tree again afterwards. @@ -135,7 +135,7 @@ function compactMovedNodesAndEdges(updateActions: Array) { return compactedActions; } -function compactDeletedTrees(updateActions: Array) { +function compactDeletedTrees(updateActions: Array) { // This function detects deleted trees. // Instead of sending deleteNode/deleteEdge update actions for all nodes of a deleted tree, // just one deleteTree update action is sufficient for the server to delete the tree. @@ -155,9 +155,9 @@ function compactDeletedTrees(updateActions: Array) { } export default function compactUpdateActions( - updateActions: Array, + updateActions: Array, tracing: SkeletonTracing | VolumeTracing, -): Array { +): Array { return compactToggleActions( compactDeletedTrees(compactMovedNodesAndEdges(updateActions)), tracing, diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 07a5e40e5ea..f7b8b434e46 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -183,8 +183,9 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } } -const layerIndependentActions = new Set([ - // TODOp: sync this with the backend. The backend currently has only two such actions that have this requirement. +const LAYER_INDEPENDENT_ACTIONS = new Set([ + // todop: Related to IsolationSensitiveAction in backend? + // todop: sync this with the backend. The backend currently has only two such actions that have this requirement. 
"updateTdCamera", "revertToVersion", "addLayerToAnnotation", @@ -198,7 +199,7 @@ export function addTracingIdToActions( tracingId: string, ): Array { return actions.map((action) => { - if (layerIndependentActions.has(action.name)) { + if (LAYER_INDEPENDENT_ACTIONS.has(action.name)) { return action as UpdateAction; } return { diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 13b61cdc7d1..abc8f416455 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -37,7 +37,7 @@ import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; import { splitAgglomerate, mergeAgglomerate, - type UpdateAction, + type UpdateActionWithoutIsolationRequirement, } from "oxalis/model/sagas/update_actions"; import { Model, api, Store } from "oxalis/singletons"; import { @@ -395,7 +395,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ - const items: UpdateAction[] = []; + const items: UpdateActionWithoutIsolationRequirement[] = []; if (action.type === "MERGE_TREES") { if (sourceAgglomerateId === targetAgglomerateId) { Toast.error("Segments that should be merged need to be in different agglomerates."); @@ -533,7 +533,7 @@ function* performMinCut( editableMappingId: string, volumeTracingId: string, sourceTree: Tree | null, - items: UpdateAction[], + items: UpdateActionWithoutIsolationRequirement[], ): Saga { if (sourceAgglomerateId !== targetAgglomerateId) { Toast.error( @@ -599,7 +599,7 @@ function* performCutFromNeighbors( editableMappingId: string, volumeTracingId: string, sourceTree: Tree | null | undefined, - items: UpdateAction[], + items: UpdateActionWithoutIsolationRequirement[], ): Saga< { didCancel: false; neighborInfo: NeighborInfo } | { didCancel: true; 
neighborInfo?: null } > { @@ -717,7 +717,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ - const items: UpdateAction[] = []; + const items: UpdateActionWithoutIsolationRequirement[] = []; if (action.type === "PROOFREAD_MERGE") { if (sourceAgglomerateId === targetAgglomerateId) { @@ -928,7 +928,7 @@ function* handleProofreadCutFromNeighbors(action: Action) { /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ - const items: UpdateAction[] = []; + const items: UpdateActionWithoutIsolationRequirement[] = []; const { didCancel, neighborInfo } = yield* call( performCutFromNeighbors, diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index b32a123c865..0f844b61ce6 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -38,7 +38,10 @@ import { SAVE_RETRY_WAITING_TIME, } from "oxalis/model/sagas/save_saga_constants"; import { diffSkeletonTracing } from "oxalis/model/sagas/skeletontracing_saga"; -import type { UpdateAction, UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; +import type { + UpdateActionWithoutIsolationRequirement, + UpdateActionWithTracingId, +} from "oxalis/model/sagas/update_actions"; import { diffVolumeTracing } from "oxalis/model/sagas/volumetracing_saga"; import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga"; import { Model } from "oxalis/singletons"; @@ -346,8 +349,8 @@ export function performDiffTracing( flycam: Flycam, _prevTdCamera: CameraData, _tdCamera: CameraData, -): Array { - let actions: Array = []; +): Array { + let actions: Array = []; if (prevTracing.type === "skeleton" && tracing.type === "skeleton") { actions = actions.concat( diff --git 
a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index 2ba3cc4ec0f..3bb6c8ca5bf 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -14,7 +14,7 @@ import { race, } from "typed-redux-saga"; import { select } from "oxalis/model/sagas/effect-generators"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; import { TreeTypeEnum } from "oxalis/constants"; import { createEdge, @@ -482,7 +482,7 @@ function* diffNodes( prevNodes: NodeMap, nodes: NodeMap, treeId: number, -): Generator { +): Generator { if (prevNodes === nodes) return; const { onlyA: deletedNodeIds, @@ -517,7 +517,7 @@ function* diffEdges( prevEdges: EdgeCollection, edges: EdgeCollection, treeId: number, -): Generator { +): Generator { if (prevEdges === edges) return; const { onlyA: deletedEdges, onlyB: addedEdges } = diffEdgeCollections(prevEdges, edges); @@ -559,7 +559,7 @@ function updateTreePredicate(prevTree: Tree, tree: Tree): boolean { export function* diffTrees( prevTrees: TreeMap, trees: TreeMap, -): Generator { +): Generator { if (prevTrees === trees) return; const { onlyA: deletedTreeIds, @@ -615,7 +615,7 @@ export function* diffSkeletonTracing( skeletonTracing: SkeletonTracing, prevFlycam: Flycam, flycam: Flycam, -): Generator { +): Generator { if (prevSkeletonTracing !== skeletonTracing) { for (const action of cachedDiffTrees(prevSkeletonTracing.trees, skeletonTracing.trees)) { yield action; diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 65e3fa2d863..3b21f5e9546 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -62,7 +62,16 @@ 
export type UpdateMetadataOfAnnotationUpdateAction = ReturnType; export type MergeAgglomerateUpdateAction = ReturnType; +// There are two types of UpdateActions. The ones that *need* to be in a separate transaction +// group. And the ones that don't have this requirement. export type UpdateAction = + | UpdateActionWithoutIsolationRequirement + | UpdateActionWithIsolationRequirement; + +export type UpdateActionWithIsolationRequirement = + | RevertToVersionUpdateAction + | AddLayerToAnnotationUpdateAction; +export type UpdateActionWithoutIsolationRequirement = | UpdateTreeUpdateAction | DeleteTreeUpdateAction | MergeTreeUpdateAction @@ -84,13 +93,11 @@ export type UpdateAction = | UpdateTreeVisibilityUpdateAction | UpdateTreeEdgesVisibilityUpdateAction | UpdateTreeGroupVisibilityUpdateAction - | RevertToVersionUpdateAction | UpdateSegmentGroupsUpdateAction | UpdateTreeGroupsUpdateAction | RemoveFallbackLayerUpdateAction | UpdateTdCameraUpdateAction | UpdateMappingNameUpdateAction - | AddLayerToAnnotationUpdateAction | DeleteAnnotationLayerUpdateAction | UpdateAnnotationLayerNameUpdateAction | UpdateMetadataOfAnnotationUpdateAction diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index 8d55bf16b94..de6e7a1f6cf 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -87,7 +87,7 @@ import { } from "oxalis/model/sagas/saga_helpers"; import { deleteSegmentDataVolumeAction, - type UpdateAction, + type UpdateActionWithoutIsolationRequirement, updateSegmentGroups, } from "oxalis/model/sagas/update_actions"; import { @@ -641,7 +641,7 @@ export const cachedDiffSegmentLists = memoizeOne( function* uncachedDiffSegmentLists( prevSegments: SegmentMap, newSegments: SegmentMap, -): Generator { +): Generator { const { onlyA: deletedSegmentIds, onlyB: addedSegmentIds, @@ -687,7 +687,7 @@ export function* 
diffVolumeTracing( volumeTracing: VolumeTracing, prevFlycam: Flycam, flycam: Flycam, -): Generator { +): Generator { if (updateTracingPredicate(prevVolumeTracing, volumeTracing, prevFlycam, flycam)) { yield updateVolumeTracing( volumeTracing, diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index d997d87fa90..5dee1ae0404 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -130,7 +130,7 @@ import { getDefaultLayerViewConfiguration, } from "types/schemas/dataset_view_configuration.schema"; import defaultState from "oxalis/default_state"; -import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; +import { pushSaveQueueTransactionIsolated } from "oxalis/model/actions/save_actions"; import { addLayerToAnnotation, deleteAnnotationLayer } from "oxalis/model/sagas/update_actions"; type DatasetSettingsProps = { @@ -1663,14 +1663,12 @@ const mapDispatchToProps = (dispatch: Dispatch) => ({ addSkeletonLayerToAnnotation() { dispatch( - pushSaveQueueTransaction( - [ - addLayerToAnnotation({ - typ: "Skeleton", - name: "skeleton", - fallbackLayerName: undefined, - }), - ], + pushSaveQueueTransactionIsolated( + addLayerToAnnotation({ + typ: "Skeleton", + name: "skeleton", + fallbackLayerName: undefined, + }), "unused-tracing-id", ), ); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx index 33885d71953..8bf6001e6cf 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx @@ -26,7 +26,7 @@ import InputComponent from "oxalis/view/components/input_component"; import { api, Model } 
from "oxalis/singletons"; import Toast from "libs/toast"; import { MappingStatusEnum } from "oxalis/constants"; -import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; +import { pushSaveQueueTransactionIsolated } from "oxalis/model/actions/save_actions"; import { useDispatch } from "react-redux"; import { addLayerToAnnotation } from "oxalis/model/sagas/update_actions"; @@ -166,18 +166,17 @@ export default function AddVolumeLayerModal({ if (selectedSegmentationLayerName == null) { dispatch( - pushSaveQueueTransaction( - [ - addLayerToAnnotation({ - typ: "Volume", - name: newLayerName, - fallbackLayerName: undefined, - magRestrictions: { - min: minResolutionAllowed, - max: maxResolutionAllowed, - }, - }), - ], + pushSaveQueueTransactionIsolated( + addLayerToAnnotation({ + typ: "Volume", + name: newLayerName, + fallbackLayerName: undefined, + magRestrictions: { + min: minResolutionAllowed, + max: maxResolutionAllowed, + }, + }), + "unused-tracing-id", ), ); @@ -201,19 +200,18 @@ export default function AddVolumeLayerModal({ } dispatch( - pushSaveQueueTransaction( - [ - addLayerToAnnotation({ - typ: "Volume", - name: newLayerName, - fallbackLayerName, - magRestrictions: { - min: minResolutionAllowed, - max: maxResolutionAllowed, - }, - mappingName: maybeMappingName, - }), - ], + pushSaveQueueTransactionIsolated( + addLayerToAnnotation({ + typ: "Volume", + name: newLayerName, + fallbackLayerName, + magRestrictions: { + min: minResolutionAllowed, + max: maxResolutionAllowed, + }, + mappingName: maybeMappingName, + }), + "unused-tracing-id", ), ); diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index 78ad6efcac2..3c878c2b624 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -11,7 +11,7 @@ import { } from "admin/admin_rest_api"; import { handleGenericError } from "libs/error_handling"; import { - 
pushSaveQueueTransaction, + pushSaveQueueTransactionIsolated, setVersionNumberAction, } from "oxalis/model/actions/save_actions"; import { @@ -73,9 +73,8 @@ async function handleRestoreVersion( const newestVersion = _.max(versions.map((batch) => batch.version)) || 0; Store.dispatch(setVersionNumberAction(newestVersion)); Store.dispatch( - pushSaveQueueTransaction( - [revertToVersion(version)], - // todop + pushSaveQueueTransactionIsolated( + revertToVersion(version), "experimental; leaving out tracingId as this should not be required", ), ); diff --git a/frontend/javascripts/test/helpers/saveHelpers.ts b/frontend/javascripts/test/helpers/saveHelpers.ts index 09703d25e29..30c32d013de 100644 --- a/frontend/javascripts/test/helpers/saveHelpers.ts +++ b/frontend/javascripts/test/helpers/saveHelpers.ts @@ -1,11 +1,11 @@ import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; import { addTracingIdToActions } from "oxalis/model/reducers/save_reducer"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; import type { SaveQueueEntry } from "oxalis/store"; import dummyUser from "test/fixtures/dummy_user"; export function createSaveQueueFromUpdateActions( - updateActions: UpdateAction[][], + updateActions: UpdateActionWithoutIsolationRequirement[][], timestamp: number, tracingId: string, stats: TracingStats | null = null, @@ -22,11 +22,15 @@ export function createSaveQueueFromUpdateActions( transactionId: "dummyRequestId", })); } -export function withoutUpdateTracing(items: Array): Array { +export function withoutUpdateTracing( + items: Array, +): Array { return items.filter( (item) => item.name !== "updateSkeletonTracing" && item.name !== "updateVolumeTracing", ); } -export function withoutUpdateTree(items: Array): Array { +export function withoutUpdateTree( + items: Array, +): Array { return items.filter((item) => item.name !== 
"updateTree"); } diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 6428fb94bd8..cde84f98ec5 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -4,7 +4,7 @@ import "test/reducers/save_reducer.mock"; import dummyUser from "test/fixtures/dummy_user"; import type { OxalisState } from "oxalis/store"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; const TIMESTAMP = 1494695001688; const DateMock = { @@ -64,7 +64,7 @@ test("Save should add more update actions to the queue", (t) => { t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add zero update actions to the queue", (t) => { - const items: UpdateAction[] = []; + const items: UpdateActionWithoutIsolationRequirement[] = []; const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const newState = SaveReducer(initialState, pushAction); t.deepEqual(newState.save.queue, []); diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index 82bb9e6f06a..323af891bb6 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -27,7 +27,7 @@ import { TreeTypeEnum } from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; import type { ServerSkeletonTracing } from "types/api_flow_types"; import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import type { UpdateActionWithoutIsolationRequirement } from 
"oxalis/model/sagas/update_actions"; import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; const TIMESTAMP = 1494347146379; @@ -92,12 +92,13 @@ function compactSaveQueueWithUpdateActions( // filling the save queue). one could probably combine compactUpdateActions and // createSaveQueueFromUpdateActions to have a createCompactedSaveQueueFromUpdateActions // helper function and use that in this spec. + // @ts-ignore queue.map((batch) => ({ ...batch, actions: compactUpdateActions(batch.actions, tracing) })), ); } function createCompactedSaveQueueFromUpdateActions( - updateActions: UpdateAction[][], + updateActions: UpdateActionWithoutIsolationRequirement[][], timestamp: number, tracing: SkeletonTracing, stats: TracingStats | null = null, From 3ca9833520cf937689931106f4d63a42b3ba82b8 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 11:59:14 +0100 Subject: [PATCH 237/361] remove downsampling feature volume annotations --- frontend/javascripts/admin/admin_rest_api.ts | 15 ---- frontend/javascripts/oxalis/api/api_latest.ts | 22 ------ .../left-border-tabs/layer_settings_tab.tsx | 47 ++--------- .../modals/downsample_volume_modal.tsx | 78 ------------------- 4 files changed, 6 insertions(+), 156 deletions(-) delete mode 100644 frontend/javascripts/oxalis/view/left-border-tabs/modals/downsample_volume_modal.tsx diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index bbeec5e7b74..76dace215c0 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -927,21 +927,6 @@ export async function downloadAnnotation( await downloadWithFilename(downloadUrl); } -// When the annotation is open, please use the corresponding method -// in api_latest.js. It will take care of saving the annotation and -// reloading it. 
-export async function downsampleSegmentation( - annotationId: string, - annotationType: APIAnnotationType, - tracingId: string, -): Promise { - await Request.receiveJSON( - `/api/annotations/${annotationType}/${annotationId}/downsample?tracingId=${tracingId}`, - { - method: "PATCH", - }, - ); -} // ### Datasets export async function getDatasets( isUnreported: boolean | null | undefined = null, diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index 8168cdc5181..f8a48b1cb12 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -45,7 +45,6 @@ import { doWithToken, finishAnnotation, getMappingsForDatasetLayer, - downsampleSegmentation, sendAnalyticsEvent, } from "admin/admin_rest_api"; import { @@ -1511,27 +1510,6 @@ class TracingApi { this.setAnnotationTool(tool); } - /** - * Use this method to create a complete magnification pyramid by downsampling the lowest present mag (e.g., mag 1). - This method will save the current changes and then reload the page after the downsampling - has finished. - This function can only be used for non-tasks. - Note that this invoking this method will not block the UI. Thus, user actions can be performed during the - downsampling. The caller should prohibit this (e.g., by showing a not-closable modal during the process). - */ - async downsampleSegmentation(volumeTracingId: string) { - const state = Store.getState(); - const { annotationId, annotationType } = state.tracing; - - if (state.task != null) { - throw new Error("Cannot downsample segmentation for a task."); - } - - await this.save(); - await downsampleSegmentation(annotationId, annotationType, volumeTracingId); - await this.hardReload(); - } - /** * Disables the saving for the current annotation. * WARNING: Cannot be undone. Only do this if you know what you are doing. 
diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 5dee1ae0404..84252188192 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -88,7 +88,6 @@ import { userSettings } from "types/schemas/user_settings.schema"; import type { Vector3, ControlMode } from "oxalis/constants"; import Constants, { ControlModeEnum, MappingStatusEnum } from "oxalis/constants"; import EditableTextLabel from "oxalis/view/components/editable_text_label"; -import LinkButton from "components/link_button"; import { Model } from "oxalis/singletons"; import type { VolumeTracing, @@ -113,7 +112,6 @@ import { } from "messages"; import { MaterializeVolumeAnnotationModal } from "oxalis/view/action-bar/starting_job_modals"; import AddVolumeLayerModal, { validateReadableLayerName } from "./modals/add_volume_layer_modal"; -import DownsampleVolumeModal from "./modals/downsample_volume_modal"; import Histogram, { isHistogramSupported } from "./histogram_view"; import MappingSettingsView from "./mapping_settings_view"; import { confirmAsync } from "../../../dashboard/dataset/helper_components"; @@ -164,9 +162,6 @@ type DatasetSettingsProps = { }; type State = { - // If this is set to not-null, the downsampling modal - // is shown for that VolumeTracing - volumeTracingToDownsample: VolumeTracing | null | undefined; isAddVolumeLayerModalVisible: boolean; preselectedSegmentationLayerName: string | undefined; segmentationLayerWasPreselected: boolean | undefined; @@ -369,7 +364,6 @@ function LayerInfoIconWithTooltip({ class DatasetSettings extends React.PureComponent { onChangeUser: Record) => any>; state: State = { - volumeTracingToDownsample: null, isAddVolumeLayerModalVisible: false, preselectedSegmentationLayerName: undefined, segmentationLayerWasPreselected: false, @@ -1157,21 
+1151,12 @@ class DatasetSettings extends React.PureComponent { } return ( - - this.showDownsampleVolumeModal(volumeTracing)}> - Magnification Icon - + + ); }; @@ -1319,18 +1304,6 @@ class DatasetSettings extends React.PureComponent { ); }; - showDownsampleVolumeModal = (volumeTracing: VolumeTracing) => { - this.setState({ - volumeTracingToDownsample: volumeTracing, - }); - }; - - hideDownsampleVolumeModal = () => { - this.setState({ - volumeTracingToDownsample: null, - }); - }; - showAddVolumeLayerModal = () => { this.setState({ isAddVolumeLayerModalVisible: true, @@ -1570,14 +1543,6 @@ class DatasetSettings extends React.PureComponent { ) : null} - {this.state.volumeTracingToDownsample != null ? ( - - ) : null} - {this.state.layerToMergeWithFallback != null ? ( void; - magsToDownsample: Array; - volumeTracing: VolumeTracing; -}) { - const [isDownsampling, setIsDownsampling] = useState(false); - - const handleTriggerDownsampling = async () => { - setIsDownsampling(true); - await api.tracing.downsampleSegmentation(volumeTracing.tracingId); - setIsDownsampling(false); - }; - - return ( - void) | null' is not assignable to ty... Remove this comment to see the full error message - onCancel={isDownsampling ? null : hideDownsampleVolumeModal} - footer={null} - width={800} - maskClosable={false} - open - > -

- This annotation does not have volume annotation data in all magnifications. Consequently, - annotation data cannot be rendered at all zoom values. By clicking "Downsample", - WEBKNOSSOS will use the best magnification of the volume data to create all dependent mags. -

-

- The following magnifications will be added when clicking "Downsample":{" "} - {magsToDownsample.map((mag) => mag.join("-")).join(", ")}. -

-
- The cause for the missing magnifications can be one of the following: -
    -
  • - The annotation was created before WEBKNOSSOS supported multi-magnification volume - tracings. -
  • -
  • An old annotation was uploaded which did not include all magnifications.
  • -
  • - The annotation was created in a task that was restricted to certain magnifications. -
  • -
  • The dataset was mutated to have more magnifications.
  • -
-
-

- Note that this action might take a few minutes. Afterwards, the annotation is reloaded. - Also, the version history of the volume data will be reset. -

-
- - Downsample - -
-
- ); -} From 36542174d45fcb90c946178e859861ad4148e47d Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 12:06:06 +0100 Subject: [PATCH 238/361] use editableMappingId == volumeTracingId constraint --- .../oxalis/model/sagas/proofread_saga.ts | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index abc8f416455..889ff4d40c9 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -390,8 +390,6 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { const sourceAgglomerateId = sourceInfo.agglomerateId; const targetAgglomerateId = targetInfo.agglomerateId; - const editableMappingId = volumeTracing.mappingName; - /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ @@ -442,7 +440,6 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, - editableMappingId, volumeTracingId, sourceTree, items, @@ -530,7 +527,6 @@ function* performMinCut( sourceSegmentId: number, targetSegmentId: number, agglomerateFileMag: Vector3, - editableMappingId: string, volumeTracingId: string, sourceTree: Tree | null, items: UpdateActionWithoutIsolationRequirement[], @@ -548,7 +544,7 @@ function* performMinCut( segmentId2: targetSegmentId, mag: agglomerateFileMag, agglomerateId: sourceAgglomerateId, - editableMappingId, + editableMappingId: volumeTracingId, }; const edgesToRemove = yield* call( @@ -596,7 +592,6 @@ function* performCutFromNeighbors( segmentId: number, segmentPosition: Vector3 | null, agglomerateFileMag: Vector3, - editableMappingId: string, volumeTracingId: string, sourceTree: Tree | null | undefined, items: UpdateActionWithoutIsolationRequirement[], @@ -608,7 +603,7 @@ function* 
performCutFromNeighbors( segmentId, mag: agglomerateFileMag, agglomerateId, - editableMappingId, + editableMappingId: volumeTracingId, }; const neighborInfo = yield* call( @@ -770,7 +765,6 @@ function* handleProofreadMergeOrMinCut(action: Action) { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, - volumeTracing.mappingName, volumeTracingId, null, items, @@ -921,8 +915,6 @@ function* handleProofreadCutFromNeighbors(action: Action) { const targetAgglomerateId = idInfos[0].agglomerateId; const targetSegmentId = idInfos[0].unmappedId; - const editableMappingId = volumeTracing.mappingName; - const targetAgglomerate = volumeTracing.segments.getNullable(Number(targetAgglomerateId)); /* Send the respective split/merge update action to the backend (by pushing to the save queue @@ -936,7 +928,6 @@ function* handleProofreadCutFromNeighbors(action: Action) { targetSegmentId, targetPosition, agglomerateFileMag, - editableMappingId, volumeTracingId, action.tree, items, From 01953aaf77271f7055fd25ab7ea88d2254e42e26 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 14:05:04 +0100 Subject: [PATCH 239/361] fix deletion of layer --- frontend/javascripts/admin/admin_rest_api.ts | 13 ------------- .../oxalis/model/sagas/update_actions.ts | 4 ++-- .../view/left-border-tabs/layer_settings_tab.tsx | 12 ++++++++++-- 3 files changed, 12 insertions(+), 17 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 76dace215c0..02a269b11db 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -540,19 +540,6 @@ type AnnotationLayerCreateDescriptor = { magRestrictions?: APIMagRestrictions | null | undefined; }; -export function deleteAnnotationLayer( - annotationId: string, - annotationType: APIAnnotationType, - layerName: string, -): Promise { - return Request.receiveJSON( - 
`/api/annotations/${annotationType}/${annotationId}/deleteAnnotationLayer?layerName=${layerName}`, - { - method: "PATCH", - }, - ); -} - export function finishAnnotation( annotationId: string, annotationType: APIAnnotationType, diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 3b21f5e9546..b5e161e4e62 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -567,11 +567,11 @@ export function addLayerToAnnotation(parameters: AnnotationLayerCreationParamete export function deleteAnnotationLayer( tracingId: string, layerName: string, - typ: "Skeleton" | "Volume", + type: "Skeleton" | "Volume", ) { return { name: "deleteLayerFromAnnotation", - value: { tracingId, layerName, typ }, + value: { tracingId, layerName, type }, } as const; } diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 84252188192..6b2c8a412d6 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -128,7 +128,10 @@ import { getDefaultLayerViewConfiguration, } from "types/schemas/dataset_view_configuration.schema"; import defaultState from "oxalis/default_state"; -import { pushSaveQueueTransactionIsolated } from "oxalis/model/actions/save_actions"; +import { + pushSaveQueueTransaction, + pushSaveQueueTransactionIsolated, +} from "oxalis/model/actions/save_actions"; import { addLayerToAnnotation, deleteAnnotationLayer } from "oxalis/model/sagas/update_actions"; type DatasetSettingsProps = { @@ -1640,7 +1643,12 @@ const mapDispatchToProps = (dispatch: Dispatch) => ({ }, deleteAnnotationLayer(tracingId: string, type: AnnotationLayerType, layerName: string) { - dispatch(deleteAnnotationLayer(tracingId, layerName, 
type)); + dispatch( + pushSaveQueueTransaction( + [deleteAnnotationLayer(tracingId, layerName, type)], + "unused-tracing-id", + ), + ); }, }); From 1dcd33ef48941e2c7e87fb6dc426ab7563c26213 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 14:34:42 +0100 Subject: [PATCH 240/361] fix importVolumeData --- frontend/javascripts/admin/admin_rest_api.ts | 3 ++- .../trees_tab/skeleton_tab_view.tsx | 19 +++++++++---------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 02a269b11db..ba5b34841f5 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -855,6 +855,7 @@ export async function importVolumeTracing( tracing: Tracing, volumeTracing: VolumeTracing, dataFile: File, + version: number, ): Promise { return doWithToken((token) => Request.sendMultipartFormReceiveJSON( @@ -862,7 +863,7 @@ export async function importVolumeTracing( { data: { dataFile, - currentVersion: volumeTracing.version, + currentVersion: version, }, }, ), diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx index e805e65170b..2de7e883f40 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx @@ -134,12 +134,12 @@ export async function importTracingFiles(files: Array, createGroupForEachF } }; - const tryParsingFileAsNml = async (file: File) => { + const tryParsingFileAsNml = async (file: File, warnAboutVolumes: boolean = true) => { try { const nmlString = await readFileAsText(file); const { trees, treeGroups, userBoundingBoxes, datasetName, containedVolumes } = await parseNml(nmlString); - if (containedVolumes) { + if (containedVolumes && warnAboutVolumes) { 
Toast.warning( "The NML file contained volume information which was ignored. Please upload the NML into the dashboard to create a new annotation which also contains the volume data.", ); @@ -210,7 +210,7 @@ export async function importTracingFiles(files: Array, createGroupForEachF const nmlBlob = await nmlFileEntry.getData!(new BlobWriter()); const nmlFile = new File([nmlBlob], nmlFileEntry.filename); - const nmlImportActions = await tryParsingFileAsNml(nmlFile); + const nmlImportActions = await tryParsingFileAsNml(nmlFile, false); const dataFileEntry = entries.find((entry: Entry) => Utils.isFileExtensionEqualTo(entry.filename, "zip"), @@ -240,15 +240,14 @@ export async function importTracingFiles(files: Array, createGroupForEachF tracing, oldVolumeTracing, dataFile, + tracing.version, ); - if (oldVolumeTracing) { - Store.dispatch(importVolumeTracingAction()); - Store.dispatch(setVersionNumberAction(tracing.version + 1)); - Store.dispatch(setLargestSegmentIdAction(newLargestSegmentId)); - await clearCache(dataset, oldVolumeTracing.tracingId); - await api.data.reloadBuckets(oldVolumeTracing.tracingId); - } + Store.dispatch(importVolumeTracingAction()); + Store.dispatch(setVersionNumberAction(tracing.version + 1)); + Store.dispatch(setLargestSegmentIdAction(newLargestSegmentId)); + await clearCache(dataset, oldVolumeTracing.tracingId); + await api.data.reloadBuckets(oldVolumeTracing.tracingId); } await reader.close(); From 2a639d2a2657a3e27b2f0db2f005d9fdd2f4e945 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 16:11:08 +0100 Subject: [PATCH 241/361] improve labels for proofreading entries in version restore view --- .../javascripts/oxalis/view/version_entry.tsx | 34 ++++++++++++++----- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index f807529acdd..b588c453528 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ 
b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -93,14 +93,32 @@ const descriptionFns: Record< : "Deactivated the active mapping.", icon: , }), - splitAgglomerate: (action: SplitAgglomerateUpdateAction): Description => ({ - description: `Split agglomerate ${action.value.agglomerateId} by separating the segments at position ${action.value.segmentPosition1} and ${action.value.segmentPosition2}.`, - icon: , - }), - mergeAgglomerate: (action: MergeAgglomerateUpdateAction): Description => ({ - description: `Merged agglomerates ${action.value.agglomerateId1} and ${action.value.agglomerateId2} by combining the segments at position ${action.value.segmentPosition1} and ${action.value.segmentPosition2}.`, - icon: , - }), + splitAgglomerate: (action: SplitAgglomerateUpdateAction): Description => { + const segment1Description = + action.value.segmentPosition1 != null + ? `at position ${action.value.segmentPosition1}` + : action.value.segmentId1 ?? "unknown"; + const segment2Description = + action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown"; + const description = `Split agglomerate ${action.value.agglomerateId} by separating the segments ${segment1Description} and ${segment2Description}.`; + return { + description, + icon: , + }; + }, + mergeAgglomerate: (action: MergeAgglomerateUpdateAction): Description => { + const segment1Description = + action.value.segmentPosition1 != null + ? `at position ${action.value.segmentPosition1}` + : action.value.segmentId1 ?? "unknown"; + const segment2Description = + action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown"; + const description = `Merged agglomerates ${action.value.agglomerateId1} and ${action.value.agglomerateId2} by combining the segments ${segment1Description} and ${segment2Description}.`; + return { + description, + icon: , + }; + }, deleteTree: (action: DeleteTreeUpdateAction, count: number): Description => ({ description: count > 1 ? 
`Deleted ${count} trees.` : `Deleted the tree with id ${action.value.id}.`, From 56bb6ebcb9a0d25ae688a2f1e79a8db5ee598d30 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 16:21:18 +0100 Subject: [PATCH 242/361] remove/fix some more todo comments --- frontend/javascripts/admin/admin_rest_api.ts | 11 ++++------- .../oxalis/model/reducers/save_reducer.ts | 2 -- .../oxalis/model/sagas/annotation_saga.tsx | 1 - .../javascripts/oxalis/model/sagas/save_saga.ts | 14 ++++++-------- .../oxalis/view/action-bar/save_button.tsx | 6 ------ 5 files changed, 10 insertions(+), 24 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index ba5b34841f5..a0fa2d41430 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -883,17 +883,14 @@ export async function downloadAnnotation( annotationId: string, annotationType: APIAnnotationType, showVolumeFallbackDownloadWarning: boolean = false, - _version: number | null | undefined = null, + version: number | null | undefined = null, downloadFileFormat: "zarr3" | "wkw" | "nml" = "wkw", includeVolumeData: boolean = true, ) { const searchParams = new URLSearchParams(); - // TODOp: Use the version parameter - /*Object.entries(versions).forEach(([key, val]) => { - if (val != null) { - searchParams.append(`${key}Version`, val.toString()); - } - });*/ + if (version != null) { + searchParams.append("version", version.toString()); + } if (includeVolumeData && showVolumeFallbackDownloadWarning) { Toast.info(messages["annotation.no_fallback_data_included"], { diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index f7b8b434e46..873633128a9 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -184,8 +184,6 @@ function SaveReducer(state: OxalisState, 
action: Action): OxalisState { } const LAYER_INDEPENDENT_ACTIONS = new Set([ - // todop: Related to IsolationSensitiveAction in backend? - // todop: sync this with the backend. The backend currently has only two such actions that have this requirement. "updateTdCamera", "revertToVersion", "addLayerToAnnotation", diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index 0f3bfa6134e..5bb550e9eb0 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -100,7 +100,6 @@ export function* pushAnnotationUpdateAsync(action: Action) { yield* retry( SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, - // todop: shouldn't this work via the save queue now? editAnnotation, tracing.annotationId, tracing.annotationType, diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 0f844b61ce6..4f04a00d284 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -38,9 +38,10 @@ import { SAVE_RETRY_WAITING_TIME, } from "oxalis/model/sagas/save_saga_constants"; import { diffSkeletonTracing } from "oxalis/model/sagas/skeletontracing_saga"; -import type { - UpdateActionWithoutIsolationRequirement, - UpdateActionWithTracingId, +import { + updateTdCamera, + type UpdateActionWithoutIsolationRequirement, + type UpdateActionWithTracingId, } from "oxalis/model/sagas/update_actions"; import { diffVolumeTracing } from "oxalis/model/sagas/volumetracing_saga"; import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga"; @@ -347,8 +348,8 @@ export function performDiffTracing( tracing: SkeletonTracing | VolumeTracing, prevFlycam: Flycam, flycam: Flycam, - _prevTdCamera: CameraData, - _tdCamera: CameraData, + prevTdCamera: CameraData, + tdCamera: CameraData, ): Array { let actions: Array = []; @@ 
-364,12 +365,9 @@ export function performDiffTracing( ); } - /* - TODOp: restore this update action (decide how to handle it, does it belong to skeleton or volume or something else?) if (prevTdCamera !== tdCamera) { actions = actions.concat(updateTdCamera()); } - */ return actions; } diff --git a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx index d08db492659..e16722e51f9 100644 --- a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx @@ -175,12 +175,6 @@ function getOldestUnsavedTimestamp(saveQueue: SaveState["queue"]): number | null let oldestUnsavedTimestamp; if (saveQueue.length > 0) { - // todop: theoretically, could this be not the oldest one? - // e.g., items are added to the queue like that: - // SkelT=1, SkelT=2, SkelT=3, VolT=1 - // now the first action is saved and the following remains: - // SkelT=2, SkelT=3, VolT=1 - // even if it could happen, probably not critical for the current context? 
oldestUnsavedTimestamp = saveQueue[0].timestamp; } From 31bb49193b3258dd58cb064cc54c900cefc39181 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 17:28:07 +0100 Subject: [PATCH 243/361] pass tracingId directly to update actions instead of magically adding it later to some (but not all) actions --- .../javascripts/oxalis/geometries/skeleton.ts | 6 +- frontend/javascripts/oxalis/merger_mode.ts | 12 +- .../layer_rendering_manager.ts | 2 +- .../bucket_data_handling/wkstore_adapter.ts | 2 +- .../compaction/compact_toggle_actions.ts | 4 +- .../compaction/compact_update_actions.ts | 7 +- .../oxalis/model/reducers/save_reducer.ts | 2 - .../oxalis/model/sagas/proofread_saga.ts | 19 ++- .../model/sagas/skeletontracing_saga.ts | 52 ++++---- .../oxalis/model/sagas/update_actions.ts | 117 ++++++++++++++---- .../oxalis/model/sagas/volumetracing_saga.tsx | 25 ++-- .../comment_tab/comment_tab_view.tsx | 6 +- .../backend-snapshot-tests/annotations.e2e.ts | 16 ++- .../test/reducers/save_reducer.spec.ts | 17 +-- .../test/sagas/compact_toggle_actions.spec.ts | 12 +- .../test/sagas/saga_integration.spec.ts | 5 +- .../javascripts/test/sagas/save_saga.spec.ts | 43 +++++-- 17 files changed, 247 insertions(+), 100 deletions(-) diff --git a/frontend/javascripts/oxalis/geometries/skeleton.ts b/frontend/javascripts/oxalis/geometries/skeleton.ts index 3b4c8968ae0..6660fc521b5 100644 --- a/frontend/javascripts/oxalis/geometries/skeleton.ts +++ b/frontend/javascripts/oxalis/geometries/skeleton.ts @@ -316,7 +316,11 @@ class Skeleton { */ refresh(skeletonTracing: SkeletonTracing) { const state = Store.getState(); - const diff = cachedDiffTrees(this.prevTracing.trees, skeletonTracing.trees); + const diff = cachedDiffTrees( + skeletonTracing.tracingId, + this.prevTracing.trees, + skeletonTracing.trees, + ); for (const update of diff) { switch (update.name) { diff --git a/frontend/javascripts/oxalis/merger_mode.ts b/frontend/javascripts/oxalis/merger_mode.ts index 
d934f23596d..5a4176e4589 100644 --- a/frontend/javascripts/oxalis/merger_mode.ts +++ b/frontend/javascripts/oxalis/merger_mode.ts @@ -265,7 +265,11 @@ async function onUpdateNode(mergerModeState: MergerModeState, node: UpdateAction // If the segment of the node changed, it is like the node got deleted and a copy got created somewhere else. // Thus we use the onNodeDelete and onNodeCreate method to update the mapping. if (nodeSegmentMap[id] != null) { - await onDeleteNode(mergerModeState, { nodeId: id, treeId }, false); + await onDeleteNode( + mergerModeState, + { nodeId: id, treeId, actionTracingId: mergerModeState.prevTracing.tracingId }, + false, + ); } if (segmentId != null && segmentId > 0) { @@ -287,7 +291,11 @@ async function onUpdateNode(mergerModeState: MergerModeState, node: UpdateAction } function updateState(mergerModeState: MergerModeState, skeletonTracing: SkeletonTracing) { - const diff = cachedDiffTrees(mergerModeState.prevTracing.trees, skeletonTracing.trees); + const diff = cachedDiffTrees( + skeletonTracing.tracingId, + mergerModeState.prevTracing.trees, + skeletonTracing.trees, + ); for (const action of diff) { switch (action.name) { diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts index b19e1a51333..0e6b71e2b7a 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts @@ -306,7 +306,7 @@ export default class LayerRenderingManager { (storeState) => getSegmentsForLayer(storeState, this.name), (newSegments) => { const cuckoo = this.getCustomColorCuckooTable(); - for (const updateAction of cachedDiffSegmentLists(prevSegments, newSegments)) { + for (const updateAction of cachedDiffSegmentLists(this.name, prevSegments, newSegments)) { if ( updateAction.name === "updateSegment" || updateAction.name === 
"createSegment" || diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index 595f6d7d53c..28f56b7e813 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -289,7 +289,7 @@ export async function createCompressedUpdateBucketActions( return compressedBase64Strings.map((compressedBase64, index) => { const bucket = batchSubset[index]; const bucketInfo = createSendBucketInfo(bucket.zoomedAddress, bucket.cube.magInfo); - return updateBucket(bucketInfo, compressedBase64); + return updateBucket(bucketInfo, compressedBase64, bucket.getTracingId()); }); }), ), diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts index 159b6511c17..63efbb9106d 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts @@ -176,8 +176,8 @@ export default function compactToggleActions( // If less than 50% of the toggled trees are exceptions, we should use the compaction const shouldUseToggleGroup = exceptions.length < 0.5 * affectedTreeCount; const compactedToggleActions = [ - updateTreeGroupVisibility(commonAncestor, commonVisibility), - ...exceptions.map((tree) => updateTreeVisibility(tree)), + updateTreeGroupVisibility(commonAncestor, commonVisibility, tracing.tracingId), + ...exceptions.map((tree) => updateTreeVisibility(tree, tracing.tracingId)), ]; const finalToggleActions = shouldUseToggleGroup ? 
compactedToggleActions : toggleActions; return remainingActions.concat(finalToggleActions); diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts index 886c384bde0..674c28c256c 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts @@ -78,6 +78,7 @@ function compactMovedNodesAndEdges(updateActions: Array -1) { // Insert before the deleteTreeUA compactedActions.splice( deleteTreeUAIndex, 0, - moveTreeComponent(oldTreeId, newTreeId, nodeIds), + moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId), ); } else { // Insert in front - compactedActions.unshift(moveTreeComponent(oldTreeId, newTreeId, nodeIds)); + compactedActions.unshift(moveTreeComponent(oldTreeId, newTreeId, nodeIds, actionTracingId)); } // Remove the original create/delete update actions of the moved nodes and edges. 
diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 873633128a9..3e5ad4e0559 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -187,8 +187,6 @@ const LAYER_INDEPENDENT_ACTIONS = new Set([ "updateTdCamera", "revertToVersion", "addLayerToAnnotation", - "deleteLayerFromAnnotation", - "updateLayerMetadata", "updateMetadataOfAnnotation", ]); diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 889ff4d40c9..cc0730d0bd0 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -406,6 +406,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, + volumeTracingId, ), ); const mergedMapping = yield* call( @@ -430,6 +431,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { sourceInfo.unmappedId, targetInfo.unmappedId, agglomerateFileMag, + volumeTracingId, ), ); } else if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS") { @@ -580,7 +582,13 @@ function* performMinCut( edge.segmentId2, ); items.push( - splitAgglomerate(sourceAgglomerateId, edge.segmentId1, edge.segmentId2, agglomerateFileMag), + splitAgglomerate( + sourceAgglomerateId, + edge.segmentId1, + edge.segmentId2, + agglomerateFileMag, + volumeTracingId, + ), ); } @@ -661,7 +669,13 @@ function* performCutFromNeighbors( } items.push( - splitAgglomerate(agglomerateId, edge.segmentId1, edge.segmentId2, agglomerateFileMag), + splitAgglomerate( + agglomerateId, + edge.segmentId1, + edge.segmentId2, + agglomerateFileMag, + volumeTracingId, + ), ); } @@ -727,6 +741,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { sourceInfo.unmappedId, targetInfo.unmappedId, 
agglomerateFileMag, + volumeTracingId, ), ); diff --git a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index 3bb6c8ca5bf..ffb3b6b68d7 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -479,6 +479,7 @@ export function* watchSkeletonTracingAsync(): Saga { } function* diffNodes( + tracingId: string, prevNodes: NodeMap, nodes: NodeMap, treeId: number, @@ -491,12 +492,12 @@ function* diffNodes( } = diffDiffableMaps(prevNodes, nodes); for (const nodeId of deletedNodeIds) { - yield deleteNode(treeId, nodeId); + yield deleteNode(treeId, nodeId, tracingId); } for (const nodeId of addedNodeIds) { const node = nodes.getOrThrow(nodeId); - yield createNode(treeId, node); + yield createNode(treeId, node, tracingId); } for (const nodeId of changedNodeIds) { @@ -504,7 +505,7 @@ function* diffNodes( const prevNode = prevNodes.getOrThrow(nodeId); if (updateNodePredicate(prevNode, node)) { - yield updateNode(treeId, node); + yield updateNode(treeId, node, tracingId); } } } @@ -514,6 +515,7 @@ function updateNodePredicate(prevNode: Node, node: Node): boolean { } function* diffEdges( + tracingId: string, prevEdges: EdgeCollection, edges: EdgeCollection, treeId: number, @@ -522,11 +524,11 @@ function* diffEdges( const { onlyA: deletedEdges, onlyB: addedEdges } = diffEdgeCollections(prevEdges, edges); for (const edge of deletedEdges) { - yield deleteEdge(treeId, edge.source, edge.target); + yield deleteEdge(treeId, edge.source, edge.target, tracingId); } for (const edge of addedEdges) { - yield createEdge(treeId, edge.source, edge.target); + yield createEdge(treeId, edge.source, edge.target, tracingId); } } @@ -557,6 +559,7 @@ function updateTreePredicate(prevTree: Tree, tree: Tree): boolean { } export function* diffTrees( + tracingId: string, prevTrees: TreeMap, trees: TreeMap, ): Generator { @@ -572,16 
+575,16 @@ export function* diffTrees( for (const treeId of deletedTreeIds) { const prevTree = prevTrees[treeId]; - yield* diffNodes(prevTree.nodes, new DiffableMap(), treeId); - yield* diffEdges(prevTree.edges, new EdgeCollection(), treeId); - yield deleteTree(treeId); + yield* diffNodes(tracingId, prevTree.nodes, new DiffableMap(), treeId); + yield* diffEdges(tracingId, prevTree.edges, new EdgeCollection(), treeId); + yield deleteTree(treeId, tracingId); } for (const treeId of addedTreeIds) { const tree = trees[treeId]; - yield createTree(tree); - yield* diffNodes(new DiffableMap(), tree.nodes, treeId); - yield* diffEdges(new EdgeCollection(), tree.edges, treeId); + yield createTree(tree, tracingId); + yield* diffNodes(tracingId, new DiffableMap(), tree.nodes, treeId); + yield* diffEdges(tracingId, new EdgeCollection(), tree.edges, treeId); } for (const treeId of bothTreeIds) { @@ -589,25 +592,25 @@ export function* diffTrees( const prevTree: Tree = prevTrees[treeId]; if (tree !== prevTree) { - yield* diffNodes(prevTree.nodes, tree.nodes, treeId); - yield* diffEdges(prevTree.edges, tree.edges, treeId); + yield* diffNodes(tracingId, prevTree.nodes, tree.nodes, treeId); + yield* diffEdges(tracingId, prevTree.edges, tree.edges, treeId); if (updateTreePredicate(prevTree, tree)) { - yield updateTree(tree); + yield updateTree(tree, tracingId); } if (prevTree.isVisible !== tree.isVisible) { - yield updateTreeVisibility(tree); + yield updateTreeVisibility(tree, tracingId); } if (prevTree.edgesAreVisible !== tree.edgesAreVisible) { - yield updateTreeEdgesVisibility(tree); + yield updateTreeEdgesVisibility(tree, tracingId); } } } } -export const cachedDiffTrees = memoizeOne((prevTrees: TreeMap, trees: TreeMap) => - Array.from(diffTrees(prevTrees, trees)), +export const cachedDiffTrees = memoizeOne((tracingId: string, prevTrees: TreeMap, trees: TreeMap) => + Array.from(diffTrees(tracingId, prevTrees, trees)), ); export function* diffSkeletonTracing( @@ -617,12 +620,16 @@ 
export function* diffSkeletonTracing( flycam: Flycam, ): Generator { if (prevSkeletonTracing !== skeletonTracing) { - for (const action of cachedDiffTrees(prevSkeletonTracing.trees, skeletonTracing.trees)) { + for (const action of cachedDiffTrees( + skeletonTracing.tracingId, + prevSkeletonTracing.trees, + skeletonTracing.trees, + )) { yield action; } if (prevSkeletonTracing.treeGroups !== skeletonTracing.treeGroups) { - yield updateTreeGroups(skeletonTracing.treeGroups); + yield updateTreeGroups(skeletonTracing.treeGroups, skeletonTracing.tracingId); } } @@ -637,7 +644,10 @@ export function* diffSkeletonTracing( } if (!_.isEqual(prevSkeletonTracing.userBoundingBoxes, skeletonTracing.userBoundingBoxes)) { - yield updateUserBoundingBoxesInSkeletonTracing(skeletonTracing.userBoundingBoxes); + yield updateUserBoundingBoxesInSkeletonTracing( + skeletonTracing.userBoundingBoxes, + skeletonTracing.tracingId, + ); } } export default [ diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index b5e161e4e62..b9c2c635e26 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -149,10 +149,11 @@ export type ServerUpdateAction = AsServerAction< | CreateTracingUpdateAction >; -export function createTree(tree: Tree) { +export function createTree(tree: Tree, actionTracingId: string) { return { name: "createTree", value: { + actionTracingId, id: tree.treeId, updatedId: undefined, color: tree.color, @@ -168,18 +169,20 @@ export function createTree(tree: Tree) { }, } as const; } -export function deleteTree(treeId: number) { +export function deleteTree(treeId: number, actionTracingId: string) { return { name: "deleteTree", value: { + actionTracingId, id: treeId, }, } as const; } -export function updateTree(tree: Tree) { +export function updateTree(tree: Tree, actionTracingId: string) { return { name: "updateTree", value: { + 
actionTracingId, id: tree.treeId, updatedId: tree.treeId, color: tree.color, @@ -195,58 +198,78 @@ export function updateTree(tree: Tree) { }, } as const; } -export function updateTreeVisibility(tree: Tree) { +export function updateTreeVisibility(tree: Tree, actionTracingId: string) { const { treeId, isVisible } = tree; return { name: "updateTreeVisibility", value: { + actionTracingId, treeId, isVisible, }, } as const; } -export function updateTreeEdgesVisibility(tree: Tree) { +export function updateTreeEdgesVisibility(tree: Tree, actionTracingId: string) { const { treeId, edgesAreVisible } = tree; return { name: "updateTreeEdgesVisibility", value: { + actionTracingId, treeId, edgesAreVisible, }, } as const; } -export function updateTreeGroupVisibility(groupId: number | null | undefined, isVisible: boolean) { +export function updateTreeGroupVisibility( + groupId: number | null | undefined, + isVisible: boolean, + actionTracingId: string, +) { return { name: "updateTreeGroupVisibility", value: { + actionTracingId, treeGroupId: groupId, isVisible, }, } as const; } -export function mergeTree(sourceTreeId: number, targetTreeId: number) { +export function mergeTree(sourceTreeId: number, targetTreeId: number, actionTracingId: string) { return { name: "mergeTree", value: { + actionTracingId, sourceId: sourceTreeId, targetId: targetTreeId, }, } as const; } -export function createEdge(treeId: number, sourceNodeId: number, targetNodeId: number) { +export function createEdge( + treeId: number, + sourceNodeId: number, + targetNodeId: number, + actionTracingId: string, +) { return { name: "createEdge", value: { + actionTracingId, treeId, source: sourceNodeId, target: targetNodeId, }, } as const; } -export function deleteEdge(treeId: number, sourceNodeId: number, targetNodeId: number) { +export function deleteEdge( + treeId: number, + sourceNodeId: number, + targetNodeId: number, + actionTracingId: string, +) { return { name: "deleteEdge", value: { + actionTracingId, treeId, 
source: sourceNodeId, target: targetNodeId, @@ -259,28 +282,35 @@ export type UpdateActionNode = Omit & { treeId: number; }; -export function createNode(treeId: number, node: Node) { +export function createNode(treeId: number, node: Node, actionTracingId: string) { const { untransformedPosition, ...restNode } = node; return { name: "createNode", - value: { ...restNode, position: untransformedPosition, treeId } as UpdateActionNode, + value: { + actionTracingId, + ...restNode, + position: untransformedPosition, + treeId, + } as UpdateActionNode, } as const; } -export function updateNode(treeId: number, node: Node) { +export function updateNode(treeId: number, node: Node, actionTracingId: string) { const { untransformedPosition, ...restNode } = node; return { name: "updateNode", value: { + actionTracingId, ...restNode, position: untransformedPosition, treeId, } as UpdateActionNode, } as const; } -export function deleteNode(treeId: number, nodeId: number) { +export function deleteNode(treeId: number, nodeId: number, actionTracingId: string) { return { name: "deleteNode", value: { + actionTracingId, treeId, nodeId, }, @@ -288,6 +318,7 @@ export function deleteNode(treeId: number, nodeId: number) { } export function updateSkeletonTracing( tracing: { + tracingId: string; activeNodeId: number | null | undefined; }, editPosition: Vector3, @@ -298,6 +329,7 @@ export function updateSkeletonTracing( return { name: "updateSkeletonTracing", value: { + actionTracingId: tracing.tracingId, activeNode: tracing.activeNodeId, editPosition, editPositionAdditionalCoordinates, @@ -310,10 +342,12 @@ export function moveTreeComponent( sourceTreeId: number, targetTreeId: number, nodeIds: Array, + actionTracingId: string, ) { return { name: "moveTreeComponent", value: { + actionTracingId, sourceId: sourceTreeId, targetId: targetTreeId, nodeIds, @@ -330,6 +364,7 @@ export function updateVolumeTracing( return { name: "updateVolumeTracing", value: { + actionTracingId: tracing.tracingId, 
activeSegmentId: tracing.activeCellId, editPosition: position, editPositionAdditionalCoordinates, @@ -341,18 +376,24 @@ export function updateVolumeTracing( } export function updateUserBoundingBoxesInSkeletonTracing( userBoundingBoxes: Array, + actionTracingId: string, ) { return { name: "updateUserBoundingBoxesInSkeletonTracing", value: { + actionTracingId, boundingBoxes: convertUserBoundingBoxesFromFrontendToServer(userBoundingBoxes), }, } as const; } -export function updateUserBoundingBoxesInVolumeTracing(userBoundingBoxes: Array) { +export function updateUserBoundingBoxesInVolumeTracing( + userBoundingBoxes: Array, + actionTracingId: string, +) { return { name: "updateUserBoundingBoxesInVolumeTracing", value: { + actionTracingId, boundingBoxes: convertUserBoundingBoxesFromFrontendToServer(userBoundingBoxes), }, } as const; @@ -364,11 +405,13 @@ export function createSegmentVolumeAction( color: Vector3 | null, groupId: number | null | undefined, metadata: MetadataEntryProto[], + actionTracingId: string, creationTime: number | null | undefined = Date.now(), ) { return { name: "createSegment", value: { + actionTracingId, id, anchorPosition, name, @@ -388,11 +431,13 @@ export function updateSegmentVolumeAction( color: Vector3 | null, groupId: number | null | undefined, metadata: Array, + actionTracingId: string, creationTime: number | null | undefined = Date.now(), ) { return { name: "updateSegment", value: { + actionTracingId, id, anchorPosition, additionalCoordinates, @@ -404,44 +449,54 @@ export function updateSegmentVolumeAction( }, } as const; } -export function deleteSegmentVolumeAction(id: number) { +export function deleteSegmentVolumeAction(id: number, actionTracingId: string) { return { name: "deleteSegment", value: { + actionTracingId, id, }, } as const; } -export function deleteSegmentDataVolumeAction(id: number) { +export function deleteSegmentDataVolumeAction(id: number, actionTracingId: string) { return { name: "deleteSegmentData", value: { + 
actionTracingId, id, }, } as const; } -export function updateBucket(bucketInfo: SendBucketInfo, base64Data: string) { +export function updateBucket( + bucketInfo: SendBucketInfo, + base64Data: string, + actionTracingId: string, +) { return { name: "updateBucket", - value: Object.assign({}, bucketInfo, { + value: { + actionTracingId, + ...bucketInfo, base64Data, - }), + }, } as const; } -export function updateSegmentGroups(segmentGroups: Array) { +export function updateSegmentGroups(segmentGroups: Array, actionTracingId: string) { return { name: "updateSegmentGroups", value: { + actionTracingId, segmentGroups, }, } as const; } -export function updateTreeGroups(treeGroups: Array) { +export function updateTreeGroups(treeGroups: Array, actionTracingId: string) { return { name: "updateTreeGroups", value: { + actionTracingId, treeGroups, }, } as const; @@ -454,10 +509,12 @@ export function revertToVersion(version: number) { }, } as const; } -export function removeFallbackLayer() { +export function removeFallbackLayer(actionTracingId: string) { return { name: "removeFallbackLayer", - value: {}, + value: { + actionTracingId, + }, } as const; } export function updateTdCamera() { @@ -478,10 +535,16 @@ export function updateMappingName( mappingName: string | null | undefined, isEditable: boolean | null | undefined, isLocked: boolean | undefined, + actionTracingId: string, ) { return { name: "updateMappingName", - value: { mappingName, isEditable, isLocked }, + value: { + actionTracingId, + mappingName, + isEditable, + isLocked, + }, } as const; } export function splitAgglomerate( @@ -489,9 +552,11 @@ export function splitAgglomerate( segmentId1: NumberLike, segmentId2: NumberLike, mag: Vector3, + actionTracingId: string, ): { name: "splitAgglomerate"; value: { + actionTracingId: string; agglomerateId: number; segmentId1: number | undefined; segmentId2: number | undefined; @@ -506,6 +571,7 @@ export function splitAgglomerate( return { name: "splitAgglomerate", value: { + 
actionTracingId, // TODO: Proper 64 bit support (#6921) agglomerateId: Number(agglomerateId), segmentId1: Number(segmentId1), @@ -520,9 +586,11 @@ export function mergeAgglomerate( segmentId1: NumberLike, segmentId2: NumberLike, mag: Vector3, + actionTracingId: string, ): { name: "mergeAgglomerate"; value: { + actionTracingId: string; agglomerateId1: number; agglomerateId2: number; segmentId1: number | undefined; @@ -538,6 +606,7 @@ export function mergeAgglomerate( return { name: "mergeAgglomerate", value: { + actionTracingId, // TODO: Proper 64 bit support (#6921) agglomerateId1: Number(agglomerateId1), agglomerateId2: Number(agglomerateId2), diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index de6e7a1f6cf..cdd7b8ff65b 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -634,11 +634,12 @@ function updateTracingPredicate( } export const cachedDiffSegmentLists = memoizeOne( - (prevSegments: SegmentMap, newSegments: SegmentMap) => - Array.from(uncachedDiffSegmentLists(prevSegments, newSegments)), + (tracingId: string, prevSegments: SegmentMap, newSegments: SegmentMap) => + Array.from(uncachedDiffSegmentLists(tracingId, prevSegments, newSegments)), ); function* uncachedDiffSegmentLists( + tracingId: string, prevSegments: SegmentMap, newSegments: SegmentMap, ): Generator { @@ -649,7 +650,7 @@ function* uncachedDiffSegmentLists( } = diffDiffableMaps(prevSegments, newSegments); for (const segmentId of deletedSegmentIds) { - yield deleteSegmentVolumeAction(segmentId); + yield deleteSegmentVolumeAction(segmentId, tracingId); } for (const segmentId of addedSegmentIds) { @@ -661,6 +662,7 @@ function* uncachedDiffSegmentLists( segment.color, segment.groupId, segment.metadata, + tracingId, ); } @@ -677,6 +679,7 @@ function* uncachedDiffSegmentLists( segment.color, segment.groupId, 
segment.metadata, + tracingId, segment.creationTime, ); } @@ -699,12 +702,16 @@ export function* diffVolumeTracing( } if (!_.isEqual(prevVolumeTracing.userBoundingBoxes, volumeTracing.userBoundingBoxes)) { - yield updateUserBoundingBoxesInVolumeTracing(volumeTracing.userBoundingBoxes); + yield updateUserBoundingBoxesInVolumeTracing( + volumeTracing.userBoundingBoxes, + volumeTracing.tracingId, + ); } if (prevVolumeTracing !== volumeTracing) { if (prevVolumeTracing.segments !== volumeTracing.segments) { for (const action of cachedDiffSegmentLists( + volumeTracing.tracingId, prevVolumeTracing.segments, volumeTracing.segments, )) { @@ -713,11 +720,11 @@ export function* diffVolumeTracing( } if (prevVolumeTracing.segmentGroups !== volumeTracing.segmentGroups) { - yield updateSegmentGroups(volumeTracing.segmentGroups); + yield updateSegmentGroups(volumeTracing.segmentGroups, volumeTracing.tracingId); } if (prevVolumeTracing.fallbackLayer != null && volumeTracing.fallbackLayer == null) { - yield removeFallbackLayer(); + yield removeFallbackLayer(volumeTracing.tracingId); } if ( @@ -730,6 +737,7 @@ export function* diffVolumeTracing( volumeTracing.mappingName || null, volumeTracing.hasEditableMapping || null, volumeTracing.mappingIsLocked, + volumeTracing.tracingId, ); yield action; } @@ -947,7 +955,10 @@ function* handleDeleteSegmentData(): Saga { yield* put(setBusyBlockingInfoAction(true, "Segment is being deleted.")); yield* put( - pushSaveQueueTransaction([deleteSegmentDataVolumeAction(action.segmentId)], action.layerName), + pushSaveQueueTransaction( + [deleteSegmentDataVolumeAction(action.segmentId, action.layerName)], + action.layerName, + ), ); yield* call([Model, Model.ensureSavedState]); diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx index 0e5fd08ea85..64b0e40dc81 100644 --- 
a/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx @@ -538,7 +538,11 @@ const CommentTabViewMemo = React.memo( } const updateActions = Array.from( - cachedDiffTrees(prevPops.skeletonTracing.trees, nextProps.skeletonTracing.trees), + cachedDiffTrees( + nextProps.skeletonTracing.tracingId, + prevPops.skeletonTracing.trees, + nextProps.skeletonTracing.trees, + ), ); const relevantUpdateActions = updateActions.filter( (ua) => diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 2364cd6ff07..d9e322c0e0b 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -156,9 +156,11 @@ async function sendUpdateActions(explorational: APIAnnotation, queue: SaveQueueE test.serial("Send update actions and compare resulting tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); + const tracingId = createdExplorational.annotationLayers[0].tracingId; const initialSkeleton = { activeNodeId: undefined, userBoundingBoxes: [], + tracingId, }; const [saveQueue] = addVersionNumbers( createSaveQueueFromUpdateActions( @@ -167,7 +169,7 @@ test.serial("Send update actions and compare resulting tracing", async (t) => { [UpdateActions.updateSkeletonTracing(initialSkeleton, [2, 3, 4], null, [1, 2, 3], 2)], ], 123456789, - createdExplorational.annotationLayers[0].tracingId, + tracingId, ), 0, ); @@ -196,8 +198,9 @@ test("Send complex update actions and compare resulting tracing", async (t) => { ], }, ]; - const createTreesUpdateActions = Array.from(diffTrees({}, trees)); - const updateTreeGroupsUpdateAction = UpdateActions.updateTreeGroups(treeGroups); + const someTracingId = "someTracingId"; + const 
createTreesUpdateActions = Array.from(diffTrees(someTracingId, {}, trees)); + const updateTreeGroupsUpdateAction = UpdateActions.updateTreeGroups(treeGroups, someTracingId); const [saveQueue] = addVersionNumbers( createSaveQueueFromUpdateActions( [createTreesUpdateActions, [updateTreeGroupsUpdateAction]], @@ -214,7 +217,7 @@ test("Send complex update actions and compare resulting tracing", async (t) => { test("Update Metadata for Skeleton Tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 5)); - const createTreesUpdateActions = Array.from(diffTrees({}, trees)); + const createTreesUpdateActions = Array.from(diffTrees("someTracingId", {}, trees)); const metadata = [ { key: "city", @@ -233,12 +236,13 @@ test("Update Metadata for Skeleton Tracing", async (t) => { ...trees[1], metadata, }; - const updateTreeAction = UpdateActions.updateTree(trees[1]); + const { tracingId } = createdExplorational.annotationLayers[0]; + const updateTreeAction = UpdateActions.updateTree(trees[1], tracingId); const [saveQueue] = addVersionNumbers( createSaveQueueFromUpdateActions( [createTreesUpdateActions, [updateTreeAction]], 123456789, - createdExplorational.annotationLayers[0].tracingId, + tracingId, ), 0, ); diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index cde84f98ec5..56c0e913671 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -40,14 +40,17 @@ const initialState = { tracing: {}, } as any as OxalisState; test("Save should add update actions to the queue", (t) => { - const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; + const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); 
const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const newState = SaveReducer(initialState, pushAction); t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add more update actions to the queue", (t) => { - const getItems = (treeId: number) => [createEdge(treeId, 1, 2), createEdge(treeId, 2, 3)]; + const getItems = (treeId: number) => [ + createEdge(treeId, 1, 2, tracingId), + createEdge(treeId, 2, 3, tracingId), + ]; const saveQueue = createSaveQueueFromUpdateActions( [getItems(0), getItems(1)], TIMESTAMP, @@ -70,8 +73,8 @@ test("Save should add zero update actions to the queue", (t) => { t.deepEqual(newState.save.queue, []); }); test("Save should remove one update actions from the queue", (t) => { - const firstItem = [createEdge(0, 1, 2)]; - const secondItem = [createEdge(1, 2, 3)]; + const firstItem = [createEdge(0, 1, 2, tracingId)]; + const secondItem = [createEdge(1, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP, tracingId); const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton", tracingId); const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton", tracingId); @@ -82,7 +85,7 @@ test("Save should remove one update actions from the queue", (t) => { t.deepEqual(newState.save.queue, saveQueue); }); test("Save should remove zero update actions from the queue", (t) => { - const items = [createEdge(0, 1, 2), createEdge(1, 2, 3)]; + const items = [createEdge(0, 1, 2, tracingId), createEdge(1, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const popAction = SaveActions.shiftSaveQueueAction(0); @@ -91,7 +94,7 @@ test("Save should remove zero update actions from the queue", (t) => { t.deepEqual(newState.save.queue, saveQueue); }); test("Save should remove all update actions 
from the queue (1/2)", (t) => { - const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; + const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const popAction = SaveActions.shiftSaveQueueAction(2); let newState = SaveReducer(initialState, pushAction); @@ -99,7 +102,7 @@ test("Save should remove all update actions from the queue (1/2)", (t) => { t.deepEqual(newState.save.queue, []); }); test("Save should remove all update actions from the queue (2/2)", (t) => { - const items = [createEdge(0, 1, 2), createEdge(0, 2, 3)]; + const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); const popAction = SaveActions.shiftSaveQueueAction(5); let newState = SaveReducer(initialState, pushAction); diff --git a/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts b/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts index ab4b7c12d28..40928585ea0 100644 --- a/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts +++ b/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts @@ -54,7 +54,7 @@ const treeGroups: TreeGroup[] = [ }, ]; const flycamMock = {} as any as Flycam; - +const tracingId = "someTracingId"; const createState = (trees: Tree[], _treeGroups: TreeGroup[]): OxalisState => ({ ...defaultState, tracing: { @@ -63,7 +63,7 @@ const createState = (trees: Tree[], _treeGroups: TreeGroup[]): OxalisState => ({ additionalAxes: [], createdTimestamp: 0, version: 0, - tracingId: "tracingId", + tracingId, boundingBox: null, userBoundingBoxes: [], type: "skeleton", @@ -119,7 +119,7 @@ function _updateTreeVisibility(treeId: number, isVisible: boolean) { treeId, isVisible, } as any as Tree; - return updateTreeVisibility(tree); + return updateTreeVisibility(tree, tracingId); } function getActions(initialState: OxalisState, 
newState: OxalisState) { @@ -163,7 +163,7 @@ test("compactUpdateActions should compact when toggling all trees", (t) => { ); const [compactedActions] = getActions(allVisible, testState); // Root group should be toggled - t.deepEqual(compactedActions, [updateTreeGroupVisibility(undefined, false)]); + t.deepEqual(compactedActions, [updateTreeGroupVisibility(undefined, false, tracingId)]); }); test("compactUpdateActions should compact when toggling a group", (t) => { // Let's toggle group 3 (which contains group 4) @@ -179,7 +179,7 @@ test("compactUpdateActions should compact when toggling a group", (t) => { treeGroups, ); const [compactedActions] = getActions(allVisible, testState); - t.deepEqual(compactedActions, [updateTreeGroupVisibility(3, false)]); + t.deepEqual(compactedActions, [updateTreeGroupVisibility(3, false, tracingId)]); }); test("compactUpdateActions should compact when toggling a group except for one tree", (t) => { // Let's make all trees invisible except for tree 3. Compaction should yield a toggle-root and toggle 3 action @@ -196,7 +196,7 @@ test("compactUpdateActions should compact when toggling a group except for one t ); const [compactedActions] = getActions(allVisible, testState); t.deepEqual(compactedActions, [ - updateTreeGroupVisibility(undefined, false), + updateTreeGroupVisibility(undefined, false, tracingId), _updateTreeVisibility(3, true), ]); }); diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 90f5de2a0ff..20e9275933c 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -50,7 +50,8 @@ test.serial( "watchTreeNames saga should rename empty trees in tasks and these updates should be persisted", (t) => { const state = Store.getState(); - const treeWithEmptyName = enforceSkeletonTracing(state.tracing).trees[1]; + const skeletonTracing = enforceSkeletonTracing(state.tracing); 
+ const treeWithEmptyName = skeletonTracing.trees[1]; const treeWithCorrectName = update(treeWithEmptyName, { name: { $set: generateTreeName(state, treeWithEmptyName.timestamp, treeWithEmptyName.treeId), @@ -59,7 +60,7 @@ test.serial( const expectedSaveQueue = createSaveQueueFromUpdateActions( [ [ - UpdateActions.updateTree(treeWithCorrectName), + UpdateActions.updateTree(treeWithCorrectName, skeletonTracing.tracingId), UpdateActions.updateSkeletonTracing( enforceSkeletonTracing(Store.getState().tracing), [1, 2, 3], diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 963effa2e4a..1784214f72e 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -64,6 +64,7 @@ const initialState = { }, annotationType: "Explorational", name: "", + tracingId, activeTreeId: 1, activeNodeId: null, restrictions: { @@ -90,7 +91,10 @@ test("SaveSaga should compact multiple updateTracing update actions", (t) => { t.deepEqual(compactSaveQueue(saveQueue), [saveQueue[1]]); }); test("SaveSaga should send update actions", (t) => { - const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; + const updateActions = [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); @@ -122,7 +126,10 @@ test("SaveSaga should send update actions", (t) => { }); test("SaveSaga should send request to server", (t) => { const saveQueue = createSaveQueueFromUpdateActions( - [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], + [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ], TIMESTAMP, tracingId, ); @@ -156,7 +163,10 @@ 
test("SaveSaga should send request to server", (t) => { }); test("SaveSaga should retry update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( - [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], + [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ], TIMESTAMP, tracingId, ); @@ -195,7 +205,10 @@ test("SaveSaga should retry update actions", (t) => { }); test("SaveSaga should escalate on permanent client error update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( - [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]], + [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ], TIMESTAMP, tracingId, ); @@ -242,7 +255,10 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => t.throws(() => saga.next()); }); test("SaveSaga should send update actions right away and try to reach a state where all updates are saved", (t) => { - const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; + const updateActions = [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + ]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); @@ -265,7 +281,10 @@ test("SaveSaga should send update actions right away and try to reach a state wh expectValueDeepEqual(t, saga.next([]), put(setSaveBusyAction(false))); }); test("SaveSaga should not try to reach state with all actions being saved when saving is triggered by a timeout", (t) => { - const updateActions = [[UpdateActions.createEdge(1, 0, 1)], [UpdateActions.createEdge(1, 1, 2)]]; + const updateActions = [ + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + 
]; const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); @@ -305,9 +324,9 @@ test("SaveSaga should remove the correct update actions", (t) => { test("SaveSaga should set the correct version numbers", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.createEdge(1, 0, 1)], - [UpdateActions.createEdge(1, 1, 2)], - [UpdateActions.createEdge(2, 3, 4)], + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + [UpdateActions.createEdge(2, 3, 4, tracingId)], ], TIMESTAMP, tracingId, @@ -358,9 +377,9 @@ test("SaveSaga should set the correct version numbers if the save queue was comp test("SaveSaga addVersionNumbers should set the correct version numbers", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ - [UpdateActions.createEdge(1, 0, 1)], - [UpdateActions.createEdge(1, 1, 2)], - [UpdateActions.createEdge(2, 3, 4)], + [UpdateActions.createEdge(1, 0, 1, tracingId)], + [UpdateActions.createEdge(1, 1, 2, tracingId)], + [UpdateActions.createEdge(2, 3, 4, tracingId)], ], TIMESTAMP, From bb9f1469c8374fadf9ef5701587a5d2f5b1dbbca Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 29 Nov 2024 17:37:40 +0100 Subject: [PATCH 244/361] get rid of unused-tracing-id and superfluous passing of relevant tracing ids --- .../oxalis/model/actions/save_actions.ts | 5 ---- .../model/bucket_data_handling/pushqueue.ts | 2 +- .../oxalis/model/reducers/save_reducer.ts | 28 +---------------- .../oxalis/model/sagas/annotation_saga.tsx | 11 ++----- .../left-border-tabs/layer_settings_tab.tsx | 8 +---- .../modals/add_volume_layer_modal.tsx | 4 --- .../backend-snapshot-tests/annotations.e2e.ts | 8 +---- .../javascripts/test/helpers/saveHelpers.ts | 4 +-- .../test/reducers/save_reducer.spec.ts | 30 ++++++++----------- .../test/sagas/saga_integration.spec.ts | 2 -- 
.../javascripts/test/sagas/save_saga.spec.ts | 14 ++------- .../test/sagas/skeletontracing_saga.spec.ts | 13 ++------ .../volumetracing/volumetracing_saga.spec.ts | 6 +--- 13 files changed, 26 insertions(+), 109 deletions(-) diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index f85453d9387..3ffd784b4b4 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -12,7 +12,6 @@ export type SaveQueueType = "skeleton" | "volume" | "mapping"; export type PushSaveQueueTransaction = { type: "PUSH_SAVE_QUEUE_TRANSACTION"; items: UpdateAction[]; - tracingId: string; transactionId: string; }; type SaveNowAction = ReturnType; @@ -42,25 +41,21 @@ export type SaveAction = // From this point on, we can assume that the groups fulfil the isolation requirement. export const pushSaveQueueTransaction = ( items: Array, - tracingId: string, transactionId: string = getUid(), ): PushSaveQueueTransaction => ({ type: "PUSH_SAVE_QUEUE_TRANSACTION", items, - tracingId, transactionId, }) as const; export const pushSaveQueueTransactionIsolated = ( item: UpdateActionWithIsolationRequirement, - tracingId: string, transactionId: string = getUid(), ): PushSaveQueueTransaction => ({ type: "PUSH_SAVE_QUEUE_TRANSACTION", items: [item], - tracingId, transactionId, }) as const; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts index 9151402b388..4827ced6328 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts @@ -154,7 +154,7 @@ class PushQueue { const items = await this.fifoResolver.orderedWaitFor( createCompressedUpdateBucketActions(batch), ); - Store.dispatch(pushSaveQueueTransaction(items, this.tracingId, this.cube.layerName)); + 
Store.dispatch(pushSaveQueueTransaction(items)); this.compressingBucketCount -= batch.length; } catch (error) { diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 3e5ad4e0559..39c9c72edad 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -7,7 +7,6 @@ import { type TracingStats, getStats } from "oxalis/model/accessors/annotation_a import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; import Date from "libs/date"; -import type { UpdateAction, UpdateActionWithTracingId } from "../sagas/update_actions"; // These update actions are not idempotent. Having them // twice in the save queue causes a corruption of the current annotation. @@ -52,7 +51,7 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { transactionGroupIndex, timestamp: Date.now(), authorId: activeUser.id, - actions: addTracingIdToActions(actions, dispatchedAction.tracingId), + actions, stats, // Redux Action Log context for debugging purposes. 
info: actionLogInfo, @@ -183,29 +182,4 @@ function SaveReducer(state: OxalisState, action: Action): OxalisState { } } -const LAYER_INDEPENDENT_ACTIONS = new Set([ - "updateTdCamera", - "revertToVersion", - "addLayerToAnnotation", - "updateMetadataOfAnnotation", -]); - -export function addTracingIdToActions( - actions: UpdateAction[], - tracingId: string, -): Array { - return actions.map((action) => { - if (LAYER_INDEPENDENT_ACTIONS.has(action.name)) { - return action as UpdateAction; - } - return { - ...action, - value: { - ...action.value, - actionTracingId: tracingId, - }, - } as UpdateActionWithTracingId; - }); -} - export default SaveReducer; diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index 5bb550e9eb0..dd5a8f051c5 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -58,9 +58,7 @@ export function* pushAnnotationNameUpdateAction(action: SetAnnotationNameAction) if (!mayEdit) { return; } - yield* put( - pushSaveQueueTransaction([updateMetadataOfAnnotation(action.name)], "unused-tracing-id"), - ); + yield* put(pushSaveQueueTransaction([updateMetadataOfAnnotation(action.name)])); } export function* pushAnnotationDescriptionUpdateAction(action: SetAnnotationDescriptionAction) { @@ -68,12 +66,7 @@ export function* pushAnnotationDescriptionUpdateAction(action: SetAnnotationDesc if (!mayEdit) { return; } - yield* put( - pushSaveQueueTransaction( - [updateMetadataOfAnnotation(undefined, action.description)], - "unused-tracing-id", - ), - ); + yield* put(pushSaveQueueTransaction([updateMetadataOfAnnotation(undefined, action.description)])); } export function* pushAnnotationUpdateAsync(action: Action) { diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 6b2c8a412d6..0c110d0f59f 
100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -1637,18 +1637,12 @@ const mapDispatchToProps = (dispatch: Dispatch) => ({ name: "skeleton", fallbackLayerName: undefined, }), - "unused-tracing-id", ), ); }, deleteAnnotationLayer(tracingId: string, type: AnnotationLayerType, layerName: string) { - dispatch( - pushSaveQueueTransaction( - [deleteAnnotationLayer(tracingId, layerName, type)], - "unused-tracing-id", - ), - ); + dispatch(pushSaveQueueTransaction([deleteAnnotationLayer(tracingId, layerName, type)])); }, }); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx index 8bf6001e6cf..034cf5cebaa 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx @@ -176,8 +176,6 @@ export default function AddVolumeLayerModal({ max: maxResolutionAllowed, }, }), - - "unused-tracing-id", ), ); await Model.ensureSavedState(); @@ -211,8 +209,6 @@ export default function AddVolumeLayerModal({ }, mappingName: maybeMappingName, }), - - "unused-tracing-id", ), ); await Model.ensureSavedState(); diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index d9e322c0e0b..64b0c7c9d23 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -169,7 +169,6 @@ test.serial("Send update actions and compare resulting tracing", async (t) => { [UpdateActions.updateSkeletonTracing(initialSkeleton, [2, 3, 4], null, [1, 2, 3], 2)], ], 123456789, - tracingId, ), 0, ); @@ -205,7 +204,6 @@ test("Send complex update actions and compare 
resulting tracing", async (t) => { createSaveQueueFromUpdateActions( [createTreesUpdateActions, [updateTreeGroupsUpdateAction]], 123456789, - createdExplorational.annotationLayers[0].tracingId, ), 0, ); @@ -239,11 +237,7 @@ test("Update Metadata for Skeleton Tracing", async (t) => { const { tracingId } = createdExplorational.annotationLayers[0]; const updateTreeAction = UpdateActions.updateTree(trees[1], tracingId); const [saveQueue] = addVersionNumbers( - createSaveQueueFromUpdateActions( - [createTreesUpdateActions, [updateTreeAction]], - 123456789, - tracingId, - ), + createSaveQueueFromUpdateActions([createTreesUpdateActions, [updateTreeAction]], 123456789), 0, ); diff --git a/frontend/javascripts/test/helpers/saveHelpers.ts b/frontend/javascripts/test/helpers/saveHelpers.ts index 30c32d013de..d91548d9195 100644 --- a/frontend/javascripts/test/helpers/saveHelpers.ts +++ b/frontend/javascripts/test/helpers/saveHelpers.ts @@ -1,5 +1,4 @@ import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; -import { addTracingIdToActions } from "oxalis/model/reducers/save_reducer"; import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; import type { SaveQueueEntry } from "oxalis/store"; import dummyUser from "test/fixtures/dummy_user"; @@ -7,14 +6,13 @@ import dummyUser from "test/fixtures/dummy_user"; export function createSaveQueueFromUpdateActions( updateActions: UpdateActionWithoutIsolationRequirement[][], timestamp: number, - tracingId: string, stats: TracingStats | null = null, ): SaveQueueEntry[] { return updateActions.map((ua) => ({ version: -1, timestamp, stats, - actions: addTracingIdToActions(ua, tracingId), + actions: ua, info: "[]", transactionGroupCount: 1, authorId: dummyUser.id, diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 56c0e913671..ce19e3b3197 100644 --- 
a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -41,8 +41,8 @@ const initialState = { } as any as OxalisState; test("Save should add update actions to the queue", (t) => { const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; - const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); const newState = SaveReducer(initialState, pushAction); t.deepEqual(newState.save.queue, saveQueue); }); @@ -51,33 +51,29 @@ test("Save should add more update actions to the queue", (t) => { createEdge(treeId, 1, 2, tracingId), createEdge(treeId, 2, 3, tracingId), ]; - const saveQueue = createSaveQueueFromUpdateActions( - [getItems(0), getItems(1)], - TIMESTAMP, - tracingId, - ); + const saveQueue = createSaveQueueFromUpdateActions([getItems(0), getItems(1)], TIMESTAMP); const testState = SaveReducer( initialState, - SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton", tracingId), + SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton"), ); const newState = SaveReducer( testState, - SaveActions.pushSaveQueueTransaction(getItems(1), "skeleton", tracingId), + SaveActions.pushSaveQueueTransaction(getItems(1), "skeleton"), ); t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add zero update actions to the queue", (t) => { const items: UpdateActionWithoutIsolationRequirement[] = []; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); const newState = SaveReducer(initialState, pushAction); t.deepEqual(newState.save.queue, []); }); test("Save should remove one update actions from the 
queue", (t) => { const firstItem = [createEdge(0, 1, 2, tracingId)]; const secondItem = [createEdge(1, 2, 3, tracingId)]; - const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP, tracingId); - const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton", tracingId); - const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton", tracingId); + const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP); + const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton"); + const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton"); const popAction = SaveActions.shiftSaveQueueAction(1); let newState = SaveReducer(initialState, firstPushAction); newState = SaveReducer(newState, secondPushAction); @@ -86,8 +82,8 @@ test("Save should remove one update actions from the queue", (t) => { }); test("Save should remove zero update actions from the queue", (t) => { const items = [createEdge(0, 1, 2, tracingId), createEdge(1, 2, 3, tracingId)]; - const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP, tracingId); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); const popAction = SaveActions.shiftSaveQueueAction(0); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); @@ -95,7 +91,7 @@ test("Save should remove zero update actions from the queue", (t) => { }); test("Save should remove all update actions from the queue (1/2)", (t) => { const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); const popAction = 
SaveActions.shiftSaveQueueAction(2); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); @@ -103,7 +99,7 @@ test("Save should remove all update actions from the queue (1/2)", (t) => { }); test("Save should remove all update actions from the queue (2/2)", (t) => { const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton", tracingId); + const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); const popAction = SaveActions.shiftSaveQueueAction(5); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 20e9275933c..9e7816bf275 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -14,7 +14,6 @@ import { setActiveUserAction } from "oxalis/model/actions/user_actions"; import dummyUser from "test/fixtures/dummy_user"; import { hasRootSagaCrashed } from "oxalis/model/sagas/root_saga"; import { omit } from "lodash"; -import { tracing as TaskTracing } from "test/fixtures/tasktracing_server_objects"; const { createTreeMapFromTreeArray, generateTreeName } = require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import("oxalis/model/reducers/skeletontracing_reducer_helpers"); @@ -71,7 +70,6 @@ test.serial( ], ], TIMESTAMP, - TaskTracing.id, getStats(state.tracing) || undefined, ); // Reset the info field which is just for debugging purposes diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 1784214f72e..3cb1bdb5b10 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -86,7 +86,6 @@ test("SaveSaga should 
compact multiple updateTracing update actions", (t) => { [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], ], TIMESTAMP, - tracingId, ); t.deepEqual(compactSaveQueue(saveQueue), [saveQueue[1]]); }); @@ -95,7 +94,7 @@ test("SaveSaga should send update actions", (t) => { [UpdateActions.createEdge(1, 0, 1, tracingId)], [UpdateActions.createEdge(1, 1, 2, tracingId)], ]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); // setLastSaveTimestampAction @@ -131,7 +130,6 @@ test("SaveSaga should send request to server", (t) => { [UpdateActions.createEdge(1, 1, 2, tracingId)], ], TIMESTAMP, - tracingId, ); const saga = sendSaveRequestToServer(); saga.next(); @@ -168,7 +166,6 @@ test("SaveSaga should retry update actions", (t) => { [UpdateActions.createEdge(1, 1, 2, tracingId)], ], TIMESTAMP, - tracingId, ); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); @@ -210,7 +207,6 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => [UpdateActions.createEdge(1, 1, 2, tracingId)], ], TIMESTAMP, - tracingId, ); const saga = sendSaveRequestToServer(); saga.next(); @@ -259,7 +255,7 @@ test("SaveSaga should send update actions right away and try to reach a state wh [UpdateActions.createEdge(1, 0, 1, tracingId)], [UpdateActions.createEdge(1, 1, 2, tracingId)], ]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); @@ -285,7 +281,7 @@ test("SaveSaga should not try to reach state with all actions being 
saved when s [UpdateActions.createEdge(1, 0, 1, tracingId)], [UpdateActions.createEdge(1, 1, 2, tracingId)], ]; - const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP, tracingId); + const saveQueue = createSaveQueueFromUpdateActions(updateActions, TIMESTAMP); const saga = pushSaveQueueAsync(); expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); saga.next(); @@ -309,7 +305,6 @@ test("SaveSaga should remove the correct update actions", (t) => { [UpdateActions.updateSkeletonTracing(initialState.tracing, [2, 3, 4], [], [0, 0, 1], 2)], ], TIMESTAMP, - tracingId, ); const saga = sendSaveRequestToServer(); saga.next(); @@ -329,7 +324,6 @@ test("SaveSaga should set the correct version numbers", (t) => { [UpdateActions.createEdge(2, 3, 4, tracingId)], ], TIMESTAMP, - tracingId, ); const saga = sendSaveRequestToServer(); saga.next(); @@ -355,7 +349,6 @@ test("SaveSaga should set the correct version numbers if the save queue was comp [UpdateActions.updateSkeletonTracing(initialState.tracing, [3, 4, 5], [], [0, 0, 1], 3)], ], TIMESTAMP, - tracingId, ); const saga = sendSaveRequestToServer(); saga.next(); @@ -383,7 +376,6 @@ test("SaveSaga addVersionNumbers should set the correct version numbers", (t) => ], TIMESTAMP, - tracingId, ); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 3); diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index 323af891bb6..66d04fc38dc 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -107,7 +107,6 @@ function createCompactedSaveQueueFromUpdateActions( createSaveQueueFromUpdateActions( updateActions.map((batch) => compactUpdateActions(batch, tracing)), timestamp, - tracing.tracingId, stats, ), ); @@ -238,11 +237,7 @@ test("SkeletonTracingSaga should do something if changed 
(saga test)", (t) => { saga.next(newState.flycam); const items = execCall(t, saga.next(newState.viewModeData.plane.tdCamera)); t.true(withoutUpdateTracing(items).length > 0); - expectValueDeepEqual( - t, - saga.next(items), - put(pushSaveQueueTransaction(items, "skeleton", serverSkeletonTracing.id)), - ); + expectValueDeepEqual(t, saga.next(items), put(pushSaveQueueTransaction(items, "skeleton"))); }); test("SkeletonTracingSaga should emit createNode update actions", (t) => { const newState = SkeletonTracingReducer(initialState, createNodeAction); @@ -1153,11 +1148,7 @@ test("compactUpdateActions should do nothing if it cannot compact", (t) => { testState.flycam, newState.flycam, ); - const saveQueueOriginal = createSaveQueueFromUpdateActions( - [updateActions], - TIMESTAMP, - skeletonTracing.tracingId, - ); + const saveQueueOriginal = createSaveQueueFromUpdateActions([updateActions], TIMESTAMP); const simplifiedUpdateActions = createCompactedSaveQueueFromUpdateActions( [updateActions], TIMESTAMP, diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts index f30a50ea181..c28c7b4fcb4 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts @@ -177,11 +177,7 @@ test("VolumeTracingSaga should do something if changed (saga test)", (t) => { const items = execCall(t, saga.next(newState.viewModeData.plane.tdCamera)); t.is(withoutUpdateTracing(items).length, 0); t.true(items[0].value.activeSegmentId === ACTIVE_CELL_ID); - expectValueDeepEqual( - t, - saga.next(items), - put(pushSaveQueueTransaction(items, "volume", volumeTracing.tracingId)), - ); + expectValueDeepEqual(t, saga.next(items), put(pushSaveQueueTransaction(items, "volume"))); }); test("VolumeTracingSaga should create a volume layer (saga test)", (t) => { From 8759bb3dbe380a3eeeec4e7cb51464d8d265d1e1 
Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 2 Dec 2024 09:57:30 +0100 Subject: [PATCH 245/361] fix stuck 'Activating Mapping' message --- frontend/javascripts/oxalis/model/sagas/mapping_saga.ts | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index 1a0044c4370..b858034c53f 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -362,7 +362,13 @@ function* handleSetMapping( return; } - if (showLoadingIndicator) { + const visibleSegmentationLayerName = yield* select( + (state) => getVisibleSegmentationLayer(state)?.name, + ); + if (showLoadingIndicator && layerName === visibleSegmentationLayerName) { + // Only show the message if the mapping belongs to the currently visible + // segmentation layer. Otherwise, the message would stay as long as the + // actual layer not visible. message.loading({ content: "Activating Mapping", key: MAPPING_MESSAGE_KEY, From e0267c97ff53c2f90ef959e3cb6770e5876aca0d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 2 Dec 2024 11:22:01 +0100 Subject: [PATCH 246/361] do not include name in annotationProto and updateAnnotationMetadata action. 
make description non-optional, emptystring means empty --- app/controllers/AnnotationIOController.scala | 3 +- .../WKRemoteTracingStoreController.scala | 6 +--- app/models/annotation/AnnotationService.scala | 6 ++-- webknossos-datastore/proto/Annotation.proto | 9 +++--- .../annotation/AnnotationUpdateActions.scala | 10 +++---- .../annotation/AnnotationWithTracings.scala | 4 ++- .../controllers/TSAnnotationController.scala | 28 ++----------------- ...alableminds.webknossos.tracingstore.routes | 1 - 8 files changed, 18 insertions(+), 49 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 36bcd2c71f5..723e1e79633 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -140,8 +140,7 @@ class AnnotationIOController @Inject()( description, ObjectId.generate) annotationProto = AnnotationProto( - name = Some(annotation.name), - description = Some(annotation.description), + description = annotation.description, version = 0L, annotationLayers = annotation.annotationLayers.map(_.toProto), earliestAccessibleVersion = 0L diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 2a1463ff952..6a8a2dba8da 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -73,11 +73,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore _ <- Fox.serialCombined(newLayersProto.filter(l => layerIdsToUpdate.contains(l.tracingId)))(l => annotationLayerDAO.updateName(annotationIdValidated, l.tracingId, l.name)) // Layer stats are ignored here, they are sent eagerly when saving updates - _ <- annotationDAO.updateName(annotationIdValidated, - request.body.name.getOrElse(AnnotationDefaults.defaultName)) - _ <- annotationDAO.updateDescription( - annotationIdValidated, - 
request.body.description.getOrElse(AnnotationDefaults.defaultDescription)) + _ <- annotationDAO.updateDescription(annotationIdValidated, request.body.description) } yield Ok } } diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index b64ce83012d..0081e2f1805 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -276,8 +276,7 @@ class AnnotationService @Inject()( ) } annotationProto = AnnotationProto( - name = Some(AnnotationDefaults.defaultName), - description = Some(AnnotationDefaults.defaultDescription), + description = AnnotationDefaults.defaultDescription, version = 0L, annotationLayers = layersProto, earliestAccessibleVersion = 0L @@ -473,8 +472,7 @@ class AnnotationService @Inject()( description.getOrElse(""), typ = AnnotationType.TracingBase) annotationBaseProto = AnnotationProto( - name = Some(AnnotationDefaults.defaultName), - description = Some(AnnotationDefaults.defaultDescription), + description = AnnotationDefaults.defaultDescription, version = 0L, annotationLayers = annotationLayers.map(_.toProto), earliestAccessibleVersion = 0L diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index c4087124c7d..61513fc9611 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -8,11 +8,10 @@ enum AnnotationLayerTypeProto { } message AnnotationProto { - optional string name = 1; - optional string description = 2; - required int64 version = 3; - repeated AnnotationLayerProto annotationLayers = 4; - required int64 earliestAccessibleVersion = 5; + required string description = 1; // emptystring encodes no description + required int64 version = 2; + repeated AnnotationLayerProto annotationLayers = 3; + required int64 earliestAccessibleVersion = 4; } message AnnotationLayerProto { diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index 5dc0eb5e273..1725bf9109c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -66,11 +66,11 @@ case class UpdateLayerMetadataAnnotationAction(tracingId: String, this.copy(actionAuthorId = authorId) } -case class UpdateMetadataAnnotationAction(name: Option[String], - description: Option[String], - actionTimestamp: Option[Long] = None, - actionAuthorId: Option[String] = None, - info: Option[String] = None) +case class UpdateMetadataAnnotationAction( + description: Option[String], // None means do not change description. Emptystring means set to empty + actionTimestamp: Option[Long] = None, + actionAuthorId: Option[String] = None, + info: Option[String] = None) extends AnnotationUpdateAction with ApplyImmediatelyUpdateAction { override def addTimestamp(timestamp: Long): UpdateAction = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 71f5e483f91..0d17dcfda24 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -102,7 +102,9 @@ case class AnnotationWithTracings( if (l.tracingId == a.tracingId) l.copy(name = a.layerName) else l))) def updateMetadata(a: UpdateMetadataAnnotationAction): AnnotationWithTracings = - this.copy(annotation = annotation.copy(name = a.name, description = 
a.description)) + a.description.map { newDescription => + this.copy(annotation = annotation.copy(description = newDescription)) + }.getOrElse(this) def withVersion(newVersion: Long): AnnotationWithTracings = { val tracingsUpdated = tracingsById.view.mapValues { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 7f5f7afc577..49341e827b4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -17,8 +17,7 @@ import com.scalableminds.webknossos.tracingstore.annotation.{ AnnotationTransactionService, ResetToBaseAnnotationAction, TSAnnotationService, - UpdateActionGroup, - UpdateMetadataAnnotationAction + UpdateActionGroup } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings._ @@ -26,17 +25,11 @@ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTraci import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import net.liftweb.common.{Empty, Failure, Full} import play.api.i18n.Messages -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scala.concurrent.ExecutionContext -case class UpdateAnnotationMetadataParameters(name: Option[String], description: Option[String]) - -object UpdateAnnotationMetadataParameters { - implicit val jsonFormat: OFormat[UpdateAnnotationMetadataParameters] = Json.format[UpdateAnnotationMetadataParameters] -} - class TSAnnotationController @Inject()( accessTokenService: TracingStoreAccessTokenService, slackNotificationService: 
TSSlackNotificationService, @@ -71,23 +64,6 @@ class TSAnnotationController @Inject()( } } - def updateMetadata(annotationId: String): Action[UpdateAnnotationMetadataParameters] = - Action.async(validateJson[UpdateAnnotationMetadataParameters]) { implicit request => - log() { - logTime(slackNotificationService.noticeSlowRequest) { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeAnnotation(annotationId)) { - for { - currentVersion <- annotationService.currentMaterializableVersion(annotationId) - _ <- annotationTransactionService.handleSingleUpdateAction( - annotationId, - currentVersion, - UpdateMetadataAnnotationAction(name = request.body.name, description = request.body.description)) - } yield Ok - } - } - } - } - def updateActionLog(annotationId: String, newestVersion: Option[Long] = None, oldestVersion: Option[Long] = None): Action[AnyContent] = Action.async { implicit request => diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index a7fca5c6c7c..93205b4153b 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -8,7 +8,6 @@ GET /health POST /annotation/save @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.save(annotationId: String) GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(annotationId: String, version: Option[Long]) POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(annotationId: String) -POST /annotation/:annotationId/updateMetadata @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateMetadata(annotationId: String) GET /annotation/:annotationId/updateActionLog 
@com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) From 8bf55fa01200f9344d49f04b1b1403215cff10f0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 2 Dec 2024 11:25:59 +0100 Subject: [PATCH 247/361] add version parameter to editableMapping agglomerateIdsForSegments --- .../controllers/EditableMappingController.scala | 10 +++++----- .../com.scalableminds.webknossos.tracingstore.routes | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 656eca8407d..96577b4d404 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -65,21 +65,21 @@ class EditableMappingController @Inject()( } } - def agglomerateIdsForSegments(tracingId: String): Action[ListOfLong] = + def agglomerateIdsForSegments(tracingId: String, version: Option[Long]): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findVolume(annotationId, tracingId) - currentVersion <- 
annotationService.currentMaterializableVersion(annotationId) + annotation <- annotationService.get(annotationId, version) + tracing <- annotationService.findVolume(annotationId, tracingId, version) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version = None) + editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version) relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( request.body.items.toSet, editableMappingInfo, - currentVersion, + annotation.version, tracingId, remoteFallbackLayer) agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 93205b4153b..2a55b74ecf0 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -36,7 +36,7 @@ POST /volume/mergedFromContents # Editable Mappings GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, version: Option[Long]) GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) -POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String) +POST /mapping/:tracingId/agglomeratesForSegments 
@com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String, version: Option[Long]) POST /mapping/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(tracingId: String) POST /mapping/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(tracingId: String) GET /mapping/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) From f256b6fed0020b45dfda47a5fb68c2467ba8d46d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 2 Dec 2024 17:06:39 +0100 Subject: [PATCH 248/361] option to count versions --- .../.gitignore | 3 +++ .../main.py | 1 + .../migration.py | 23 ++++++++++++++----- 3 files changed, 21 insertions(+), 6 deletions(-) create mode 100644 tools/migration-unified-annotation-versioning/.gitignore diff --git a/tools/migration-unified-annotation-versioning/.gitignore b/tools/migration-unified-annotation-versioning/.gitignore new file mode 100644 index 00000000000..5e2eef47df9 --- /dev/null +++ b/tools/migration-unified-annotation-versioning/.gitignore @@ -0,0 +1,3 @@ +counts.py +*.log +*.csv diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 12e988fe2e9..33005cfa0d0 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -19,6 +19,7 @@ def main(): parser.add_argument("--num_threads", help="Number of threads to migrate the annotations in parallel", type=int, default=1) parser.add_argument("--postgres", help="Postgres connection specifier.", type=str, default="postgres@localhost:5432/webknossos") parser.add_argument("--previous_start", 
help="Previous run start time. Example: 2024-11-27 10:37:30.171083", type=str) + parser.add_argument("--count_versions", help="Instead of migrating, only count materialized versions of the annotation", action="store_true") args = parser.parse_args() if args.dst is None and not args.dry: parser.error("At least one of --dry or --dst is required") diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index cd73d9e4428..36a043c7f53 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -55,14 +55,23 @@ def run(self): log_since(self.before, "Migrating all the things") def migrate_annotation(self, annotation): - logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") before = time.time() try: - mapping_id_map = self.build_mapping_id_map(annotation) - layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) - materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) - self.create_and_save_annotation_proto(annotation, materialized_versions) - log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) + if self.args.count_versions: + versions = 0 + for tracing_id, layer_type in annotation["layers"].items(): + collection = self.update_collection_for_layer_type(layer_type) + newest_version = self.get_newest_version(tracing_id, collection) + versions += newest_version + if versions > 1: + logger.info(f"{versions} versions for {annotation['_id']}{self.get_progress()}") + else: + logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") + mapping_id_map = self.build_mapping_id_map(annotation) + layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) + materialized_versions = 
self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) + self.create_and_save_annotation_proto(annotation, materialized_versions) + log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) except Exception: logger.exception(f"Exception while migrating annotation {annotation['_id']}:") with self.failure_count_lock: @@ -388,6 +397,8 @@ def read_annotation_list(self): """ cursor.execute(query) annotations += cursor.fetchall() + if annotation_count != len(annotations): + logger.info(f"Note that only {len(annotations)} of the {annotation_count} annotations have layers. Skipping zero-layer annotations.") log_since(before, "Loading annotation infos from postgres") return annotations From f340b143129011dc7233c2e21d32d757ecb8c707 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 3 Dec 2024 13:18:26 +0100 Subject: [PATCH 249/361] migration: add verbose flag, log also to file --- .../.gitignore | 1 + .../main.py | 1 + .../migration.py | 7 ++++-- .../utils.py | 23 +++++++++++++++---- 4 files changed, 25 insertions(+), 7 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/.gitignore b/tools/migration-unified-annotation-versioning/.gitignore index 5e2eef47df9..7df6b2c7ebf 100644 --- a/tools/migration-unified-annotation-versioning/.gitignore +++ b/tools/migration-unified-annotation-versioning/.gitignore @@ -1,3 +1,4 @@ counts.py *.log *.csv +logs/ diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 33005cfa0d0..2590da14469 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -20,6 +20,7 @@ def main(): parser.add_argument("--postgres", help="Postgres connection specifier.", type=str, default="postgres@localhost:5432/webknossos") parser.add_argument("--previous_start", help="Previous run start time. 
Example: 2024-11-27 10:37:30.171083", type=str) parser.add_argument("--count_versions", help="Instead of migrating, only count materialized versions of the annotation", action="store_true") + parser.add_argument("--verbose", "-v", help="Print for every annotation", action="store_true") args = parser.parse_args() if args.dst is None and not args.dry: parser.error("At least one of --dry or --dst is required") diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 36a043c7f53..4dbd39500e2 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -40,6 +40,7 @@ def __init__(self, args): self.failure_count = 0 self.failure_count_lock = threading.Lock() self.total_count = None + self.before = 0 def run(self): self.before = time.time() @@ -66,12 +67,14 @@ def migrate_annotation(self, annotation): if versions > 1: logger.info(f"{versions} versions for {annotation['_id']}{self.get_progress()}") else: - logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") + if self.args.verbose: + logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") mapping_id_map = self.build_mapping_id_map(annotation) layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) self.create_and_save_annotation_proto(annotation, materialized_versions) - log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) + if time.time() - before > 1 or self.args.verbose: + log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) except Exception: logger.exception(f"Exception while migrating annotation 
{annotation['_id']}:") with self.failure_count_lock: diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py index fa288e56c80..71686b8e1a3 100644 --- a/tools/migration-unified-annotation-versioning/utils.py +++ b/tools/migration-unified-annotation-versioning/utils.py @@ -3,6 +3,8 @@ from typing import Iterator, Tuple import sys from math import floor, ceil +from datetime import datetime +from pathlib import Path logger = logging.getLogger(__name__) @@ -11,11 +13,22 @@ def setup_logging(): root = logging.getLogger() root.setLevel(logging.DEBUG) - handler = logging.StreamHandler(sys.stdout) - handler.setLevel(logging.DEBUG) - formatter = logging.Formatter("%(asctime)s %(levelname)-8s [%(threadName)s] %(message)s") - handler.setFormatter(formatter) - root.addHandler(handler) + formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(threadName)-24s %(message)s") + + stdout_handler = logging.StreamHandler(sys.stdout) + stdout_handler.setLevel(logging.DEBUG) + stdout_handler.setFormatter(formatter) + root.addHandler(stdout_handler) + + time_str = datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f") + + logs_path = Path("logs") + logs_path.mkdir(exist_ok=True) + + file_handler = logging.FileHandler(f"logs/{time_str}.log") + file_handler.setLevel(logging.DEBUG) + file_handler.setFormatter(formatter) + root.addHandler(file_handler) def log_since(before, label: str, postfix: str = "") -> None: From 19888f3e3f827a8bf103bf54688e5070c9f753bd Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 3 Dec 2024 13:26:21 +0100 Subject: [PATCH 250/361] add error for missing layers --- tools/migration-unified-annotation-versioning/migration.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 4dbd39500e2..f25a378e9f4 100644 ---
a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -53,7 +53,7 @@ def run(self): executor.map(self.migrate_annotation, annotations) if self.failure_count > 0: logger.info(f"There were failures for {self.failure_count} annotations. See logs for details.") - log_since(self.before, "Migrating all the things") + log_since(self.before, f"Migrating all the {self.total_count} things") def migrate_annotation(self, annotation): before = time.time() @@ -72,6 +72,8 @@ def migrate_annotation(self, annotation): mapping_id_map = self.build_mapping_id_map(annotation) layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) + if len(materialized_versions) == 0: + raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") self.create_and_save_annotation_proto(annotation, materialized_versions) if time.time() - before > 1 or self.args.verbose: log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) From c8dfaecd56bcb620e1cc4bd7b24742d0cb95c044 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 3 Dec 2024 14:38:46 +0100 Subject: [PATCH 251/361] migration: randomize order with md5 hash --- tools/migration-unified-annotation-versioning/migration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index f25a378e9f4..2247708beb8 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -387,7 +387,7 @@ def read_annotation_list(self): SELECT _id, name, description, created, modified FROM webknossos.annotations WHERE modified < '{start_time}' 
{previous_start_query} - ORDER BY _id + ORDER BY MD5(_id) LIMIT {page_size} OFFSET {page_size * page_num} ) From 9299f71ae1b94ecc290b73a0714dcb461734e0b9 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 15:27:15 +0100 Subject: [PATCH 252/361] get rid of tracing.version in frontend --- frontend/javascripts/oxalis/default_state.ts | 1 - .../oxalis/model/bucket_data_handling/wkstore_adapter.ts | 2 +- .../oxalis/model/reducers/connectome_reducer.ts | 1 - .../oxalis/model/reducers/skeletontracing_reducer.ts | 7 +------ frontend/javascripts/oxalis/store.ts | 1 - frontend/javascripts/test/libs/nml.spec.ts | 1 - .../test/reducers/skeletontracing_reducer.spec.ts | 1 - .../javascripts/test/sagas/compact_toggle_actions.spec.ts | 1 - .../javascripts/test/sagas/skeletontracing_saga.spec.ts | 1 - frontend/javascripts/types/api_flow_types.ts | 1 - 10 files changed, 2 insertions(+), 15 deletions(-) diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 4ed7810c0c2..7cce34d3b55 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -165,7 +165,6 @@ const defaultState: OxalisState = { boundingBox: null, createdTimestamp: 0, type: "readonly", - version: 0, tracingId: "", additionalAxes: [], }, diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index 28f56b7e813..f450fb11c38 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -201,7 +201,7 @@ export async function requestFromStore( const resolutionInfo = getMagInfo(layerInfo.resolutions); const version = !isVolumeFallback && isSegmentation && maybeVolumeTracing != null - ? maybeVolumeTracing.version + ? 
state.tracing.version : null; const bucketInfo = batch.map((zoomedAddress) => createRequestBucketInfo( diff --git a/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts b/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts index 1a069bbdd02..e013de440f4 100644 --- a/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts @@ -75,7 +75,6 @@ function ConnectomeReducer(state: OxalisState, action: Action): OxalisState { trees: {}, treeGroups: [], tracingId: "connectome-tracing-data", - version: 1, boundingBox: null, userBoundingBoxes: [], navigationList: { diff --git a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts index 473b07b3c8e..e7d3589d080 100644 --- a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts @@ -106,7 +106,6 @@ function SkeletonTracingReducer(state: OxalisState, action: Action): OxalisState trees, treeGroups: action.tracing.treeGroups || [], tracingId: action.tracing.id, - version: action.tracing.version, boundingBox: convertServerBoundingBoxToFrontend(action.tracing.boundingBox), userBoundingBoxes, navigationList: { @@ -442,11 +441,7 @@ function SkeletonTracingReducer(state: OxalisState, action: Action): OxalisState return update(state, { tracing: { skeleton: { - $set: update(action.tracing, { - version: { - $set: skeletonTracing.version, - }, - }), + $set: action.tracing, }, }, }); diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index dbbba0a62de..cbb517e42b1 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -209,7 +209,6 @@ export type Annotation = { }; type TracingBase = { readonly createdTimestamp: number; - readonly version: number; readonly tracingId: string; readonly 
boundingBox: BoundingBoxType | null | undefined; readonly userBoundingBoxes: Array; diff --git a/frontend/javascripts/test/libs/nml.spec.ts b/frontend/javascripts/test/libs/nml.spec.ts index 5506e01c012..c29e53a09a8 100644 --- a/frontend/javascripts/test/libs/nml.spec.ts +++ b/frontend/javascripts/test/libs/nml.spec.ts @@ -45,7 +45,6 @@ const initialSkeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, tracingId: "tracingId", - version: 0, cachedMaxNodeId: 7, trees: { "1": { diff --git a/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts b/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts index de936e44f9e..7081ca59e1e 100644 --- a/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/skeletontracing_reducer.spec.ts @@ -50,7 +50,6 @@ const initialSkeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, tracingId: "tracingId", - version: 0, trees: {}, treeGroups: [], activeGroupId: null, diff --git a/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts b/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts index 40928585ea0..f4f17661d20 100644 --- a/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts +++ b/frontend/javascripts/test/sagas/compact_toggle_actions.spec.ts @@ -62,7 +62,6 @@ const createState = (trees: Tree[], _treeGroups: TreeGroup[]): OxalisState => ({ skeleton: { additionalAxes: [], createdTimestamp: 0, - version: 0, tracingId, boundingBox: null, userBoundingBoxes: [], diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index 66d04fc38dc..e5753a8e993 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -116,7 +116,6 @@ const skeletonTracing: SkeletonTracing = { type: "skeleton", createdTimestamp: 0, tracingId: "tracingId", 
- version: 0, trees: {}, treeGroups: [], activeGroupId: null, diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index b3c0cd686bd..749353544a9 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -854,7 +854,6 @@ export type ServerTracingBase = { editPositionAdditionalCoordinates: AdditionalCoordinate[] | null; editRotation: Point3; error?: string; - version: number; zoomLevel: number; additionalAxes: ServerAdditionalAxis[]; }; From ca2767fd6db95c553d31bac79e35036ce7c28297 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 15:27:40 +0100 Subject: [PATCH 253/361] fix linting --- frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index c74cb588d72..abb6df03d2c 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -34,7 +34,6 @@ import _ from "lodash"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import { formatVoxels } from "libs/format_utils"; import * as Utils from "libs/utils"; -import { V3 } from "libs/mjs"; import { AnnotationLayerType, type APIAnnotation, From 0113f9ae29b39311261e73c5d4020dda6951071a Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 15:29:01 +0100 Subject: [PATCH 254/361] remove remaining version properties --- .../javascripts/oxalis/model/reducers/volumetracing_reducer.ts | 1 - .../javascripts/test/fixtures/skeletontracing_server_objects.ts | 1 - frontend/javascripts/test/fixtures/tasktracing_server_objects.ts | 1 - .../javascripts/test/fixtures/volumetracing_server_objects.ts | 1 - .../test/sagas/volumetracing/volumetracing_saga.spec.ts | 1 - 5 files changed, 5 deletions(-) diff --git 
a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts index e7ffa4f8da8..fdcab7863f1 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts @@ -262,7 +262,6 @@ export function serverVolumeToClientVolumeTracing(tracing: ServerVolumeTracing): contourList: [], largestSegmentId, tracingId: tracing.id, - version: tracing.version, boundingBox: convertServerBoundingBoxToFrontend(tracing.boundingBox), fallbackLayer: tracing.fallbackLayer, userBoundingBoxes, diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 37879615786..049b4a7d06a 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -158,7 +158,6 @@ export const tracing: ServerSkeletonTracing = { }, additionalAxes: [], zoomLevel: 2, - version: 7, }; export const annotation: APIAnnotation = { description: "", diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 673b1780e7d..55e67c35388 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -63,7 +63,6 @@ export const tracing: ServerSkeletonTracing = { }, additionalAxes: [], zoomLevel: 2, - version: 0, id: "e90133de-b2db-4912-8261-8b6f84f7edab", }; export const annotation: APIAnnotation = { diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index c931ad7a9b6..96e44770de1 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ 
b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -35,7 +35,6 @@ export const tracing: ServerVolumeTracing = { elementClass: "uint16", id: "segmentation", largestSegmentId: 21890, - version: 0, zoomLevel: 0, mags: [ { diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts index c28c7b4fcb4..b26686fa67a 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts @@ -53,7 +53,6 @@ const serverVolumeTracing: ServerVolumeTracing = { id: "tracingId", elementClass: "uint32", createdTimestamp: 0, - version: 0, boundingBox: { topLeft: { x: 0, From 94670e571099552c6efa9c4faa52e2c164f4bf6e Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 15:34:51 +0100 Subject: [PATCH 255/361] fix save saga spec --- .../javascripts/test/sagas/save_saga.spec.ts | 41 +++---------------- 1 file changed, 6 insertions(+), 35 deletions(-) diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 3cb1bdb5b10..778bfa9ff0f 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -123,6 +123,7 @@ test("SaveSaga should send update actions", (t) => { saga.next(); // select state expectValueDeepEqual(t, saga.next([]), take("PUSH_SAVE_QUEUE_TRANSACTION")); }); + test("SaveSaga should send request to server", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ @@ -134,13 +135,7 @@ test("SaveSaga should send request to server", (t) => { const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next([ - { - version: LAST_VERSION, - type: TRACING_TYPE, - tracingId, - }, - ]); + saga.next(LAST_VERSION); saga.next(annotationId); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, 
LAST_VERSION); t.is(versionIncrement, 2); @@ -182,13 +177,7 @@ test("SaveSaga should retry update actions", (t) => { const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next([ - { - version: LAST_VERSION, - type: TRACING_TYPE, - tracingId, - }, - ]); + saga.next(LAST_VERSION); saga.next(annotationId); expectValueDeepEqual(t, saga.next(TRACINGSTORE_URL), requestWithTokenCall); saga.throw("Timeout"); @@ -211,13 +200,7 @@ test("SaveSaga should escalate on permanent client error update actions", (t) => const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next([ - { - version: LAST_VERSION, - type: TRACING_TYPE, - tracingId, - }, - ]); + saga.next(LAST_VERSION); saga.next(annotationId); const [saveQueueWithVersions, versionIncrement] = addVersionNumbers(saveQueue, LAST_VERSION); t.is(versionIncrement, 2); @@ -328,13 +311,7 @@ test("SaveSaga should set the correct version numbers", (t) => { const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next([ - { - version: LAST_VERSION, - type: TRACING_TYPE, - tracingId, - }, - ]); + saga.next(LAST_VERSION); saga.next(annotationId); saga.next(TRACINGSTORE_URL); expectValueDeepEqual(t, saga.next(), put(SaveActions.setVersionNumberAction(LAST_VERSION + 3))); @@ -353,13 +330,7 @@ test("SaveSaga should set the correct version numbers if the save queue was comp const saga = sendSaveRequestToServer(); saga.next(); saga.next(saveQueue); - saga.next([ - { - version: LAST_VERSION, - type: TRACING_TYPE, - tracingId, - }, - ]); + saga.next(LAST_VERSION); saga.next(annotationId); saga.next(TRACINGSTORE_URL); // two of the updateTracing update actions are removed by compactSaveQueue From ca61fb893298a7d81b3ae7f5e73fb3c0ac8a43e5 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 15:53:51 +0100 Subject: [PATCH 256/361] fix skeletontracing saga spec --- .../oxalis/model/sagas/save_saga.ts | 2 +- .../test/sagas/skeletontracing_saga.spec.ts | 55 
+++++++++++++++++-- 2 files changed, 52 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 4f04a00d284..499261df72b 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -440,7 +440,7 @@ export function* setupSavingForTracingType( ); if (items.length > 0) { - yield* put(pushSaveQueueTransaction(items, tracingId)); + yield* put(pushSaveQueueTransaction(items)); } prevTracing = tracing; diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index e5753a8e993..dc66fd56a9f 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -131,6 +131,7 @@ const skeletonTracing: SkeletonTracing = { showSkeletons: true, additionalAxes: [], }; + const serverSkeletonTracing: ServerSkeletonTracing = { ...skeletonTracing, id: skeletonTracing.tracingId, @@ -236,7 +237,7 @@ test("SkeletonTracingSaga should do something if changed (saga test)", (t) => { saga.next(newState.flycam); const items = execCall(t, saga.next(newState.viewModeData.plane.tdCamera)); t.true(withoutUpdateTracing(items).length > 0); - expectValueDeepEqual(t, saga.next(items), put(pushSaveQueueTransaction(items, "skeleton"))); + expectValueDeepEqual(t, saga.next(items), put(pushSaveQueueTransaction(items))); }); test("SkeletonTracingSaga should emit createNode update actions", (t) => { const newState = SkeletonTracingReducer(initialState, createNodeAction); @@ -268,6 +269,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", t.like(updateActions[0], { name: "createNode", value: { + actionTracingId: "tracingId", id: 1, treeId: 1, }, @@ -275,6 +277,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", 
t.like(updateActions[1], { name: "createNode", value: { + actionTracingId: "tracingId", id: 2, treeId: 1, }, @@ -282,6 +285,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", t.deepEqual(updateActions[2], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 2, @@ -300,10 +304,17 @@ test("SkeletonTracingSaga should emit createNode and createTree update actions", initialState.flycam, newState.flycam, ); - t.like(updateActions[0], { name: "createTree", value: { id: 2 } }); + t.like(updateActions[0], { + name: "createTree", + value: { + actionTracingId: "tracingId", + id: 2, + }, + }); t.like(updateActions[1], { name: "createNode", value: { + actionTracingId: "tracingId", id: 2, treeId: 2, }, @@ -311,6 +322,7 @@ test("SkeletonTracingSaga should emit createNode and createTree update actions", t.like(updateActions[2], { name: "createNode", value: { + actionTracingId: "tracingId", id: 1, treeId: 1, }, @@ -333,14 +345,22 @@ test("SkeletonTracingSaga should emit first deleteNode and then createNode updat t.deepEqual(updateActions[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 2, }, }); - t.deepEqual(updateActions[1], { name: "deleteTree", value: { id: 2 } }); + t.deepEqual(updateActions[1], { + name: "deleteTree", + value: { + actionTracingId: "tracingId", + id: 2, + }, + }); t.like(updateActions[2], { name: "createNode", value: { + actionTracingId: "tracingId", id: 2, treeId: 1, }, @@ -348,6 +368,7 @@ test("SkeletonTracingSaga should emit first deleteNode and then createNode updat t.deepEqual(updateActions[3], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 2, @@ -366,6 +387,7 @@ test("SkeletonTracingSaga should emit a deleteNode update action", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 1, treeId: 1, }, @@ -386,6 +408,7 @@ test("SkeletonTracingSaga 
should emit a deleteEdge update action", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", nodeId: 2, treeId: 1, }, @@ -393,6 +416,7 @@ test("SkeletonTracingSaga should emit a deleteEdge update action", (t) => { t.deepEqual(updateActions[1], { name: "deleteEdge", value: { + actionTracingId: "tracingId", treeId: 1, source: 1, target: 2, @@ -408,7 +432,13 @@ test("SkeletonTracingSaga should emit a deleteTree update action", (t) => { testState.flycam, newState.flycam, ); - t.like(updateActions[0], { name: "deleteTree", value: { id: 2 } }); + t.like(updateActions[0], { + name: "deleteTree", + value: { + actionTracingId: "tracingId", + id: 2, + }, + }); }); test("SkeletonTracingSaga should emit an updateNode update action", (t) => { const testState = SkeletonTracingReducer(initialState, createNodeAction); @@ -422,6 +452,7 @@ test("SkeletonTracingSaga should emit an updateNode update action", (t) => { t.like(updateActions[0], { name: "updateNode", value: { + actionTracingId: "tracingId", id: 1, treeId: 1, radius: 12, @@ -454,6 +485,7 @@ test("SkeletonTracingSaga should emit an updateTree update actions (comments)", t.like(updateActions[0], { name: "updateTree", value: { + actionTracingId: "tracingId", id: 1, comments: [ { @@ -490,6 +522,7 @@ test("SkeletonTracingSaga should emit an updateTree update actions (branchpoints t.like(updateActions[0], { name: "updateTree", value: { + actionTracingId: "tracingId", id: 1, branchPoints: [ { @@ -520,6 +553,7 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { + actionTracingId: "tracingId", treeId: 1, nodeId: 1, }, @@ -527,12 +561,14 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[1], { name: "deleteTree", value: { + actionTracingId: "tracingId", id: 1, }, }); t.like(updateActions[2], { name: "createNode", value: { + actionTracingId: 
"tracingId", id: 1, treeId: 2, }, @@ -540,6 +576,7 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[3], { name: "createEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 3, target: 1, @@ -568,12 +605,14 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.like(updateActions[0], { name: "createTree", value: { + actionTracingId: "tracingId", id: 3, }, }); t.like(updateActions[1], { name: "createNode", value: { + actionTracingId: "tracingId", id: 2, treeId: 3, }, @@ -581,12 +620,14 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.like(updateActions[2], { name: "createTree", value: { + actionTracingId: "tracingId", id: 4, }, }); t.like(updateActions[3], { name: "createNode", value: { + actionTracingId: "tracingId", id: 4, treeId: 4, }, @@ -594,6 +635,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[4], { name: "deleteNode", value: { + actionTracingId: "tracingId", treeId: 2, nodeId: 2, }, @@ -601,6 +643,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[5], { name: "deleteNode", value: { + actionTracingId: "tracingId", treeId: 2, nodeId: 3, }, @@ -608,6 +651,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[6], { name: "deleteNode", value: { + actionTracingId: "tracingId", treeId: 2, nodeId: 4, }, @@ -615,6 +659,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[7], { name: "deleteEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 2, target: 3, @@ -623,6 +668,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[8], { name: "deleteEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 3, target: 4, @@ -631,6 +677,7 @@ 
test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[9], { name: "deleteEdge", value: { + actionTracingId: "tracingId", treeId: 2, source: 3, target: 1, From 3be7d8c1b94204cd8dbf2bd8da2d17bcbf679d19 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 17:13:32 +0100 Subject: [PATCH 257/361] mock annotation proto in specs --- .../skeletontracing_server_objects.ts | 19 ++++++++++++++- .../fixtures/tasktracing_server_objects.ts | 18 +++++++++++++- .../fixtures/volumetracing_server_objects.ts | 18 +++++++++++++- .../javascripts/test/helpers/apiHelpers.ts | 24 ++++++++++++++----- 4 files changed, 70 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 049b4a7d06a..79e830c50c8 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -2,7 +2,10 @@ import { type ServerSkeletonTracing, type APIAnnotation, AnnotationLayerType, + APITracingStoreAnnotation, } from "types/api_flow_types"; + +const TRACING_ID = "47e37793-d0be-4240-a371-87ce68561a13"; export const tracing: ServerSkeletonTracing = { typ: AnnotationLayerType.Skeleton, id: "47e37793-d0be-4240-a371-87ce68561a13", @@ -180,7 +183,7 @@ export const annotation: APIAnnotation = { annotationLayers: [ { name: AnnotationLayerType.Skeleton, - tracingId: "47e37793-d0be-4240-a371-87ce68561a13", + tracingId: TRACING_ID, typ: AnnotationLayerType.Skeleton, }, ], @@ -212,3 +215,17 @@ export const annotation: APIAnnotation = { othersMayEdit: false, isLockedByOwner: false, }; + +export const annotationProto: APITracingStoreAnnotation = { + name: "skeleton-annotation-name", + description: "skeleton-annotation-description", + version: 1, + earliestAccessibleVersion: 0, + annotationLayers: [ + { + tracingId: TRACING_ID, + name: "skeleton 
layer name", + type: AnnotationLayerType.Skeleton, + }, + ], +}; diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 55e67c35388..3ae0ec8b37f 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -2,8 +2,10 @@ import { type ServerSkeletonTracing, type APIAnnotation, AnnotationLayerType, + APITracingStoreAnnotation, } from "types/api_flow_types"; +const TRACING_ID = "e90133de-b2db-4912-8261-8b6f84f7edab"; export const tracing: ServerSkeletonTracing = { typ: "Skeleton", trees: [ @@ -122,9 +124,10 @@ export const annotation: APIAnnotation = { allowDownload: true, }, annotationLayers: [ + // does this still exist? { name: "Skeleton", - tracingId: "e90133de-b2db-4912-8261-8b6f84f7edab", + tracingId: TRACING_ID, typ: AnnotationLayerType.Skeleton, }, ], @@ -180,3 +183,16 @@ export const annotation: APIAnnotation = { }, ], }; +export const annotationProto: APITracingStoreAnnotation = { + name: "task-annotation-name", + description: "task-annotation-description", + version: 1, + earliestAccessibleVersion: 0, + annotationLayers: [ + { + tracingId: TRACING_ID, + name: "Skeleton", + type: AnnotationLayerType.Skeleton, + }, + ], +}; diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index 96e44770de1..fdd591bd0d3 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -2,7 +2,10 @@ import { type ServerVolumeTracing, type APIAnnotation, AnnotationLayerType, + APITracingStoreAnnotation, } from "types/api_flow_types"; + +const TRACING_ID = "tracingId-1234"; export const tracing: ServerVolumeTracing = { typ: "Volume", activeSegmentId: 10000, @@ -89,7 +92,7 @@ export const annotation: 
APIAnnotation = { annotationLayers: [ { name: "volume", - tracingId: "tracingId-1234", + tracingId: TRACING_ID, typ: AnnotationLayerType.Volume, }, ], @@ -121,3 +124,16 @@ export const annotation: APIAnnotation = { othersMayEdit: false, isLockedByOwner: false, }; +export const annotationProto: APITracingStoreAnnotation = { + name: "volume-annotation-name", + description: "volume-annotation-description", + version: 1, + earliestAccessibleVersion: 0, + annotationLayers: [ + { + tracingId: TRACING_ID, + name: "volume", + type: AnnotationLayerType.Volume, + }, + ], +}; diff --git a/frontend/javascripts/test/helpers/apiHelpers.ts b/frontend/javascripts/test/helpers/apiHelpers.ts index 8de61c806bf..901b502d0a5 100644 --- a/frontend/javascripts/test/helpers/apiHelpers.ts +++ b/frontend/javascripts/test/helpers/apiHelpers.ts @@ -1,4 +1,3 @@ -// @ts-nocheck import { createNanoEvents } from "nanoevents"; import type { ExecutionContext } from "ava"; import _ from "lodash"; @@ -13,16 +12,20 @@ import { setSceneController } from "oxalis/controller/scene_controller_provider" import { tracing as SKELETON_TRACING, annotation as SKELETON_ANNOTATION, + annotationProto as SKELETON_ANNOTATION_PROTO, } from "../fixtures/skeletontracing_server_objects"; import { tracing as TASK_TRACING, annotation as TASK_ANNOTATION, + annotationProto as TASK_ANNOTATION_PROTO, } from "../fixtures/tasktracing_server_objects"; import { tracing as VOLUME_TRACING, annotation as VOLUME_ANNOTATION, + annotationProto as VOLUME_ANNOTATION_PROTO, } from "../fixtures/volumetracing_server_objects"; import DATASET from "../fixtures/dataset_server_object"; +import type { ApiInterface } from "oxalis/api/api_latest"; const Request = { receiveJSON: sinon.stub(), @@ -32,8 +35,8 @@ const Request = { sendJSONReceiveArraybufferWithHeaders: sinon.stub(), always: () => Promise.resolve(), }; -export function createBucketResponseFunction(TypedArrayClass, fillValue, delay = 0) { - return async function getBucketData(_url, 
payload) { +export function createBucketResponseFunction(TypedArrayClass: any, fillValue: number, delay = 0) { + return async function getBucketData(_url: string, payload: { data: Array }) { const bucketCount = payload.data.length; await sleep(delay); return { @@ -46,6 +49,7 @@ export function createBucketResponseFunction(TypedArrayClass, fillValue, delay = }; } +// @ts-ignore Request.sendJSONReceiveArraybufferWithHeaders = createBucketResponseFunction(Uint8Array, 0); const ErrorHandling = { assertExtendContext: _.noop, @@ -58,6 +62,7 @@ const app = { }; const protoHelpers = { parseProtoTracing: sinon.stub(), + parseProtoAnnotation: sinon.stub(), }; export const TIMESTAMP = 1494695001688; const DateMock = { @@ -125,14 +130,17 @@ const modelData = { skeleton: { tracing: SKELETON_TRACING, annotation: SKELETON_ANNOTATION, + annotationProto: SKELETON_ANNOTATION_PROTO, }, volume: { tracing: VOLUME_TRACING, annotation: VOLUME_ANNOTATION, + annotationProto: VOLUME_ANNOTATION_PROTO, }, task: { tracing: TASK_TRACING, annotation: TASK_ANNOTATION, + annotationProto: TASK_ANNOTATION_PROTO, }, }; @@ -199,6 +207,9 @@ export function __setupOxalis( // each __setupOxalis call would overwrite the current stub to receiveJSON. 
.onCall(counter++) .returns(Promise.resolve(datasetClone)); + + console.log("mock parseProtoAnnotation with", modelData[mode].annotationProto); + protoHelpers.parseProtoAnnotation.returns(_.cloneDeep(modelData[mode].annotationProto)); protoHelpers.parseProtoTracing.returns(_.cloneDeep(modelData[mode].tracing)); Request.receiveJSON .withArgs("/api/userToken/generate", { @@ -219,11 +230,12 @@ export function __setupOxalis( setSceneController({ name: "This is a dummy scene controller so that getSceneController works in the tests.", + // @ts-ignore segmentMeshController: { meshesGroupsPerSegmentId: {} }, }); return Model.fetch( - ANNOTATION_TYPE, + null, // no compound annotation { annotationId: ANNOTATION_ID, type: ControlModeEnum.TRACE, @@ -233,11 +245,11 @@ export function __setupOxalis( .then(() => { // Trigger the event ourselves, as the OxalisController is not instantiated app.vent.emit("webknossos:ready"); - webknossos.apiReady(apiVersion).then((apiObject) => { + webknossos.apiReady(apiVersion).then((apiObject: ApiInterface) => { t.context.api = apiObject; }); }) - .catch((error) => { + .catch((error: { message: string }) => { console.error("model.fetch() failed", error); t.fail(error.message); }); From 4d870cbb25de7a9278679c940ad0cbe4d7afe584 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 17:27:45 +0100 Subject: [PATCH 258/361] fix wkstore adapter spec --- frontend/javascripts/test/helpers/apiHelpers.ts | 1 - .../test/model/binary/layers/wkstore_adapter.spec.ts | 11 +++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/test/helpers/apiHelpers.ts b/frontend/javascripts/test/helpers/apiHelpers.ts index 901b502d0a5..b24e5401d52 100644 --- a/frontend/javascripts/test/helpers/apiHelpers.ts +++ b/frontend/javascripts/test/helpers/apiHelpers.ts @@ -208,7 +208,6 @@ export function __setupOxalis( .onCall(counter++) .returns(Promise.resolve(datasetClone)); - console.log("mock parseProtoAnnotation with", 
modelData[mode].annotationProto); protoHelpers.parseProtoAnnotation.returns(_.cloneDeep(modelData[mode].annotationProto)); protoHelpers.parseProtoTracing.returns(_.cloneDeep(modelData[mode].tracing)); Request.receiveJSON diff --git a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts index 509469042b2..a00c2ce1dea 100644 --- a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts +++ b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts @@ -8,6 +8,7 @@ import sinon from "sinon"; import test from "ava"; import { MagInfo } from "oxalis/model/helpers/mag_info"; import type { APIDataLayer } from "types/api_flow_types"; +import type { PushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; const RequestMock = { always: (promise: Promise, func: (v: any) => any) => promise.then(func, func), @@ -21,8 +22,10 @@ function setFourBit(bool: boolean) { _fourBit = bool; } +const tracingId = "tracingId"; const mockedCube = { isSegmentation: true, + layerName: tracingId, magInfo: new MagInfo([ [1, 1, 1], [2, 2, 2], @@ -232,6 +235,7 @@ test.serial( setFourBit(false); }, ); + test.serial("sendToStore: Request Handling should send the correct request parameters", (t) => { const data = new Uint8Array(2); const bucket1 = new DataBucket("uint8", [0, 0, 0, 0], null, mockedCube); @@ -243,13 +247,13 @@ test.serial("sendToStore: Request Handling should send the correct request param const batch = [bucket1, bucket2]; const getBucketData = sinon.stub(); getBucketData.returns(data); - const tracingId = "tracingId"; - const expectedSaveQueueItems = { + const expectedSaveQueueItems: PushSaveQueueTransaction = { type: "PUSH_SAVE_QUEUE_TRANSACTION", items: [ { name: "updateBucket", value: { + actionTracingId: tracingId, position: [0, 0, 0], additionalCoordinates: undefined, mag: [1, 1, 1], @@ -260,6 +264,7 @@ test.serial("sendToStore: Request Handling should send 
the correct request param { name: "updateBucket", value: { + actionTracingId: tracingId, position: [64, 64, 64], additionalCoordinates: undefined, mag: [2, 2, 2], @@ -269,8 +274,6 @@ test.serial("sendToStore: Request Handling should send the correct request param }, ], transactionId: "dummyRequestId", - saveQueueType: "volume", - tracingId, }; const pushQueue = new PushQueue({ ...mockedCube, layerName: tracingId }); From dd9dadc7d3624d4feba68a0555bae8aef523959b Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 17:35:16 +0100 Subject: [PATCH 259/361] fix more incorrectly passed transactionIds; fix save_reducer spec --- .../oxalis/model/actions/save_actions.ts | 6 ++--- .../oxalis/model/sagas/annotation_saga.tsx | 5 +--- .../oxalis/model/sagas/proofread_saga.ts | 6 ++--- .../oxalis/model/sagas/volumetracing_saga.tsx | 5 +--- .../javascripts/oxalis/view/version_list.tsx | 7 +---- .../test/reducers/save_reducer.spec.ts | 27 +++++++------------ .../volumetracing/volumetracing_saga.spec.ts | 2 +- 7 files changed, 19 insertions(+), 39 deletions(-) diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index 3ffd784b4b4..94ecb79d769 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -41,22 +41,20 @@ export type SaveAction = // From this point on, we can assume that the groups fulfil the isolation requirement. 
export const pushSaveQueueTransaction = ( items: Array, - transactionId: string = getUid(), ): PushSaveQueueTransaction => ({ type: "PUSH_SAVE_QUEUE_TRANSACTION", items, - transactionId, + transactionId: getUid(), }) as const; export const pushSaveQueueTransactionIsolated = ( item: UpdateActionWithIsolationRequirement, - transactionId: string = getUid(), ): PushSaveQueueTransaction => ({ type: "PUSH_SAVE_QUEUE_TRANSACTION", items: [item], - transactionId, + transactionId: getUid(), }) as const; export const saveNowAction = () => diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index dd5a8f051c5..f8d66ae28a2 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -118,10 +118,7 @@ export function* pushAnnotationUpdateAsync(action: Action) { function* pushAnnotationLayerUpdateAsync(action: EditAnnotationLayerAction): Saga { const { tracingId, layerProperties } = action; yield* put( - pushSaveQueueTransaction( - [updateAnnotationLayerName(tracingId, layerProperties.name)], - tracingId, - ), + pushSaveQueueTransaction([updateAnnotationLayerName(tracingId, layerProperties.name)]), ); } diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index cc0730d0bd0..b3c1e67dbcd 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -455,7 +455,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { return; } - yield* put(pushSaveQueueTransaction(items, volumeTracingId)); + yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS" || action.type === "DELETE_EDGE") { @@ -793,7 +793,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { return; } 
- yield* put(pushSaveQueueTransaction(items, volumeTracingId)); + yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); if (action.type === "MIN_CUT_AGGLOMERATE") { @@ -951,7 +951,7 @@ function* handleProofreadCutFromNeighbors(action: Action) { return; } - yield* put(pushSaveQueueTransaction(items, volumeTracingId)); + yield* put(pushSaveQueueTransaction(items)); yield* call([Model, Model.ensureSavedState]); // Now that the changes are saved, we can split the mapping locally (because it requires diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index cdd7b8ff65b..ece2a109a33 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -955,10 +955,7 @@ function* handleDeleteSegmentData(): Saga { yield* put(setBusyBlockingInfoAction(true, "Segment is being deleted.")); yield* put( - pushSaveQueueTransaction( - [deleteSegmentDataVolumeAction(action.segmentId, action.layerName)], - action.layerName, - ), + pushSaveQueueTransaction([deleteSegmentDataVolumeAction(action.segmentId, action.layerName)]), ); yield* call([Model, Model.ensureSavedState]); diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index 3c878c2b624..0daf207db57 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -72,12 +72,7 @@ async function handleRestoreVersion( if (props.allowUpdate) { const newestVersion = _.max(versions.map((batch) => batch.version)) || 0; Store.dispatch(setVersionNumberAction(newestVersion)); - Store.dispatch( - pushSaveQueueTransactionIsolated( - revertToVersion(version), - "experimental; leaving out tracingId as this should not be required", - ), - ); + Store.dispatch(pushSaveQueueTransactionIsolated(revertToVersion(version))); await 
Model.ensureSavedState(); Store.dispatch(setVersionRestoreVisibilityAction(false)); Store.dispatch(setAnnotationAllowUpdateAction(true)); diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index ce19e3b3197..907fee27b34 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -14,7 +14,7 @@ const AccessorMock = { getStats: () => null, }; mockRequire("libs/date", DateMock); -mockRequire("oxalis/model/accessors/skeletontracing_accessor", AccessorMock); +mockRequire("oxalis/model/accessors/annotation_accessor", AccessorMock); const SaveActions = mockRequire.reRequire( "oxalis/model/actions/save_actions", @@ -37,12 +37,11 @@ const initialState = { totalActionCount: 0, }, }, - tracing: {}, } as any as OxalisState; test("Save should add update actions to the queue", (t) => { const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items); const newState = SaveReducer(initialState, pushAction); t.deepEqual(newState.save.queue, saveQueue); }); @@ -52,19 +51,13 @@ test("Save should add more update actions to the queue", (t) => { createEdge(treeId, 2, 3, tracingId), ]; const saveQueue = createSaveQueueFromUpdateActions([getItems(0), getItems(1)], TIMESTAMP); - const testState = SaveReducer( - initialState, - SaveActions.pushSaveQueueTransaction(getItems(0), "skeleton"), - ); - const newState = SaveReducer( - testState, - SaveActions.pushSaveQueueTransaction(getItems(1), "skeleton"), - ); + const testState = SaveReducer(initialState, SaveActions.pushSaveQueueTransaction(getItems(0))); + const newState = SaveReducer(testState, SaveActions.pushSaveQueueTransaction(getItems(1))); 
t.deepEqual(newState.save.queue, saveQueue); }); test("Save should add zero update actions to the queue", (t) => { const items: UpdateActionWithoutIsolationRequirement[] = []; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items); const newState = SaveReducer(initialState, pushAction); t.deepEqual(newState.save.queue, []); }); @@ -72,8 +65,8 @@ test("Save should remove one update actions from the queue", (t) => { const firstItem = [createEdge(0, 1, 2, tracingId)]; const secondItem = [createEdge(1, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([secondItem], TIMESTAMP); - const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem, "skeleton"); - const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem, "skeleton"); + const firstPushAction = SaveActions.pushSaveQueueTransaction(firstItem); + const secondPushAction = SaveActions.pushSaveQueueTransaction(secondItem); const popAction = SaveActions.shiftSaveQueueAction(1); let newState = SaveReducer(initialState, firstPushAction); newState = SaveReducer(newState, secondPushAction); @@ -83,7 +76,7 @@ test("Save should remove one update actions from the queue", (t) => { test("Save should remove zero update actions from the queue", (t) => { const items = [createEdge(0, 1, 2, tracingId), createEdge(1, 2, 3, tracingId)]; const saveQueue = createSaveQueueFromUpdateActions([items], TIMESTAMP); - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items); const popAction = SaveActions.shiftSaveQueueAction(0); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); @@ -91,7 +84,7 @@ test("Save should remove zero update actions from the queue", (t) => { }); test("Save should remove all update actions from the queue (1/2)", (t) => { const items = [createEdge(0, 1, 2, 
tracingId), createEdge(0, 2, 3, tracingId)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items); const popAction = SaveActions.shiftSaveQueueAction(2); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); @@ -99,7 +92,7 @@ test("Save should remove all update actions from the queue (1/2)", (t) => { }); test("Save should remove all update actions from the queue (2/2)", (t) => { const items = [createEdge(0, 1, 2, tracingId), createEdge(0, 2, 3, tracingId)]; - const pushAction = SaveActions.pushSaveQueueTransaction(items, "skeleton"); + const pushAction = SaveActions.pushSaveQueueTransaction(items); const popAction = SaveActions.shiftSaveQueueAction(5); let newState = SaveReducer(initialState, pushAction); newState = SaveReducer(newState, popAction); diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts index b26686fa67a..f743842fdc2 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts @@ -176,7 +176,7 @@ test("VolumeTracingSaga should do something if changed (saga test)", (t) => { const items = execCall(t, saga.next(newState.viewModeData.plane.tdCamera)); t.is(withoutUpdateTracing(items).length, 0); t.true(items[0].value.activeSegmentId === ACTIVE_CELL_ID); - expectValueDeepEqual(t, saga.next(items), put(pushSaveQueueTransaction(items, "volume"))); + expectValueDeepEqual(t, saga.next(items), put(pushSaveQueueTransaction(items))); }); test("VolumeTracingSaga should create a volume layer (saga test)", (t) => { From d08621d4bc50a36eff720d31e7900a8392597293 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 17:54:37 +0100 Subject: [PATCH 260/361] fix last frontend tests :tm: --- 
.../oxalis/model/helpers/generate_dummy_trees.ts | 4 ++-- frontend/javascripts/oxalis/model_initialization.ts | 3 --- .../test/backend-snapshot-tests/annotations.e2e.ts | 4 ++-- frontend/javascripts/test/sagas/saga_integration.spec.ts | 6 ++++-- 4 files changed, 8 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/oxalis/model/helpers/generate_dummy_trees.ts b/frontend/javascripts/oxalis/model/helpers/generate_dummy_trees.ts index 3ed8d9b4174..886a02d9c60 100644 --- a/frontend/javascripts/oxalis/model/helpers/generate_dummy_trees.ts +++ b/frontend/javascripts/oxalis/model/helpers/generate_dummy_trees.ts @@ -15,7 +15,7 @@ export default function generateDummyTrees( function generateDummyTree(): ServerSkeletonTracingTree { const nodes = []; const edges = []; - let counter = -1; + let counter = 0; const initialNodeId = currentNewNodeId; while (counter++ < nodeCount) { @@ -43,7 +43,7 @@ export default function generateDummyTrees( counter = 0; - while (counter++ < nodeCount) { + while (counter++ < nodeCount - 1) { edges.push({ source: initialNodeId + counter, target: initialNodeId + counter - 1, diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index d8dd1cf3606..3cba36a1d80 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -354,9 +354,6 @@ function initializeAnnotation( const skeletonTracing = getNullableSkeletonTracing(serverTracings); if (skeletonTracing != null) { - // To generate a huge amount of dummy trees, use: - // import generateDummyTrees from "./model/helpers/generate_dummy_trees"; - // tracing.trees = generateDummyTrees(1, 200000); Store.dispatch(initializeSkeletonTracingAction(skeletonTracing)); } Store.dispatch(setVersionNumberAction(version)); diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 
64b0c7c9d23..e02e592944f 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -178,7 +178,7 @@ test.serial("Send update actions and compare resulting tracing", async (t) => { }); test("Send complex update actions and compare resulting tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); - const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 5)); + const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 6)); const treeGroups = [ { groupId: 1, @@ -214,7 +214,7 @@ test("Send complex update actions and compare resulting tracing", async (t) => { test("Update Metadata for Skeleton Tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); - const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 5)); + const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 6)); const createTreesUpdateActions = Array.from(diffTrees("someTracingId", {}, trees)); const metadata = [ { diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 9e7816bf275..ca21bf98201 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -85,7 +85,9 @@ test.serial( test.serial("Save actions should not be chunked below the chunk limit (1/3)", (t) => { Store.dispatch(discardSaveQueuesAction()); t.deepEqual(Store.getState().save.queue, []); - const trees = generateDummyTrees(1000, 1); + // This will create 250 trees with one node each. Thus, 500 update actions will + // be sent to the server (two per node). 
+ const trees = generateDummyTrees(250, 1); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); t.is(Store.getState().save.queue.length, 1); t.true(Store.getState().save.queue[0].actions.length < MAXIMUM_ACTION_COUNT_PER_BATCH); @@ -94,7 +96,7 @@ test.serial("Save actions should not be chunked below the chunk limit (1/3)", (t test.serial("Save actions should be chunked above the chunk limit (2/3)", (t) => { Store.dispatch(discardSaveQueuesAction()); t.deepEqual(Store.getState().save.queue, []); - const trees = generateDummyTrees(5000, 1); + const trees = generateDummyTrees(5000, 2); Store.dispatch(addTreesAndGroupsAction(createTreeMapFromTreeArray(trees), [])); const state = Store.getState(); t.true(state.save.queue.length > 1); From 0d48f4a99fd59aff29f0f05aaca88c0482197a52 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Dec 2024 18:03:02 +0100 Subject: [PATCH 261/361] fix linting --- .../javascripts/test/fixtures/skeletontracing_server_objects.ts | 2 +- .../javascripts/test/fixtures/tasktracing_server_objects.ts | 2 +- .../javascripts/test/fixtures/volumetracing_server_objects.ts | 2 +- frontend/javascripts/test/sagas/save_saga.spec.ts | 1 - 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 79e830c50c8..018ad315d4f 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -2,7 +2,7 @@ import { type ServerSkeletonTracing, type APIAnnotation, AnnotationLayerType, - APITracingStoreAnnotation, + type APITracingStoreAnnotation, } from "types/api_flow_types"; const TRACING_ID = "47e37793-d0be-4240-a371-87ce68561a13"; diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 
3ae0ec8b37f..165ccd7da05 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -2,7 +2,7 @@ import { type ServerSkeletonTracing, type APIAnnotation, AnnotationLayerType, - APITracingStoreAnnotation, + type APITracingStoreAnnotation, } from "types/api_flow_types"; const TRACING_ID = "e90133de-b2db-4912-8261-8b6f84f7edab"; diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index fdd591bd0d3..63e8ae2141f 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -2,7 +2,7 @@ import { type ServerVolumeTracing, type APIAnnotation, AnnotationLayerType, - APITracingStoreAnnotation, + type APITracingStoreAnnotation, } from "types/api_flow_types"; const TRACING_ID = "tracingId-1234"; diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 778bfa9ff0f..4810ea96dc8 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -78,7 +78,6 @@ const initialState = { }; const LAST_VERSION = 2; const TRACINGSTORE_URL = "test.webknossos.xyz"; -const TRACING_TYPE = "skeleton"; test("SaveSaga should compact multiple updateTracing update actions", (t) => { const saveQueue = createSaveQueueFromUpdateActions( [ From 656561a9861bc790a8e52fbef1f97e14e9ced114 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 3 Dec 2024 18:21:27 +0100 Subject: [PATCH 262/361] format backend --- .../services/MeshMappingHelper.scala | 4 ++-- .../controllers/VolumeTracingController.scala | 20 ++++++++++++++++--- .../EditableMappingLayer.scala | 7 ++++--- 3 files changed, 23 insertions(+), 8 deletions(-) diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala index 74e78e48b1d..e5831d8a580 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala @@ -22,8 +22,8 @@ trait MeshMappingHelper { editableMappingTracingId: Option[String], agglomerateId: Long, mappingNameForMeshFile: Option[String], - omitMissing: Boolean, // If true, failing lookups in the agglomerate file will just return empty list. - )(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Long]] = + omitMissing: Boolean // If true, failing lookups in the agglomerate file will just return empty list. + )(implicit ec: ExecutionContext, tc: TokenContext): Fox[List[Long]] = (targetMappingName, editableMappingTracingId) match { case (None, None) => // No mapping selected, assume id matches meshfile diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index bec3799ad48..ed4223553f5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -8,9 +8,18 @@ import com.scalableminds.util.tools.JsonHelper.{boxFormat, optionFormat} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.controllers.Controller import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto -import 
com.scalableminds.webknossos.datastore.helpers.{GetSegmentIndexParameters, ProtoGeometryImplicits, SegmentStatisticsParameters} +import com.scalableminds.webknossos.datastore.helpers.{ + GetSegmentIndexParameters, + ProtoGeometryImplicits, + SegmentStatisticsParameters +} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} -import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize, WebknossosAdHocMeshRequest, WebknossosDataRequest} +import com.scalableminds.webknossos.datastore.models.{ + LengthUnit, + VoxelSize, + WebknossosAdHocMeshRequest, + WebknossosDataRequest +} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{FullMeshRequest, UserAccessRequest} import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} @@ -18,7 +27,12 @@ import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotifi import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume._ import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingId, TracingSelector} -import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig} +import com.scalableminds.webknossos.tracingstore.{ + TSRemoteDatastoreClient, + TSRemoteWebknossosClient, + TracingStoreAccessTokenService, + TracingStoreConfig +} import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index 1f320a38d59..b1140ca2857 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -36,9 +36,10 @@ class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP remoteFallbackLayer <- layer.editableMappingService .remoteFallbackLayerFromVolumeTracing(layer.tracing, layer.tracingId) // called here to ensure updates are applied - editableMappingInfo <- layer.annotationService.findEditableMappingInfo(layer.annotationId, - tracingId, - Some(layer.version))(ec, layer.tokenContext) + editableMappingInfo <- layer.annotationService.findEditableMappingInfo( + layer.annotationId, + tracingId, + Some(layer.version))(ec, layer.tokenContext) dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, From a0d536b77611afb4ab5b403bcbc31e95f1adc07f Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 4 Dec 2024 10:21:11 +0100 Subject: [PATCH 263/361] WIP: migration: merge updates in a less naive way --- .../.gitignore | 1 + .../migration.py | 67 +++++++++++++++++-- .../utils.py | 3 +- 3 files changed, 62 insertions(+), 9 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/.gitignore b/tools/migration-unified-annotation-versioning/.gitignore index 7df6b2c7ebf..f18dc3a725d 100644 --- a/tools/migration-unified-annotation-versioning/.gitignore +++ b/tools/migration-unified-annotation-versioning/.gitignore @@ -2,3 +2,4 @@ counts.py *.log *.csv logs/ +*.dat diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 2247708beb8..94a0d9e82b3 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -71,6 +71,7 @@ def 
migrate_annotation(self, annotation): logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") mapping_id_map = self.build_mapping_id_map(annotation) layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) + return materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") @@ -94,7 +95,7 @@ def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map[tracing_id] = editable_mapping_id return mapping_id_map - def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: + def migrate_updates_concat(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() batch_size = 1000 @@ -107,7 +108,7 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers for batch_start, batch_end in batch_range(newest_version, batch_size): update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) for version, update_group in update_groups: - update_group = self.process_update_group(tracing_id, layer_type, update_group, json_encoder, json_decoder) + update_group, _, _ = self.process_update_group(tracing_id, layer_type, update_group, json_encoder, json_decoder) unified_version += 1 version_mapping_for_layer[version] = unified_version self.save_update_group(annotation['_id'], unified_version, update_group) @@ -127,6 +128,49 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers # TODO handle existing revertToVersion update actions return version_mapping + def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: str, json_encoder, json_decoder) -> List[bytes]: + batch_size = 1000 + newest_version = 
self.get_newest_version(tracing_or_mapping_id, collection) + updates_for_layer = [] + next_version = newest_version + for batch_start, batch_end in track(reversed(list(batch_range(newest_version, batch_size))), total=(newest_version // batch_size), description=f"Fetching updates ..."): + if batch_start > next_version: + continue + update_groups = self.get_update_batch(tracing_or_mapping_id, collection, batch_start, batch_end) + for version, update_group in reversed(update_groups): + if version > next_version: + continue + update_group, timestamp, revert_source_version = self.process_update_group(tracing_or_mapping_id, layer_type, update_group, json_encoder, json_decoder) + if revert_source_version is not None: + next_version = revert_source_version + else: + next_version -= 1 + # todo save actionTimestamp + updates_for_layer.append(update_group) + logger.info(f"yielding {len(updates_for_layer)} updates") + updates_for_layer.reverse() + return updates_for_layer + + def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: + json_encoder = msgspec.json.Encoder() + json_decoder = msgspec.json.Decoder() + + updates_per_layer = {} + for tracing_id, layer_type in annotation["layers"].items(): + collection = self.update_collection_for_layer_type(layer_type) + updates_per_layer[tracing_id] = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) + if tracing_id in mapping_id_map: + editable_mapping_id = mapping_id_map[tracing_id] + updates_per_layer[editable_mapping_id] = self.fetch_updates(editable_mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) + + unified_version = 0 + version_mapping = {} + + # TODO merge the updates lists by timestamp, store updates, build version mapping + + logger.info(len(updates_per_layer)) + return version_mapping + def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> 
Optional[str]: if layer_type == "Skeleton": return None @@ -147,10 +191,11 @@ def get_newest_tracing_raw(self, tracing_id, collection) -> Optional[bytes]: return getReply.value return None - def process_update_group(self, tracing_id: str, layer_type: str, update_group_raw: bytes, json_encoder, json_decoder) -> bytes: + def process_update_group(self, tracing_id: str, layer_type: str, update_group_raw: bytes, json_encoder, json_decoder) -> Tuple[bytes, int, Optional[int]]: update_group_parsed = json_decoder.decode(update_group_raw) - # TODO handle existing revertToVersion update actions + revert_source_version = None + action_timestamp = 0 for update in update_group_parsed: name = update["name"] @@ -181,7 +226,14 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra or (name == "updateMappingName" and "mappingName" not in update_value): update["isCompacted"] = True - return json_encoder.encode(update_group_parsed) + if name == "revertToVersion": + revert_source_version = update_value["sourceVersion"] + logger.info(f"revertToVersion! 
source: {revert_source_version}") + + if "actionTimestamp" in update_value is not None: + action_timestamp = update_value["actionTimestamp"] + + return json_encoder.encode(update_group_parsed), action_timestamp, revert_source_version def save_update_group(self, annotation_id: str, version: int, update_group_raw: bytes) -> None: self.save_bytes(collection="annotationUpdates", key=annotation_id, version=version, value=update_group_raw) @@ -385,8 +437,9 @@ def read_annotation_list(self): query = f""" WITH annotations AS ( SELECT _id, name, description, created, modified FROM webknossos.annotations - WHERE modified < '{start_time}' - {previous_start_query} + -- WHERE modified < '{start_time}' + -- {previous_start_query} + WHERE _id = '5dc6d29d01000008d7fcb983' ORDER BY MD5(_id) LIMIT {page_size} OFFSET {page_size * page_num} diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py index 71686b8e1a3..b2cd81e092c 100644 --- a/tools/migration-unified-annotation-versioning/utils.py +++ b/tools/migration-unified-annotation-versioning/utils.py @@ -36,14 +36,13 @@ def log_since(before, label: str, postfix: str = "") -> None: logger.info(f"{label} took {humanize_time_diff(diff)}{postfix}") -# TODO should we go to limit + 1? 
def batch_range( limit: int, batch_size: int ) -> Iterator[Tuple[int, int]]: full_range = range(limit) for i in range(full_range.start, full_range.stop, batch_size): - yield (i, min(i + batch_size, full_range.stop)) + yield i, min(i + batch_size, full_range.stop) if i + batch_size >= full_range.stop: return From ca1ce9f90dbf830acb71563947ca60a0dacd1a47 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 4 Dec 2024 10:26:14 +0100 Subject: [PATCH 264/361] incorporate version parameter in agglomeratesForSegments route; refactor some GET parameters in api module --- frontend/javascripts/admin/admin_rest_api.ts | 79 ++++++++++++-------- 1 file changed, 48 insertions(+), 31 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index a0fa2d41430..293e7d21e9a 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -725,17 +725,21 @@ export async function getTracingForAnnotationType( ): Promise { const { tracingId, typ } = annotationLayerDescriptor; const tracingType = typ.toLowerCase() as "skeleton" | "volume"; - const possibleVersionString = version != null ? 
`&version=${version}` : ""; - const tracingArrayBuffer = await doWithToken((token) => - Request.receiveArraybuffer( - `${annotation.tracingStore.url}/tracings/${tracingType}/${tracingId}?token=${token}${possibleVersionString}`, + const params = new URLSearchParams(); + if (version != null) { + params.append("version", version.toString()); + } + const tracingArrayBuffer = await doWithToken((token) => { + params.append("token", token); + return Request.receiveArraybuffer( + `${annotation.tracingStore.url}/tracings/${tracingType}/${tracingId}?${params}`, { headers: { Accept: "application/x-protobuf", }, }, - ), - ); + ); + }); const tracing = parseProtoTracing(tracingArrayBuffer, tracingType); if (!process.env.IS_TESTING) { @@ -794,17 +798,21 @@ export async function getAnnotationProto( annotationId: string, version?: number | null | undefined, ): Promise { - const possibleVersionString = version != null ? `&version=${version}` : ""; - const annotationArrayBuffer = await doWithToken((token) => - Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/annotation/${annotationId}?token=${token}${possibleVersionString}`, + const params = new URLSearchParams(); + if (version != null) { + params.append("version", version.toString()); + } + const annotationArrayBuffer = await doWithToken((token) => { + params.append("token", token); + return Request.receiveArraybuffer( + `${tracingStoreUrl}/tracings/annotation/${annotationId}?${params}`, { headers: { Accept: "application/x-protobuf", }, }, - ), - ); + ); + }); return parseProtoAnnotation(annotationArrayBuffer); } @@ -887,9 +895,9 @@ export async function downloadAnnotation( downloadFileFormat: "zarr3" | "wkw" | "nml" = "wkw", includeVolumeData: boolean = true, ) { - const searchParams = new URLSearchParams(); + const params = new URLSearchParams(); if (version != null) { - searchParams.append("version", version.toString()); + params.append("version", version.toString()); } if (includeVolumeData && 
showVolumeFallbackDownloadWarning) { @@ -898,17 +906,17 @@ export async function downloadAnnotation( }); } if (!includeVolumeData) { - searchParams.append("skipVolumeData", "true"); + params.append("skipVolumeData", "true"); } else { if (downloadFileFormat === "nml") { throw new Error( "Cannot download annotation with nml-only format while includeVolumeData is true", ); } - searchParams.append("volumeDataZipFormat", downloadFileFormat); + params.append("volumeDataZipFormat", downloadFileFormat); } - const downloadUrl = `/api/annotations/${annotationType}/${annotationId}/download?${searchParams}`; + const downloadUrl = `/api/annotations/${annotationType}/${annotationId}/download?${params}`; await downloadWithFilename(downloadUrl); } @@ -1300,17 +1308,20 @@ export async function triggerDatasetClearCache( dataSourceId: APIDataSourceId, layerName?: string, ): Promise { - await doWithToken((token) => - Request.triggerRequest( - `/data/triggers/reload/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}?token=${token}${ - layerName ? 
`&layerName=${layerName}` : "" - }`, + await doWithToken((token) => { + const params = new URLSearchParams(); + params.append("token", token); + if (layerName) { + params.append("layerName", layerName); + } + return Request.triggerRequest( + `/data/triggers/reload/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}?${params}`, { host: datastoreHost, method: "POST", }, - ), - ); + ); + }); } export async function deleteDatasetOnDisk( @@ -1448,14 +1459,14 @@ export function getPositionForSegmentInAgglomerate( segmentId: number, ): Promise { return doWithToken(async (token) => { - const urlParams = new URLSearchParams({ + const params = new URLSearchParams({ token, segmentId: `${segmentId}`, }); const position = await Request.receiveJSON( `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${ dataSourceId.directoryName - }/layers/${layerName}/agglomerates/${mappingName}/positionForSegment?${urlParams.toString()}`, + }/layers/${layerName}/agglomerates/${mappingName}/positionForSegment?${params.toString()}`, ); return position; }); @@ -1914,11 +1925,17 @@ export async function getAgglomeratesForSegmentsFromDatastore, + version?: number | null | undefined, ): Promise { + const params = new URLSearchParams(); + if (version != null) { + params.append("version", version.toString()); + } const segmentIdBuffer = serializeProtoListOfLong(segmentIds); - const listArrayBuffer: ArrayBuffer = await doWithToken((token) => - Request.receiveArraybuffer( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?token=${token}`, + const listArrayBuffer: ArrayBuffer = await doWithToken((token) => { + params.append("token", token); + return Request.receiveArraybuffer( + `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?${params}`, { 
method: "POST", body: segmentIdBuffer, @@ -1926,8 +1943,8 @@ export async function getAgglomeratesForSegmentsFromDatastore BigInt(el) From eaef5a192f8cb825d2227ae99830d748d39daad7 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 4 Dec 2024 11:42:25 +0100 Subject: [PATCH 265/361] migration: iron out reverts, merge updates by timestamp --- .../migration.py | 98 +++++++++---------- 1 file changed, 44 insertions(+), 54 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 94a0d9e82b3..83d6ade0d02 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -11,6 +11,7 @@ import concurrent.futures import threading from functools import partial +import heapq import fossildbapi_pb2 as proto import VolumeTracing_pb2 as Volume @@ -71,7 +72,6 @@ def migrate_annotation(self, annotation): logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") mapping_id_map = self.build_mapping_id_map(annotation) layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) - return materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") @@ -95,45 +95,12 @@ def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map[tracing_id] = editable_mapping_id return mapping_id_map - def migrate_updates_concat(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: - json_encoder = msgspec.json.Encoder() - json_decoder = msgspec.json.Decoder() - batch_size = 1000 - unified_version = 0 - version_mapping = {} - for tracing_id, layer_type in annotation["layers"].items(): - collection = self.update_collection_for_layer_type(layer_type) - version_mapping_for_layer = 
{0: 0} - newest_version = self.get_newest_version(tracing_id, collection) - for batch_start, batch_end in batch_range(newest_version, batch_size): - update_groups = self.get_update_batch(tracing_id, collection, batch_start, batch_end) - for version, update_group in update_groups: - update_group, _, _ = self.process_update_group(tracing_id, layer_type, update_group, json_encoder, json_decoder) - unified_version += 1 - version_mapping_for_layer[version] = unified_version - self.save_update_group(annotation['_id'], unified_version, update_group) - version_mapping[tracing_id] = version_mapping_for_layer - if tracing_id in mapping_id_map: - editable_mapping_id = mapping_id_map[tracing_id] - version_mapping_for_mapping = {0: 0} - for batch_start, batch_end in batch_range(newest_version, batch_size): - mapping_update_groups = self.get_update_batch(editable_mapping_id, "editableMappingUpdates", batch_start, batch_end) - for version, update_group in mapping_update_groups: - unified_version += 1 - version_mapping_for_mapping[version] = unified_version - self.save_update_group(annotation['_id'], unified_version, update_group) - version_mapping[editable_mapping_id] = version_mapping_for_mapping - - # TODO interleave updates rather than concat - # TODO handle existing revertToVersion update actions - return version_mapping - - def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: str, json_encoder, json_decoder) -> List[bytes]: + def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: str, json_encoder, json_decoder) -> List[Tuple[int, int, bytes]]: batch_size = 1000 newest_version = self.get_newest_version(tracing_or_mapping_id, collection) updates_for_layer = [] next_version = newest_version - for batch_start, batch_end in track(reversed(list(batch_range(newest_version, batch_size))), total=(newest_version // batch_size), description=f"Fetching updates ..."): + for batch_start, batch_end in 
reversed(list(batch_range(newest_version, batch_size))): if batch_start > next_version: continue update_groups = self.get_update_batch(tracing_or_mapping_id, collection, batch_start, batch_end) @@ -145,30 +112,46 @@ def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: next_version = revert_source_version else: next_version -= 1 - # todo save actionTimestamp - updates_for_layer.append(update_group) - logger.info(f"yielding {len(updates_for_layer)} updates") + if revert_source_version is None: # skip the revert itself too, since we’re ironing them out + updates_for_layer.append((timestamp, version, update_group)) updates_for_layer.reverse() return updates_for_layer def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: + all_update_groups = [] json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() - - updates_per_layer = {} - for tracing_id, layer_type in annotation["layers"].items(): + layers = list(annotation["layers"].items()) + for tracing_id, layer_type in layers: collection = self.update_collection_for_layer_type(layer_type) - updates_per_layer[tracing_id] = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) + all_update_groups.append(self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder)) if tracing_id in mapping_id_map: editable_mapping_id = mapping_id_map[tracing_id] - updates_per_layer[editable_mapping_id] = self.fetch_updates(editable_mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) + all_update_groups.append(self.fetch_updates(editable_mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder)) unified_version = 0 version_mapping = {} + for tracing_id, _ in layers: + version_mapping[tracing_id] = {0:0} + + queue = [] + for i, lst in 
enumerate(all_update_groups): + if lst: + heapq.heappush(queue, (lst[0], i, 0)) + while queue: + value, list_index, element_index = heapq.heappop(queue) + timestamp, version, update_group = value + timestamp_formatted = datetime.datetime.fromtimestamp(timestamp / 1000) + tracing_id = layers[list_index][0] + + unified_version += 1 + version_mapping[tracing_id][version] = unified_version + self.save_update_group(annotation['_id'], unified_version, update_group) + + next_element = all_update_groups[list_index][element_index + 1] if element_index + 1 < len(all_update_groups[list_index]) else None + if next_element is not None: + heapq.heappush(queue, (next_element, list_index, element_index + 1)) - # TODO merge the updates lists by timestamp, store updates, build version mapping - - logger.info(len(updates_per_layer)) return version_mapping def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> Optional[str]: @@ -227,8 +210,8 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra update["isCompacted"] = True if name == "revertToVersion": + # Assumption: revertToVersion actions are the only ones in their group. revert_source_version = update_value["sourceVersion"] - logger.info(f"revertToVersion! 
source: {revert_source_version}") if "actionTimestamp" in update_value is not None: action_timestamp = update_value["actionTimestamp"] @@ -283,6 +266,8 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe materialized_versions_unified = [] materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: + if materialized_version not in layer_version_mapping[tracing_id]: + continue new_version = layer_version_mapping[tracing_id][materialized_version] value_bytes = self.get_bytes(collection, tracing_id, materialized_version) if materialized_version != new_version: @@ -299,6 +284,8 @@ def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVers materialized_versions_unified = [] materialized_versions = self.list_versions(collection, tracing_id) for materialized_version in materialized_versions: + if materialized_version not in layer_version_mapping[tracing_id]: + continue new_version = layer_version_mapping[tracing_id][materialized_version] value_bytes = self.get_bytes(collection, tracing_id, materialized_version) if materialized_version != new_version or tracing_id in mapping_id_map: @@ -350,6 +337,8 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: if transform_key is not None: new_key = transform_key(key) for version, value in zip(get_versions_reply.versions, get_versions_reply.values): + if version not in layer_version_mapping[tracing_id]: + continue new_version = layer_version_mapping[tracing_id][version] self.save_bytes(collection, new_key, new_version, value) current_start_after_key = key @@ -375,6 +364,8 @@ def migrate_editable_mapping_info(self, tracing_id: str, mapping_id: str, layer_ materialized_versions_unified = [] for materialized_version in materialized_versions: value_bytes = self.get_bytes(collection, mapping_id, materialized_version) + if materialized_version not in layer_version_mapping[tracing_id]: + continue 
new_version = layer_version_mapping[mapping_id][materialized_version] materialized_versions_unified.append(new_version) self.save_bytes(collection, tracing_id, new_version, value_bytes) @@ -437,9 +428,8 @@ def read_annotation_list(self): query = f""" WITH annotations AS ( SELECT _id, name, description, created, modified FROM webknossos.annotations - -- WHERE modified < '{start_time}' - -- {previous_start_query} - WHERE _id = '5dc6d29d01000008d7fcb983' + WHERE modified < '{start_time}' + {previous_start_query} ORDER BY MD5(_id) LIMIT {page_size} OFFSET {page_size * page_num} @@ -477,7 +467,7 @@ def get_progress(self) -> str: duration = time.time() - self.before if done_count > 0: etr = duration / done_count * (self.total_count - done_count) - etr_formatted = f" ETR {humanize_time_diff(etr)})" + etr_formatted = f". ETR {humanize_time_diff(etr)})" else: - etr_formatted = "" - return f". ({done_count}/{self.total_count} = {percentage:.1f}% done.{etr_formatted}" + etr_formatted = ")" + return f". 
({done_count}/{self.total_count} = {percentage:.1f}% done{etr_formatted}" From 8a576212a8706b13757ba06ee9f81c24e1101cc5 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 4 Dec 2024 11:46:01 +0100 Subject: [PATCH 266/361] migration: adapt to changed annotationProto format --- .../Annotation_pb2.py | 12 ++++++------ .../migration.py | 5 ++--- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/Annotation_pb2.py b/tools/migration-unified-annotation-versioning/Annotation_pb2.py index 35e9ad6cef0..5b9fa64b93b 100644 --- a/tools/migration-unified-annotation-versioning/Annotation_pb2.py +++ b/tools/migration-unified-annotation-versioning/Annotation_pb2.py @@ -13,17 +13,17 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\xc0\x01\n\x0f\x41nnotationProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0f\n\x07version\x18\x03 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x04 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x05 \x02(\x03\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x04 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\xb2\x01\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 
\x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x04 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'Annotation_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - _ANNOTATIONLAYERTYPEPROTO._serialized_start=393 - _ANNOTATIONLAYERTYPEPROTO._serialized_end=445 + _ANNOTATIONLAYERTYPEPROTO._serialized_start=379 + _ANNOTATIONLAYERTYPEPROTO._serialized_end=431 _ANNOTATIONPROTO._serialized_start=61 - _ANNOTATIONPROTO._serialized_end=253 - _ANNOTATIONLAYERPROTO._serialized_start=256 - _ANNOTATIONLAYERPROTO._serialized_end=391 + _ANNOTATIONPROTO._serialized_end=239 + _ANNOTATIONLAYERPROTO._serialized_start=242 + _ANNOTATIONLAYERPROTO._serialized_end=377 # @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 83d6ade0d02..28523913df2 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -390,10 +390,9 @@ def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mappi def create_and_save_annotation_proto(self, annotation, materialized_versions: List[int]): for version in materialized_versions: annotationProto = AnnotationProto.AnnotationProto() - annotationProto.name = annotation["name"] - annotationProto.description = annotation["description"] + annotationProto.description = annotation["description"] or "" annotationProto.version = version - annotationProto.earliestAccessibleVersion = 0 + annotationProto.earliestAccessibleVersion = 0 # TODO different for merged editable mappings for tracing_id, tracing_type in annotation["layers"].items(): layer_proto = 
AnnotationProto.AnnotationLayerProto() layer_proto.tracingId = tracing_id From e46df33c390ce61ee451f6d3e5f6b9f9966f43e2 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 4 Dec 2024 13:39:58 +0100 Subject: [PATCH 267/361] small cleanup in migration --- .../migration.py | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 28523913df2..1245e6a36c6 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -132,25 +132,25 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers unified_version = 0 version_mapping = {} for tracing_id, _ in layers: - version_mapping[tracing_id] = {0:0} + version_mapping[tracing_id] = {0: 0} # We always want to keep the initial version 0 of all layers, even if there are no updates at all. queue = [] - for i, lst in enumerate(all_update_groups): - if lst: - heapq.heappush(queue, (lst[0], i, 0)) + for i, update_groups_for_layer in enumerate(all_update_groups): + if update_groups_for_layer: + # The priority queue sorts tupley lexicographically, so timestamp is the main sorting key here + heapq.heappush(queue, (update_groups_for_layer[0], i, 0)) while queue: - value, list_index, element_index = heapq.heappop(queue) + value, layer_index, element_index = heapq.heappop(queue) timestamp, version, update_group = value - timestamp_formatted = datetime.datetime.fromtimestamp(timestamp / 1000) - tracing_id = layers[list_index][0] + tracing_id = layers[layer_index][0] unified_version += 1 version_mapping[tracing_id][version] = unified_version self.save_update_group(annotation['_id'], unified_version, update_group) - next_element = all_update_groups[list_index][element_index + 1] if element_index + 1 < len(all_update_groups[list_index]) else None - if next_element is not None: - 
heapq.heappush(queue, (next_element, list_index, element_index + 1)) + if element_index + 1 < len(all_update_groups[layer_index]): + next_element = all_update_groups[layer_index][element_index + 1] + heapq.heappush(queue, (next_element, layer_index, element_index + 1)) return version_mapping @@ -320,7 +320,7 @@ def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVe def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): list_keys_page_size = 5000 versions_page_size = 500 - current_start_after_key = tracing_id + "." # . is lexicographically before / + current_start_after_key = tracing_id + "." # . is lexicographically before / newest_tracing_version = max(layer_version_mapping[tracing_id].keys()) while True: list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key)) @@ -392,7 +392,7 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Li annotationProto = AnnotationProto.AnnotationProto() annotationProto.description = annotation["description"] or "" annotationProto.version = version - annotationProto.earliestAccessibleVersion = 0 # TODO different for merged editable mappings + annotationProto.earliestAccessibleVersion = 0 # TODO different for merged editable mappings for tracing_id, tracing_type in annotation["layers"].items(): layer_proto = AnnotationProto.AnnotationLayerProto() layer_proto.tracingId = tracing_id From 25cf9900cc5b7d9c1d0073c3127965a439193ebe Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 4 Dec 2024 14:59:08 +0100 Subject: [PATCH 268/361] fix version restore preview for editable mappings --- frontend/javascripts/admin/admin_rest_api.ts | 21 +++++++++++-------- .../oxalis/model/sagas/mapping_saga.ts | 1 + 2 files changed, 13 insertions(+), 9 deletions(-) diff --git 
a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 293e7d21e9a..b79aef8fe29 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1925,12 +1925,9 @@ export async function getAgglomeratesForSegmentsFromDatastore, - version?: number | null | undefined, ): Promise { const params = new URLSearchParams(); - if (version != null) { - params.append("version", version.toString()); - } + const segmentIdBuffer = serializeProtoListOfLong(segmentIds); const listArrayBuffer: ArrayBuffer = await doWithToken((token) => { params.append("token", token); @@ -1958,14 +1955,20 @@ export async function getAgglomeratesForSegmentsFromTracingstore, + version?: number | null | undefined, ): Promise { + const params = new URLSearchParams(); + if (version != null) { + params.append("version", version.toString()); + } const segmentIdBuffer = serializeProtoListOfLong( // The tracing store expects the ids to be sorted segmentIds.sort((a: T, b: T) => Number(a - b)), ); - const listArrayBuffer: ArrayBuffer = await doWithToken((token) => - Request.receiveArraybuffer( - `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomeratesForSegments?token=${token}`, + const listArrayBuffer: ArrayBuffer = await doWithToken((token) => { + params.append("token", token); + return Request.receiveArraybuffer( + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomeratesForSegments?${params}`, { method: "POST", body: segmentIdBuffer, @@ -1973,8 +1976,8 @@ export async function getAgglomeratesForSegmentsFromTracingstore Date: Wed, 4 Dec 2024 17:06:41 +0100 Subject: [PATCH 269/361] fix that restoring to another version broke the loading of mesh files --- frontend/javascripts/oxalis/api/wk_dev.ts | 1 + .../model/helpers/action_logger_middleware.ts | 5 +-- .../oxalis/model/sagas/mesh_saga.ts | 8 ++-- .../oxalis/model/sagas/ready_sagas.ts | 41 +++++++++++++++++++ .../oxalis/model/sagas/root_saga.ts | 5 ++- 
.../oxalis/model/sagas/save_saga.ts | 2 +- .../oxalis/model/sagas/wk_ready_saga.ts | 20 --------- .../javascripts/test/sagas/save_saga.spec.ts | 2 +- 8 files changed, 54 insertions(+), 30 deletions(-) create mode 100644 frontend/javascripts/oxalis/model/sagas/ready_sagas.ts delete mode 100644 frontend/javascripts/oxalis/model/sagas/wk_ready_saga.ts diff --git a/frontend/javascripts/oxalis/api/wk_dev.ts b/frontend/javascripts/oxalis/api/wk_dev.ts index d3b5c4d7f16..bd8cb397adf 100644 --- a/frontend/javascripts/oxalis/api/wk_dev.ts +++ b/frontend/javascripts/oxalis/api/wk_dev.ts @@ -10,6 +10,7 @@ import _ from "lodash"; // Can be accessed via window.webknossos.DEV.flags. Only use this // for debugging or one off scripts. export const WkDevFlags = { + logActions: false, sam: { useLocalMask: true, }, diff --git a/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts b/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts index 7d444e1b9a3..45b027af15c 100644 --- a/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts +++ b/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts @@ -1,6 +1,7 @@ import _ from "lodash"; import type { Dispatch } from "redux"; import type { Action } from "oxalis/model/actions/actions"; +import { WkDevFlags } from "oxalis/api/wk_dev"; const MAX_ACTION_LOG_LENGTH = 250; let actionLog: string[] = []; @@ -9,8 +10,6 @@ let actionLog: string[] = []; let lastActionName: string | null = null; let lastActionCount: number = 0; -const DEBUG_OUTPUT_FOR_ACTIONS = false; - const actionBlacklist = [ "ADD_TO_LAYER", "MOVE_FLYCAM", @@ -51,7 +50,7 @@ export default function actionLoggerMiddleware(): ( const overflowCount = Math.max(actionLog.length - MAX_ACTION_LOG_LENGTH, 0); actionLog = _.drop(actionLog, overflowCount); - if (DEBUG_OUTPUT_FOR_ACTIONS) { + if (WkDevFlags.logActions) { console.group(action.type); console.info("dispatching", action); let result = next(action); diff --git 
a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts index 2fdf47b3710..962438e9da0 100644 --- a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts @@ -82,6 +82,7 @@ import type { FlycamAction } from "../actions/flycam_actions"; import { getAdditionalCoordinatesAsString } from "../accessors/flycam_accessor"; import type { BufferGeometryWithInfo } from "oxalis/controller/segment_mesh_controller"; import { WkDevFlags } from "oxalis/api/wk_dev"; +import { ensureSceneControllerReady, ensureWkReady } from "./ready_sagas"; export const NO_LOD_MESH_INDEX = -1; const MAX_RETRY_COUNT = 5; @@ -673,7 +674,7 @@ function* _refreshMeshWithMap( // Avoid redundant fetches of mesh files for the same layer by // storing Deferreds per layer lazily. -const fetchDeferredsPerLayer: Record, unknown>> = {}; +let fetchDeferredsPerLayer: Record, unknown>> = {}; function* maybeFetchMeshFiles(action: MaybeFetchMeshFilesAction): Saga { const { segmentationLayer, dataset, mustRequest, autoActivate, callback } = action; @@ -1277,13 +1278,14 @@ function* handleBatchSegmentColorChange( } export default function* meshSaga(): Saga { + fetchDeferredsPerLayer = {}; // Buffer actions since they might be dispatched before WK_READY const loadAdHocMeshActionChannel = yield* actionChannel("LOAD_AD_HOC_MESH_ACTION"); const loadPrecomputedMeshActionChannel = yield* actionChannel("LOAD_PRECOMPUTED_MESH_ACTION"); const maybeFetchMeshFilesActionChannel = yield* actionChannel("MAYBE_FETCH_MESH_FILES"); - yield* take("SCENE_CONTROLLER_READY"); - yield* take("WK_READY"); + yield* call(ensureSceneControllerReady); + yield* call(ensureWkReady); yield* takeEvery(maybeFetchMeshFilesActionChannel, maybeFetchMeshFiles); yield* takeEvery(loadAdHocMeshActionChannel, loadAdHocMeshFromAction); yield* takeEvery(loadPrecomputedMeshActionChannel, loadPrecomputedMesh); diff --git 
a/frontend/javascripts/oxalis/model/sagas/ready_sagas.ts b/frontend/javascripts/oxalis/model/sagas/ready_sagas.ts new file mode 100644 index 00000000000..25898d935b3 --- /dev/null +++ b/frontend/javascripts/oxalis/model/sagas/ready_sagas.ts @@ -0,0 +1,41 @@ +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { take, takeEvery } from "typed-redux-saga"; + +let isWkReady = false; +let isSceneControllerReady = false; + +function setWkReady() { + isWkReady = true; +} + +function setSceneControllerReady() { + isSceneControllerReady = true; +} + +export function setWkReadyToFalse() { + isWkReady = false; +} + +function* listenForWkReady(): Saga { + yield* takeEvery("WK_READY", setWkReady); +} +function* listenForSceneControllerReady(): Saga { + yield* takeEvery("SCENE_CONTROLLER_READY", setSceneControllerReady); +} + +// The following two sagas are useful for other sagas that might be instantiated before or after +// the {WK,SCENE_CONTROLLER}_READY action was dispatched. If the action was dispatched +// before, this saga immediately returns, otherwise it waits +// until the action is dispatched. 
+ +export function* ensureWkReady(): Saga { + if (isWkReady) return; + yield* take("WK_READY"); +} + +export function* ensureSceneControllerReady(): Saga { + if (isSceneControllerReady) return; + yield* take("SCENE_CONTROLLER_READY"); +} + +export default [listenForWkReady, listenForSceneControllerReady]; diff --git a/frontend/javascripts/oxalis/model/sagas/root_saga.ts b/frontend/javascripts/oxalis/model/sagas/root_saga.ts index 8e03f177535..e42faad6d62 100644 --- a/frontend/javascripts/oxalis/model/sagas/root_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/root_saga.ts @@ -17,7 +17,7 @@ import loadHistogramDataSaga from "oxalis/model/sagas/load_histogram_data_saga"; import listenToClipHistogramSaga from "oxalis/model/sagas/clip_histogram_saga"; import MappingSaga from "oxalis/model/sagas/mapping_saga"; import ProofreadSaga from "oxalis/model/sagas/proofread_saga"; -import { listenForWkReady } from "oxalis/model/sagas/wk_ready_saga"; +import ReadySagas, { setWkReadyToFalse } from "oxalis/model/sagas/ready_sagas"; import { warnIfEmailIsUnverified } from "./user_saga"; import type { EscalateErrorAction } from "../actions/actions"; @@ -28,6 +28,7 @@ export default function* rootSaga(): Saga { const task = yield* fork(restartableSaga); yield* take("RESTART_SAGA"); yield* cancel(task); + yield* call(setWkReadyToFalse); } } export function hasRootSagaCrashed() { @@ -46,7 +47,7 @@ function* listenToErrorEscalation() { function* restartableSaga(): Saga { try { yield* all([ - call(listenForWkReady), + ...ReadySagas.map((saga) => call(saga)), call(warnAboutMagRestriction), call(SettingsSaga), ...SkeletontracingSagas.map((saga) => call(saga)), diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index de38d60c8a4..3f309ffb0f6 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -44,7 +44,7 @@ import { type 
UpdateActionWithTracingId, } from "oxalis/model/sagas/update_actions"; import { diffVolumeTracing } from "oxalis/model/sagas/volumetracing_saga"; -import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga"; +import { ensureWkReady } from "oxalis/model/sagas/ready_sagas"; import { Model } from "oxalis/singletons"; import type { CameraData, diff --git a/frontend/javascripts/oxalis/model/sagas/wk_ready_saga.ts b/frontend/javascripts/oxalis/model/sagas/wk_ready_saga.ts deleted file mode 100644 index b38ddfc02ac..00000000000 --- a/frontend/javascripts/oxalis/model/sagas/wk_ready_saga.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { take, takeEvery } from "typed-redux-saga"; - -let isWkReady = false; - -function setWkReady() { - isWkReady = true; -} -export function* listenForWkReady(): Saga { - yield* takeEvery("WK_READY", setWkReady); -} - -export function* ensureWkReady(): Saga { - // This saga is useful for sagas that might be instantiated before or after - // the WK_READY action was dispatched. If the action was dispatched - // before, this saga immediately returns, otherwise it waits - // until the action is dispatched. 
- if (isWkReady) return; - yield* take("WK_READY"); -} diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 4810ea96dc8..107d06edcbd 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -3,7 +3,7 @@ import { alert } from "libs/window"; import { setSaveBusyAction } from "oxalis/model/actions/save_actions"; import DiffableMap from "libs/diffable_map"; import compactSaveQueue from "oxalis/model/helpers/compaction/compact_save_queue"; -import { ensureWkReady } from "oxalis/model/sagas/wk_ready_saga"; +import { ensureWkReady } from "oxalis/model/sagas/ready_sagas"; import mockRequire from "mock-require"; import test from "ava"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; From 6f140d9c7aa5c93e3e7239107142f069cf1d0474 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 5 Dec 2024 13:48:11 +0100 Subject: [PATCH 270/361] fix ordering in updateActionLog --- .../tracingstore/annotation/TSAnnotationService.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 10430fe52d0..3d931cccb14 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -231,7 +231,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss "value" -> Json.toJson(tuple._2) ) - val batchRanges = SequenceUtils.batchRangeInclusive(oldestVersion, newestVersion, batchSize = 100) + val batchRanges = SequenceUtils.batchRangeInclusive(oldestVersion, newestVersion, batchSize = 1000).reverse for { 
updateActionBatches <- Fox.serialCombined(batchRanges.toList) { batchRange => val batchFrom = batchRange._1 From 05ca4aa926c36893a38e7edd821f93d52afedc98 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 5 Dec 2024 14:36:36 +0100 Subject: [PATCH 271/361] migration: wip select which to materialize --- .../migration.py | 35 +++++++++++++++---- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 1245e6a36c6..3f28b04b822 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -5,7 +5,7 @@ import logging import datetime import time -from typing import Dict, Tuple, List, Optional, Callable +from typing import Dict, Tuple, List, Optional, Callable, Set from rich.progress import track import msgspec import concurrent.futures @@ -62,8 +62,8 @@ def migrate_annotation(self, annotation): if self.args.count_versions: versions = 0 for tracing_id, layer_type in annotation["layers"].items(): - collection = self.update_collection_for_layer_type(layer_type) - newest_version = self.get_newest_version(tracing_id, collection) + update_collection = self.update_collection_for_layer_type(layer_type) + newest_version = self.get_newest_version(tracing_id, update_collection) versions += newest_version if versions > 1: logger.info(f"{versions} versions for {annotation['_id']}{self.get_progress()}") @@ -73,6 +73,7 @@ def migrate_annotation(self, annotation): mapping_id_map = self.build_mapping_id_map(annotation) layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) + logger.info(f"saved materialized versions {materialized_versions}") if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for 
annotation {annotation['_id']}.") self.create_and_save_annotation_proto(annotation, materialized_versions) @@ -146,6 +147,7 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers unified_version += 1 version_mapping[tracing_id][version] = unified_version + logger.info(f"saving update v{unified_version}") self.save_update_group(annotation['_id'], unified_version, update_group) if element_index + 1 < len(all_update_groups[layer_index]): @@ -243,14 +245,31 @@ def update_collection_for_layer_type(self, layer_type): return "skeletonUpdates" return "volumeUpdates" - def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: - materialized_versions = [] + def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> Set[int]: + #newest_to_materialize = self.get_newest_to_materialize(annotation, layer_version_mapping, mapping_id_map) + + materialized_versions = set() for tracing_id, tracing_type in annotation["layers"].items(): materialized_versions_of_layer = \ self.migrate_materialized_layer(tracing_id, tracing_type, layer_version_mapping, mapping_id_map) - materialized_versions += materialized_versions_of_layer + materialized_versions.update(materialized_versions_of_layer) return materialized_versions + def get_newest_to_materialize(self, annotation, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> int: + newest_to_materialize = 0 + for tracing_id, tracing_type in annotation["layers"].items(): + if tracing_type == "Skeleton": + collection = "skeletons" + else: + collection = "volumes" + # TODO what if the newest materialized is dropped because of a revert? 
+ newest_materialized = layer_version_mapping[tracing_id][self.get_newest_version(tracing_id, collection)] + newest_to_materialize = min(newest_to_materialize, newest_materialized) + for editable_mapping_id in mapping_id_map.values(): + newest_materialized = layer_version_mapping[editable_mapping_id][self.get_newest_version(editable_mapping_id, "editableMappingsInfo")] + newest_to_materialize = min(newest_to_materialize, newest_materialized) + return newest_to_materialize + def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: if layer_type == "Skeleton": return self.migrate_skeleton_proto(tracing_id, layer_version_mapping) @@ -276,6 +295,7 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe skeleton.version = new_version value_bytes = skeleton.SerializeToString() materialized_versions_unified.append(new_version) + logger.info(f"saving materialized skeleton {new_version}") self.save_bytes(collection, tracing_id, new_version, value_bytes) return materialized_versions_unified @@ -296,6 +316,7 @@ def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVers volume.mappingName = tracing_id value_bytes = volume.SerializeToString() materialized_versions_unified.append(new_version) + logger.info(f"saving materialized volume {new_version}") self.save_bytes(collection, tracing_id, new_version, value_bytes) return materialized_versions_unified @@ -387,7 +408,7 @@ def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mappi transform_key=partial(self.replace_before_first_slash, tracing_id) ) - def create_and_save_annotation_proto(self, annotation, materialized_versions: List[int]): + def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int]): for version in materialized_versions: annotationProto = AnnotationProto.AnnotationProto() annotationProto.description = 
annotation["description"] or "" From 408bb52eaad80eb19b4f4976fb513cffab53082f Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 5 Dec 2024 16:41:29 +0100 Subject: [PATCH 272/361] reload page when switching to a version in the version restore view that has different layers (including debugging code because it does not work properly) --- frontend/javascripts/admin/admin_rest_api.ts | 21 +++++++--- .../oxalis/model_initialization.ts | 1 + .../oxalis/view/action_bar_view.tsx | 2 +- .../javascripts/oxalis/view/version_list.tsx | 38 +++++++++++++++++-- .../test/sagas/skeletontracing_saga.spec.ts | 18 --------- frontend/javascripts/types/api_flow_types.ts | 16 ++++---- 6 files changed, 60 insertions(+), 36 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index b79aef8fe29..93d40be0c0b 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -719,7 +719,8 @@ export async function acquireAnnotationMutex( } export async function getTracingForAnnotationType( - annotation: APIAnnotation, + // todop: revert to APIAnnotation + annotation: { tracingStore: { url: string } }, annotationLayerDescriptor: AnnotationLayerDescriptor, version?: number | null | undefined, ): Promise { @@ -767,7 +768,7 @@ export function getUpdateActionLog( oldestVersion?: number, newestVersion?: number, ): Promise> { - return doWithToken((token) => { + return doWithToken(async (token) => { const params = new URLSearchParams(); params.append("token", token); if (oldestVersion != null) { @@ -776,9 +777,14 @@ export function getUpdateActionLog( if (newestVersion != null) { params.append("newestVersion", newestVersion.toString()); } - return Request.receiveJSON( + const entries = (await Request.receiveJSON( `${tracingStoreUrl}/tracings/annotation/${annotationId}/updateActionLog?${params}`, - ); + )) as APIUpdateActionBatch[]; + + // todop: should not be necessary soon + 
entries.sort((a, b) => b.version - a.version); + + return entries; }); } @@ -813,7 +819,12 @@ export async function getAnnotationProto( }, ); }); - return parseProtoAnnotation(annotationArrayBuffer); + const annotationProto = parseProtoAnnotation(annotationArrayBuffer); + if (!process.env.IS_TESTING) { + // Log to console as the decoded annotationProto is hard to inspect in the devtools otherwise. + console.log("Parsed protobuf annotation:", annotationProto); + } + return annotationProto; } export function hasSegmentIndexInDataStore( diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 3ed02f18125..c2eb23947b5 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -137,6 +137,7 @@ export async function initialize( if (initialMaybeCompoundType != null) { annotation = await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType); } else { + // todop: can we improve this? 
let maybeOutdatedAnnotation = await getMaybeOutdatedAnnotationInformation(annotationId); annotationProto = await getAnnotationProto( maybeOutdatedAnnotation.tracingStore.url, diff --git a/frontend/javascripts/oxalis/view/action_bar_view.tsx b/frontend/javascripts/oxalis/view/action_bar_view.tsx index ef4a68f6a6b..d011c949e5a 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.tsx +++ b/frontend/javascripts/oxalis/view/action_bar_view.tsx @@ -195,7 +195,7 @@ class ActionBarView extends React.PureComponent { fallbackLayerName, maybeMappingName, ); - location.href = `${location.origin}/annotations/${annotation.typ}/${annotation.id}${location.hash}`; + location.href = `${location.origin}/annotations/${annotation.id}${location.hash}`; }; renderStartAIJobButton(disabled: boolean, tooltipTextIfDisabled: string): React.ReactNode { diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index 0daf207db57..ddb13595d0e 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -8,6 +8,8 @@ import { getUpdateActionLog, downloadAnnotation, getNewestVersionForAnnotation, + getAnnotationProto, + getTracingForAnnotationType, } from "admin/admin_rest_api"; import { handleGenericError } from "libs/error_handling"; import { @@ -48,14 +50,42 @@ const VERSION_LIST_PLACEHOLDER = { export async function previewVersion(version?: number) { const state = Store.getState(); const { controlMode } = state.temporaryConfiguration; - const { annotationId } = state.tracing; + const { annotationId, tracingStore, annotationLayers } = state.tracing; + + const annotationProto = await getAnnotationProto(tracingStore.url, annotationId, version); + + if ( + !_.isEqual( + annotationProto.annotationLayers.map((l) => l.tracingId), + annotationLayers.map((l) => l.tracingId), + ) + ) { + const params = new URLSearchParams(); + params.append("showVersionRestore", "true"); + 
params.append("version", `${version}`); + // todop: do this + // location.href = `${location.origin}/annotations/${annotationId}?${params}${location.hash}`; + + // todop: remove this (it's only for testing) + for (const layer of annotationProto.annotationLayers) { + await getTracingForAnnotationType( + state.tracing, + { + name: "irrelevant hopefully", + tracingId: layer.tracingId, + typ: layer.type, + }, + version, + ); + } + + return; + } + await api.tracing.restart(null, annotationId, controlMode, version); Store.dispatch(setAnnotationAllowUpdateAction(false)); const segmentationLayersToReload = []; - // TODOp: properly determine which layers to reload. - // No versions were passed which means that the newest annotation should be - // shown. Therefore, reload all segmentation layers. segmentationLayersToReload.push(...Model.getSegmentationTracingLayers()); for (const segmentationLayer of segmentationLayersToReload) { diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index dc66fd56a9f..f8d60721fca 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -79,24 +79,6 @@ function testDiffing( ); } -// TODOp not used? -// biome-ignore lint/correctness/noUnusedVariables: -function compactSaveQueueWithUpdateActions( - queue: Array, - tracing: SkeletonTracing, -): Array { - return compactSaveQueue( - // todop - // Do we really need compactSaveQueueWithUpdateActions? actually, compactUpdateActions - // is never called with a save queue in prod (instead, the function is called before - // filling the save queue). one could probably combine compactUpdateActions and - // createSaveQueueFromUpdateActions to have a createCompactedSaveQueueFromUpdateActions - // helper function and use that in this spec. 
- // @ts-ignore - queue.map((batch) => ({ ...batch, actions: compactUpdateActions(batch.actions, tracing) })), - ); -} - function createCompactedSaveQueueFromUpdateActions( updateActions: UpdateActionWithoutIsolationRequirement[][], timestamp: number, diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 2666cb99138..0dd92e85ffd 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -577,17 +577,17 @@ export type APITimeTrackingPerAnnotation = { annotationLayerStats: TracingStats; }; type APITracingStoreAnnotationLayer = { - tracingId: string; - name: string; - type: AnnotationLayerType; + readonly tracingId: string; + readonly name: string; + readonly type: AnnotationLayerType; }; export type APITracingStoreAnnotation = { - name: string; - description: string; - version: number; - earliestAccessibleVersion: number; - annotationLayers: APITracingStoreAnnotationLayer[]; + readonly name: string; + readonly description: string; + readonly version: number; + readonly earliestAccessibleVersion: number; + readonly annotationLayers: APITracingStoreAnnotationLayer[]; }; export type APITimeTrackingPerUser = { From 3dca73c4222e69ae7342526a37f88cc61edc9fbb Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 5 Dec 2024 17:09:31 +0100 Subject: [PATCH 273/361] revert temporary sorting fix in FE --- frontend/javascripts/admin/admin_rest_api.ts | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 93d40be0c0b..71b4e8620e6 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -768,7 +768,7 @@ export function getUpdateActionLog( oldestVersion?: number, newestVersion?: number, ): Promise> { - return doWithToken(async (token) => { + return doWithToken((token) => { const params = new 
URLSearchParams(); params.append("token", token); if (oldestVersion != null) { @@ -777,14 +777,9 @@ export function getUpdateActionLog( if (newestVersion != null) { params.append("newestVersion", newestVersion.toString()); } - const entries = (await Request.receiveJSON( + return Request.receiveJSON( `${tracingStoreUrl}/tracings/annotation/${annotationId}/updateActionLog?${params}`, - )) as APIUpdateActionBatch[]; - - // todop: should not be necessary soon - entries.sort((a, b) => b.version - a.version); - - return entries; + ); }); } From d160263695dbc15d4757cc5e6efe147add1f9207 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 6 Dec 2024 16:47:55 +0100 Subject: [PATCH 274/361] respect earliestAccessibleVersion in version restore view --- frontend/javascripts/oxalis/default_state.ts | 1 + .../oxalis/model/reducers/reducer_helpers.ts | 2 + .../oxalis/model_initialization.ts | 14 ++++++- frontend/javascripts/oxalis/store.ts | 1 + .../javascripts/oxalis/view/version_list.tsx | 41 +++++++++++++------ .../test/geometries/skeleton.spec.ts | 2 +- .../test/sagas/skeletontracing_saga.spec.ts | 8 +--- 7 files changed, 47 insertions(+), 22 deletions(-) diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 7cce34d3b55..ade92b04250 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -178,6 +178,7 @@ const defaultState: OxalisState = { blockedByUser: null, annotationLayers: [], version: 0, + earliestAccessibleVersion: 0, stats: {}, }, save: { diff --git a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts index 1a5f73098ba..756d1d95469 100644 --- a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts @@ -87,6 +87,7 @@ export function convertPointToVecInBoundingBox(boundingBox: ServerBoundingBox): export 
function convertServerAnnotationToFrontendAnnotation( annotation: APIAnnotation, version: number, + earliestAccessibleVersion: number, ): Annotation { const { id: annotationId, @@ -110,6 +111,7 @@ export function convertServerAnnotationToFrontendAnnotation( visibility, tags, version, + earliestAccessibleVersion, stats, description, name, diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index c2eb23947b5..0d601dfe3ab 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -155,6 +155,7 @@ export async function initialize( ...maybeOutdatedAnnotation, name: annotationProto.name, description: annotationProto.description, + annotationProto: annotationProto.earliestAccessibleVersion, annotationLayers: layersWithStats, }; annotation = completeAnnotation; @@ -234,7 +235,13 @@ export async function initialize( // Satisfy TS. annotationProto should always exist if annotation exists. throw new Error("Annotation protobuf should not be null."); } - initializeAnnotation(annotation, annotationProto.version, serverTracings, editableMappings); + initializeAnnotation( + annotation, + annotationProto.version, + annotationProto.earliestAccessibleVersion, + serverTracings, + editableMappings, + ); } else { // In view only tracings we need to set the view mode too. 
const { allowedModes } = determineAllowedModes(); @@ -308,6 +315,7 @@ function maybeWarnAboutUnsupportedLayers(layers: Array): void { function initializeAnnotation( _annotation: APIAnnotation, version: number, + earliestAccessibleVersion: number, serverTracings: Array, editableMappings: Array, ) { @@ -340,7 +348,9 @@ function initializeAnnotation( } Store.dispatch( - initializeAnnotationAction(convertServerAnnotationToFrontendAnnotation(annotation, version)), + initializeAnnotationAction( + convertServerAnnotationToFrontendAnnotation(annotation, version, earliestAccessibleVersion), + ), ); getServerVolumeTracings(serverTracings).map((volumeTracing) => { ErrorHandling.assert( diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 5daae355c1c..d49560bea64 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -192,6 +192,7 @@ export type RestrictionsAndSettings = Restrictions & Settings; export type Annotation = { readonly annotationId: string; readonly version: number; + readonly earliestAccessibleVersion: number; readonly restrictions: RestrictionsAndSettings; readonly visibility: AnnotationVisibility; readonly annotationLayers: Array; diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index ddb13595d0e..436884d01bb 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -135,6 +135,7 @@ async function getUpdateActionLogPage( tracing: HybridTracing, tracingStoreUrl: string, annotationId: string, + earliestAccessibleVersion: number, newestVersion: number, // 0 is the "newest" page (i.e., the page in which the newest version is) relativePageNumber: number, @@ -148,17 +149,20 @@ async function getUpdateActionLogPage( // For example, the following parameters would be a valid variable set // (assuming ENTRIES_PER_PAGE = 2): // newestVersion = 23 - // 
relativePageNumber = 1 - // absolutePageNumber = ⌊11.5⌋ - 1 = 10 - // newestVersion = 22 - // oldestVersion = 21 + // relativePageNumber = 1 (0 is the newest, 1 is the second newest) + // absolutePageNumber = ⌊23/2⌋ - 1 = 10 + // newestVersionInPage = 22 + // oldestVersionInPage = 21 // Thus, versions 21 and 22 will be fetched for the second newest page const absolutePageNumber = Math.floor(newestVersion / ENTRIES_PER_PAGE) - relativePageNumber; if (absolutePageNumber < 0) { throw new Error("Negative absolute page number received."); } const newestVersionInPage = (1 + absolutePageNumber) * ENTRIES_PER_PAGE; - const oldestVersionInPage = absolutePageNumber * ENTRIES_PER_PAGE + 1; + const oldestVersionInPage = Math.max( + absolutePageNumber * ENTRIES_PER_PAGE + 1, + earliestAccessibleVersion, + ); const updateActionLog = await getUpdateActionLog( tracingStoreUrl, @@ -177,7 +181,10 @@ async function getUpdateActionLogPage( } // nextPage will contain older versions - const nextPage = oldestVersionInPage > 1 ? relativePageNumber + 1 : undefined; + const nextPage = + oldestVersionInPage > Math.max(earliestAccessibleVersion, 1) + ? relativePageNumber + 1 + : undefined; // previousPage will contain newer versions const previousPage = newestVersion > newestVersionInPage ? 
relativePageNumber - 1 : undefined; @@ -216,14 +223,20 @@ function InnerVersionList(props: Props & { newestVersion: number }) { const [initialVersion] = useState(tracing.version); function fetchPaginatedVersions({ pageParam }: { pageParam?: number }) { - // TODOp: maybe refactor this so that this method is not calculated very rendering cycle if (pageParam == null) { pageParam = Math.floor((newestVersion - initialVersion) / ENTRIES_PER_PAGE); } const { url: tracingStoreUrl } = Store.getState().tracing.tracingStore; - const annotationId = Store.getState().tracing.annotationId; - - return getUpdateActionLogPage(tracing, tracingStoreUrl, annotationId, newestVersion, pageParam); + const { annotationId, earliestAccessibleVersion } = Store.getState().tracing; + + return getUpdateActionLogPage( + tracing, + tracingStoreUrl, + annotationId, + earliestAccessibleVersion, + newestVersion, + pageParam, + ); } const queryKey = ["versions", tracing.annotationId]; @@ -329,13 +342,17 @@ function InnerVersionList(props: Props & { newestVersion: number }) { } /> )} - {hasNextPage && ( + {hasNextPage ? (
- )} + ) : tracing.earliestAccessibleVersion > 0 ? ( +
+ Cannot show versions earlier than {tracing.earliestAccessibleVersion}. +
+ ) : null} ); } diff --git a/frontend/javascripts/test/geometries/skeleton.spec.ts b/frontend/javascripts/test/geometries/skeleton.spec.ts index eb1930cbf2f..63e5a5e45f0 100644 --- a/frontend/javascripts/test/geometries/skeleton.spec.ts +++ b/frontend/javascripts/test/geometries/skeleton.spec.ts @@ -40,7 +40,7 @@ test.before((t) => { tracing.trees = []; delete tracing.activeNodeId; Store.dispatch( - initializeAnnotationAction(convertServerAnnotationToFrontendAnnotation(annotation, 0)), + initializeAnnotationAction(convertServerAnnotationToFrontendAnnotation(annotation, 0, 0)), ); Store.dispatch(initializeSkeletonTracingAction(tracing)); diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index f8d60721fca..d125f408ca3 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -1,11 +1,5 @@ import "test/sagas/skeletontracing_saga.mock.js"; -import type { - Flycam, - HybridTracing, - OxalisState, - SaveQueueEntry, - SkeletonTracing, -} from "oxalis/store"; +import type { Flycam, HybridTracing, OxalisState, SkeletonTracing } from "oxalis/store"; import ChainReducer from "test/helpers/chainReducer"; import DiffableMap from "libs/diffable_map"; import EdgeCollection from "oxalis/model/edge_collection"; From 625403daf73e0ee3703940e0397d8df5d6e85bb7 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 6 Dec 2024 17:01:00 +0100 Subject: [PATCH 275/361] fix editing of annotation name --- .../oxalis/model/sagas/annotation_saga.tsx | 18 ++++++------------ .../oxalis/model/sagas/update_actions.ts | 4 ++-- .../javascripts/oxalis/model_initialization.ts | 1 - .../javascripts/oxalis/view/version_entry.tsx | 13 ++++--------- .../fixtures/skeletontracing_server_objects.ts | 1 - .../fixtures/tasktracing_server_objects.ts | 1 - .../fixtures/volumetracing_server_objects.ts | 1 - 
frontend/javascripts/types/api_flow_types.ts | 1 - 8 files changed, 12 insertions(+), 28 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index f8d66ae28a2..fb3315f1150 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -5,7 +5,6 @@ import { type EditAnnotationLayerAction, setAnnotationAllowUpdateAction, type SetAnnotationDescriptionAction, - type SetAnnotationNameAction, setBlockedByUserAction, type SetOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; @@ -53,20 +52,12 @@ import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./update_ */ const MAX_MAG_FOR_AGGLOMERATE_MAPPING = 16; -export function* pushAnnotationNameUpdateAction(action: SetAnnotationNameAction) { - const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); - if (!mayEdit) { - return; - } - yield* put(pushSaveQueueTransaction([updateMetadataOfAnnotation(action.name)])); -} - export function* pushAnnotationDescriptionUpdateAction(action: SetAnnotationDescriptionAction) { const mayEdit = yield* select((state) => mayEditAnnotationProperties(state)); if (!mayEdit) { return; } - yield* put(pushSaveQueueTransaction([updateMetadataOfAnnotation(undefined, action.description)])); + yield* put(pushSaveQueueTransaction([updateMetadataOfAnnotation(action.description)])); } export function* pushAnnotationUpdateAsync(action: Action) { @@ -86,6 +77,7 @@ export function* pushAnnotationUpdateAsync(action: Action) { }; // The extra type annotation is needed here for flow const editObject: Partial = { + name: tracing.name, visibility: tracing.visibility, viewConfiguration, }; @@ -220,8 +212,10 @@ export function* watchAnnotationAsync(): Saga { // name, only the latest action is relevant. 
If `_takeEvery` was used, // all updates to the annotation name would be retried regularly, which // would also cause race conditions. - yield* takeLatest("SET_ANNOTATION_NAME", pushAnnotationNameUpdateAction); - yield* takeLatest("SET_ANNOTATION_VISIBILITY", pushAnnotationUpdateAsync); + yield* takeLatest( + ["SET_ANNOTATION_NAME", "SET_ANNOTATION_VISIBILITY"], + pushAnnotationUpdateAsync, + ); yield* takeLatest("SET_ANNOTATION_DESCRIPTION", pushAnnotationDescriptionUpdateAction); yield* takeLatest( ((action: Action) => diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 46228300519..36ac80c6071 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -658,10 +658,10 @@ export function updateAnnotationLayerName(tracingId: string, newLayerName: strin } as const; } -export function updateMetadataOfAnnotation(name?: string, description?: string) { +export function updateMetadataOfAnnotation(description: string) { return { name: "updateMetadataOfAnnotation", - value: { name, description }, + value: { description }, } as const; } diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 0d601dfe3ab..b14139a20cb 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -153,7 +153,6 @@ export async function initialize( }); const completeAnnotation = { ...maybeOutdatedAnnotation, - name: annotationProto.name, description: annotationProto.description, annotationProto: annotationProto.earliestAccessibleVersion, annotationLayers: layersWithStats, diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index b588c453528..0cb02fa0861 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ 
b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -283,15 +283,10 @@ const descriptionFns: Record< icon: , }), updateMetadataOfAnnotation: (action: UpdateMetadataOfAnnotationUpdateAction): Description => { - const updatedName = action.value.name != null; - const updatedDescription = action.value.description != null; - const updatedText = - updatedName && updatedDescription - ? "name and description" - : updatedName - ? "name" - : "description"; - return { description: `Updated the ${updatedText} of the annotation.`, icon: }; + return { + description: `Updated the description of the annotation to: ${action.value.description.slice(0, 100) || ""}`, + icon: , + }; }, } as const; diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 018ad315d4f..0d2a638f470 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -217,7 +217,6 @@ export const annotation: APIAnnotation = { }; export const annotationProto: APITracingStoreAnnotation = { - name: "skeleton-annotation-name", description: "skeleton-annotation-description", version: 1, earliestAccessibleVersion: 0, diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 165ccd7da05..2ae24f42f5a 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -184,7 +184,6 @@ export const annotation: APIAnnotation = { ], }; export const annotationProto: APITracingStoreAnnotation = { - name: "task-annotation-name", description: "task-annotation-description", version: 1, earliestAccessibleVersion: 0, diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts 
b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index 63e8ae2141f..b207600ab60 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -125,7 +125,6 @@ export const annotation: APIAnnotation = { isLockedByOwner: false, }; export const annotationProto: APITracingStoreAnnotation = { - name: "volume-annotation-name", description: "volume-annotation-description", version: 1, earliestAccessibleVersion: 0, diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 0dd92e85ffd..e9d06197cb3 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -583,7 +583,6 @@ type APITracingStoreAnnotationLayer = { }; export type APITracingStoreAnnotation = { - readonly name: string; readonly description: string; readonly version: number; readonly earliestAccessibleVersion: number; From 39cc6567d23682ce101a06c99dec730b9668b323 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 6 Dec 2024 17:09:16 +0100 Subject: [PATCH 276/361] fix that newest version was also requested even when no annotation exists (e.g., in dashboard or in DS view mode) --- frontend/javascripts/oxalis/model/sagas/save_saga.ts | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 3f309ffb0f6..79137a1a781 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -489,6 +489,10 @@ function* watchForSaveConflicts() { maybeSkeletonTracing, ]); + if (tracings.length === 0) { + return; + } + const versionOnServer = yield* call( getNewestVersionForAnnotation, tracingStoreUrl, @@ -548,6 +552,8 @@ function* watchForSaveConflicts() { return VERSION_POLL_INTERVAL_SINGLE_EDITOR; } + yield* call(ensureWkReady); + while 
(true) { const interval = yield* call(getPollInterval); yield* call(sleep, interval); From 70988b7d5f0324165a80c2746acc35731e898979 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 6 Dec 2024 17:17:36 +0100 Subject: [PATCH 277/361] fix tooltip for segment stats --- .../view/right-border-tabs/dataset_info_tab_view.tsx | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index 303be51ef43..789543c974f 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -1,7 +1,7 @@ import type { Dispatch } from "redux"; import { Typography, Tag } from "antd"; import { SettingOutlined, InfoCircleOutlined, EditOutlined } from "@ant-design/icons"; -import { connect } from "react-redux"; +import { connect, useSelector } from "react-redux"; import Markdown from "libs/markdown_adapter"; import React, { type CSSProperties } from "react"; import { Link } from "react-router-dom"; @@ -210,6 +210,8 @@ export function AnnotationStats({ asInfoBlock: boolean; withMargin?: boolean | null | undefined; }) { + const dataset = useSelector((state: OxalisState) => state.dataset); + const annotation = useSelector((state: OxalisState) => state.tracing); if (!stats || Object.keys(stats).length === 0) return null; const formatLabel = (str: string) => (asInfoBlock ? str : ""); const useStyleWithMargin = withMargin != null ? withMargin : true; @@ -221,7 +223,7 @@ export function AnnotationStats({ const segmentCountDetails = volumeStats .map( ([layerName, volume]) => - `

${layerName}: ${volume.segmentCount} ${pluralize("Segment", volume.segmentCount)}

`, + `

${getReadableNameForLayerName(dataset, annotation, layerName)}: ${volume.segmentCount} ${pluralize("Segment", volume.segmentCount)}

`, ) .join(""); @@ -259,7 +261,7 @@ export function AnnotationStats({ {volumeStats.length > 0 ? ( Date: Mon, 9 Dec 2024 11:07:22 +0100 Subject: [PATCH 278/361] require passing annotationid to some tracingstore routes --- app/controllers/AnnotationIOController.scala | 7 ++++--- .../WKRemoteTracingStoreController.scala | 1 + .../AnnotationLayerPrecedence.scala | 16 +++++++++----- app/models/annotation/AnnotationService.scala | 21 +++++++------------ .../WKRemoteTracingStoreClient.scala | 12 +++++++---- webknossos-datastore/proto/Annotation.proto | 1 + .../EditableMappingController.scala | 6 ++---- .../SkeletonTracingController.scala | 3 +-- .../controllers/VolumeTracingController.scala | 6 ++---- ...alableminds.webknossos.tracingstore.routes | 10 ++++----- 10 files changed, 43 insertions(+), 40 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 723e1e79633..1d42a0fafd6 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -401,7 +401,7 @@ class AnnotationIOController @Inject()( for { tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedAnnotationLayers <- Fox.serialCombined(annotation.skeletonAnnotationLayers)( - tracingStoreClient.getSkeletonTracing(_, version)) + tracingStoreClient.getSkeletonTracing(annotation._id, _, version)) user <- userService.findOneCached(annotation._user)(GlobalAccessContext) taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) nmlStream = nmlWriter.toNmlStream( @@ -433,7 +433,8 @@ class AnnotationIOController @Inject()( tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedVolumeLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.volumeAnnotationLayers) { volumeAnnotationLayer => - tracingStoreClient.getVolumeTracing(volumeAnnotationLayer, + tracingStoreClient.getVolumeTracing(annotation._id, + volumeAnnotationLayer, version, skipVolumeData, 
volumeDataZipFormat, @@ -441,7 +442,7 @@ class AnnotationIOController @Inject()( } ?~> "annotation.download.fetchVolumeLayer.failed" fetchedSkeletonLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.skeletonAnnotationLayers) { skeletonAnnotationLayer => - tracingStoreClient.getSkeletonTracing(skeletonAnnotationLayer, version) + tracingStoreClient.getSkeletonTracing(annotation._id, skeletonAnnotationLayer, version) } ?~> "annotation.download.fetchSkeletonLayer.failed" user <- userService.findOneCached(annotation._user)(GlobalAccessContext) ?~> "annotation.download.findUser.failed" taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne(_)(GlobalAccessContext)) ?~> "task.notFound" diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 6a8a2dba8da..118b001ab68 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -186,6 +186,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore dataset <- datasetDAO.findOne(annotation._dataset) tracingEither <- annotationService.createTracingForExplorational(dataset, request.body, + Some(annotation._id), annotation.annotationLayers, Some(previousVersion)) tracing: GeneratedMessage = tracingEither match { diff --git a/app/models/annotation/AnnotationLayerPrecedence.scala b/app/models/annotation/AnnotationLayerPrecedence.scala index 96ac3ec77fd..b5c94983966 100644 --- a/app/models/annotation/AnnotationLayerPrecedence.scala +++ b/app/models/annotation/AnnotationLayerPrecedence.scala @@ -1,5 +1,6 @@ package models.annotation +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -69,13 +70,15 @@ trait AnnotationLayerPrecedence { ) 
}.getOrElse(volumeTracing) - protected def getOldPrecedenceLayerProperties(existingAnnotationLayers: List[AnnotationLayer], + protected def getOldPrecedenceLayerProperties(existingAnnotationId: Option[ObjectId], + existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long], dataset: Dataset, tracingStoreClient: WKRemoteTracingStoreClient)( implicit ec: ExecutionContext): Fox[Option[RedundantTracingProperties]] = for { - oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationLayers, + oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationId, + existingAnnotationLayers, previousVersion, dataset, tracingStoreClient) @@ -104,7 +107,8 @@ trait AnnotationLayerPrecedence { } else Fox.failure("Trying to select precedence layer from empty layer list.") } - private def fetchOldPrecedenceLayer(existingAnnotationLayers: List[AnnotationLayer], + private def fetchOldPrecedenceLayer(existingAnnotationIdOpt: Option[ObjectId], + existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long], dataset: Dataset, tracingStoreClient: WKRemoteTracingStoreClient)( @@ -112,11 +116,13 @@ trait AnnotationLayerPrecedence { if (existingAnnotationLayers.isEmpty) Fox.successful(None) else for { + existingAnnotationId <- existingAnnotationIdOpt.toFox ?~> "fetchOldPrecedenceLayer.needsAnnotationId" oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) - tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, previousVersion) + tracingStoreClient.getSkeletonTracing(existingAnnotationId, oldPrecedenceLayer, previousVersion) else - tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, + tracingStoreClient.getVolumeTracing(existingAnnotationId, + oldPrecedenceLayer, previousVersion, skipVolumeData = true, volumeDataZipFormat = VolumeDataZipFormat.wkw, diff --git a/app/models/annotation/AnnotationService.scala 
b/app/models/annotation/AnnotationService.scala index 39557adb1ab..c6ee592db24 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -165,6 +165,7 @@ class AnnotationService @Inject()( def createTracingForExplorational(dataset: Dataset, params: AnnotationLayerParameters, + existingAnnotationId: Option[ObjectId], existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long])( implicit ctx: DBAccessContext, @@ -209,7 +210,8 @@ class AnnotationService @Inject()( We do this for *every* new layer, since we only later get its ID which determines the actual precedence. All of this is skipped if existingAnnotationLayers is empty. */ - oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationLayers, + oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationId, + existingAnnotationLayers, previousVersion, dataset, tracingStoreClient) @@ -245,8 +247,7 @@ class AnnotationService @Inject()( private def createLayersForExplorational(dataset: Dataset, annotationId: ObjectId, - allAnnotationLayerParameters: List[AnnotationLayerParameters], - existingAnnotationLayers: List[AnnotationLayer])( + allAnnotationLayerParameters: List[AnnotationLayerParameters])( implicit ctx: DBAccessContext, mp: MessagesProvider): Fox[List[AnnotationLayer]] = for { @@ -255,7 +256,8 @@ class AnnotationService @Inject()( for { tracing <- createTracingForExplorational(dataset, annotationLayerParameters, - existingAnnotationLayers, + existingAnnotationId = None, + existingAnnotationLayers = List.empty, previousVersion = None) layerName = annotationLayerParameters.name.getOrElse( AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) @@ -293,11 +295,7 @@ class AnnotationService @Inject()( for { dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.noAccessById" newAnnotationId = ObjectId.generate - annotationLayers <- createLayersForExplorational( - dataset, - 
newAnnotationId, - annotationLayerParameters, - existingAnnotationLayers = List.empty) ?~> "annotation.createTracings.failed" + annotationLayers <- createLayersForExplorational(dataset, newAnnotationId, annotationLayerParameters) ?~> "annotation.createTracings.failed" teamId <- selectSuitableTeam(user, dataset) ?~> "annotation.create.forbidden" annotation = Annotation(newAnnotationId, datasetId, None, teamId, user._id, annotationLayers) _ <- annotationDAO.insertOne(annotation) @@ -610,10 +608,7 @@ class AnnotationService @Inject()( case Some(_) if skipVolumeData => Fox.successful(None) case Some(tracingId) => tracingStoreClient - .getVolumeData(tracingId, - version = None, - volumeDataZipFormat = volumeDataZipFormat, - voxelSize = dataset.voxelSize) + .getVolumeData(tracingId, volumeDataZipFormat = volumeDataZipFormat, voxelSize = dataset.voxelSize) .map(Some(_)) } } yield tracingDataObjects diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index aabc0e6c8e6..21aafa0d73b 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -37,12 +37,15 @@ class WKRemoteTracingStoreClient( private def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}" - def getSkeletonTracing(annotationLayer: AnnotationLayer, version: Option[Long]): Fox[FetchedAnnotationLayer] = { + def getSkeletonTracing(annotationId: ObjectId, + annotationLayer: AnnotationLayer, + version: Option[Long]): Fox[FetchedAnnotationLayer] = { logger.debug("Called to get SkeletonTracing." 
+ baseInfo) for { _ <- bool2Fox(annotationLayer.typ == AnnotationLayerType.Skeleton) ?~> "annotation.download.fetch.notSkeleton" skeletonTracing <- rpc(s"${tracingStore.url}/tracings/skeleton/${annotationLayer.tracingId}") .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .withLongTimeout .getWithProtoResponse[SkeletonTracing](SkeletonTracing) @@ -196,7 +199,8 @@ class WKRemoteTracingStoreClient( } yield tracingId } - def getVolumeTracing(annotationLayer: AnnotationLayer, + def getVolumeTracing(annotationId: ObjectId, + annotationLayer: AnnotationLayer, version: Option[Long] = None, skipVolumeData: Boolean, volumeDataZipFormat: VolumeDataZipFormat, @@ -207,12 +211,14 @@ class WKRemoteTracingStoreClient( tracingId = annotationLayer.tracingId tracing <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId") .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .getWithProtoResponse[VolumeTracing](VolumeTracing) data <- Fox.runIf(!skipVolumeData) { rpc(s"${tracingStore.url}/tracings/volume/$tracingId/allDataZip").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("volumeDataZipFormat" -> volumeDataZipFormat.toString) + .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("voxelSizeFactor", voxelSize.map(_.factor.toUriLiteral)) .addQueryStringOptional("voxelSizeUnit", voxelSize.map(_.unit.toString)) @@ -223,7 +229,6 @@ class WKRemoteTracingStoreClient( } def getVolumeData(tracingId: String, - version: Option[Long] = None, volumeDataZipFormat: VolumeDataZipFormat, voxelSize: Option[VoxelSize]): Fox[Array[Byte]] = { logger.debug("Called to get volume data." 
+ baseInfo) @@ -231,7 +236,6 @@ class WKRemoteTracingStoreClient( data <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId/allDataZip").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("volumeDataZipFormat" -> volumeDataZipFormat.toString) - .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("voxelSizeFactor", voxelSize.map(_.factor.toUriLiteral)) .addQueryStringOptional("voxelSizeUnit", voxelSize.map(_.unit.toString)) .getWithBytesResponse diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 61513fc9611..1938155bb95 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -12,6 +12,7 @@ message AnnotationProto { required int64 version = 2; repeated AnnotationLayerProto annotationLayers = 3; required int64 earliestAccessibleVersion = 4; + optional bool skeletonMayHavePendingUpdates = 5; } message AnnotationLayerProto { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 96577b4d404..7dd5fa80389 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -28,12 +28,11 @@ class EditableMappingController @Inject()( editableMappingService: EditableMappingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller { - def editableMappingInfo(tracingId: String, version: Option[Long]): Action[AnyContent] = + def editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version) @@ -65,12 +64,11 @@ class EditableMappingController @Inject()( } } - def agglomerateIdsForSegments(tracingId: String, version: Option[Long]): Action[ListOfLong] = + def agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long]): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) annotation <- annotationService.get(annotationId, version) tracing <- annotationService.findVolume(annotationId, tracingId, version) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 0889f7d5ed2..c451cea2132 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -67,12 +67,11 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin } } - def get(tracingId: String, version: Option[Long]): Action[AnyContent] = + def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findSkeleton(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index ed4223553f5..3d7a17c27eb 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -102,12 +102,11 @@ class VolumeTracingController @Inject()( } } - def get(tracingId: String, version: Option[Long]): Action[AnyContent] = + def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } @@ -187,7 +186,6 @@ class VolumeTracingController @Inject()( def allDataZip(tracingId: String, volumeDataZipFormat: String, - version: Option[Long], voxelSizeFactor: Option[String], voxelSizeUnit: Option[String]): Action[AnyContent] = Action.async { implicit request => @@ -195,7 +193,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- 
annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 2a55b74ecf0..b9541f4ee67 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -19,8 +19,8 @@ POST /annotation/mergedFromIds POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(newTracingId: Option[String]) POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]) POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) -GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, version: Option[Long]) -GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) +GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, annotationId: String, version: Option[Long]) +GET /volume/:tracingId/allDataZip 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, voxelSize: Option[String], voxelSizeUnit: Option[String]) POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) @@ -34,9 +34,9 @@ POST /volume/getMultiple POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents # Editable Mappings -GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, version: Option[Long]) +GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long]) GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) -POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String, version: Option[Long]) +POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long]) POST /mapping/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(tracingId: String) POST 
/mapping/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(tracingId: String) GET /mapping/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) @@ -71,6 +71,6 @@ GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents -GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) +GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, annotationId: String, version: Option[Long]) POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple From f24d12357a5fd15a6913a157322582d81de725c0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 11:16:46 +0100 Subject: [PATCH 279/361] Revert "require passing annotationid to some tracingstore routes" This reverts commit 856f11b9f52b041280155b7c9f8d18af354aff9f. 
--- app/controllers/AnnotationIOController.scala | 7 +++---- .../WKRemoteTracingStoreController.scala | 1 - .../AnnotationLayerPrecedence.scala | 16 +++++--------- app/models/annotation/AnnotationService.scala | 21 ++++++++++++------- .../WKRemoteTracingStoreClient.scala | 12 ++++------- webknossos-datastore/proto/Annotation.proto | 1 - .../EditableMappingController.scala | 6 ++++-- .../SkeletonTracingController.scala | 3 ++- .../controllers/VolumeTracingController.scala | 6 ++++-- ...alableminds.webknossos.tracingstore.routes | 10 ++++----- 10 files changed, 40 insertions(+), 43 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 1d42a0fafd6..723e1e79633 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -401,7 +401,7 @@ class AnnotationIOController @Inject()( for { tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedAnnotationLayers <- Fox.serialCombined(annotation.skeletonAnnotationLayers)( - tracingStoreClient.getSkeletonTracing(annotation._id, _, version)) + tracingStoreClient.getSkeletonTracing(_, version)) user <- userService.findOneCached(annotation._user)(GlobalAccessContext) taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) nmlStream = nmlWriter.toNmlStream( @@ -433,8 +433,7 @@ class AnnotationIOController @Inject()( tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedVolumeLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.volumeAnnotationLayers) { volumeAnnotationLayer => - tracingStoreClient.getVolumeTracing(annotation._id, - volumeAnnotationLayer, + tracingStoreClient.getVolumeTracing(volumeAnnotationLayer, version, skipVolumeData, volumeDataZipFormat, @@ -442,7 +441,7 @@ class AnnotationIOController @Inject()( } ?~> "annotation.download.fetchVolumeLayer.failed" fetchedSkeletonLayers: List[FetchedAnnotationLayer] <- 
Fox.serialCombined(annotation.skeletonAnnotationLayers) { skeletonAnnotationLayer => - tracingStoreClient.getSkeletonTracing(annotation._id, skeletonAnnotationLayer, version) + tracingStoreClient.getSkeletonTracing(skeletonAnnotationLayer, version) } ?~> "annotation.download.fetchSkeletonLayer.failed" user <- userService.findOneCached(annotation._user)(GlobalAccessContext) ?~> "annotation.download.findUser.failed" taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne(_)(GlobalAccessContext)) ?~> "task.notFound" diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 118b001ab68..6a8a2dba8da 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -186,7 +186,6 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore dataset <- datasetDAO.findOne(annotation._dataset) tracingEither <- annotationService.createTracingForExplorational(dataset, request.body, - Some(annotation._id), annotation.annotationLayers, Some(previousVersion)) tracing: GeneratedMessage = tracingEither match { diff --git a/app/models/annotation/AnnotationLayerPrecedence.scala b/app/models/annotation/AnnotationLayerPrecedence.scala index b5c94983966..96ac3ec77fd 100644 --- a/app/models/annotation/AnnotationLayerPrecedence.scala +++ b/app/models/annotation/AnnotationLayerPrecedence.scala @@ -1,6 +1,5 @@ package models.annotation -import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -70,15 +69,13 @@ trait AnnotationLayerPrecedence { ) }.getOrElse(volumeTracing) - protected def getOldPrecedenceLayerProperties(existingAnnotationId: Option[ObjectId], - existingAnnotationLayers: List[AnnotationLayer], + protected def 
getOldPrecedenceLayerProperties(existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long], dataset: Dataset, tracingStoreClient: WKRemoteTracingStoreClient)( implicit ec: ExecutionContext): Fox[Option[RedundantTracingProperties]] = for { - oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationId, - existingAnnotationLayers, + oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationLayers, previousVersion, dataset, tracingStoreClient) @@ -107,8 +104,7 @@ trait AnnotationLayerPrecedence { } else Fox.failure("Trying to select precedence layer from empty layer list.") } - private def fetchOldPrecedenceLayer(existingAnnotationIdOpt: Option[ObjectId], - existingAnnotationLayers: List[AnnotationLayer], + private def fetchOldPrecedenceLayer(existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long], dataset: Dataset, tracingStoreClient: WKRemoteTracingStoreClient)( @@ -116,13 +112,11 @@ trait AnnotationLayerPrecedence { if (existingAnnotationLayers.isEmpty) Fox.successful(None) else for { - existingAnnotationId <- existingAnnotationIdOpt.toFox ?~> "fetchOldPrecedenceLayer.needsAnnotationId" oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) - tracingStoreClient.getSkeletonTracing(existingAnnotationId, oldPrecedenceLayer, previousVersion) + tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, previousVersion) else - tracingStoreClient.getVolumeTracing(existingAnnotationId, - oldPrecedenceLayer, + tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, previousVersion, skipVolumeData = true, volumeDataZipFormat = VolumeDataZipFormat.wkw, diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index c6ee592db24..39557adb1ab 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -165,7 +165,6 @@ 
class AnnotationService @Inject()( def createTracingForExplorational(dataset: Dataset, params: AnnotationLayerParameters, - existingAnnotationId: Option[ObjectId], existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long])( implicit ctx: DBAccessContext, @@ -210,8 +209,7 @@ class AnnotationService @Inject()( We do this for *every* new layer, since we only later get its ID which determines the actual precedence. All of this is skipped if existingAnnotationLayers is empty. */ - oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationId, - existingAnnotationLayers, + oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationLayers, previousVersion, dataset, tracingStoreClient) @@ -247,7 +245,8 @@ class AnnotationService @Inject()( private def createLayersForExplorational(dataset: Dataset, annotationId: ObjectId, - allAnnotationLayerParameters: List[AnnotationLayerParameters])( + allAnnotationLayerParameters: List[AnnotationLayerParameters], + existingAnnotationLayers: List[AnnotationLayer])( implicit ctx: DBAccessContext, mp: MessagesProvider): Fox[List[AnnotationLayer]] = for { @@ -256,8 +255,7 @@ class AnnotationService @Inject()( for { tracing <- createTracingForExplorational(dataset, annotationLayerParameters, - existingAnnotationId = None, - existingAnnotationLayers = List.empty, + existingAnnotationLayers, previousVersion = None) layerName = annotationLayerParameters.name.getOrElse( AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) @@ -295,7 +293,11 @@ class AnnotationService @Inject()( for { dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.noAccessById" newAnnotationId = ObjectId.generate - annotationLayers <- createLayersForExplorational(dataset, newAnnotationId, annotationLayerParameters) ?~> "annotation.createTracings.failed" + annotationLayers <- createLayersForExplorational( + dataset, + newAnnotationId, + annotationLayerParameters, + existingAnnotationLayers = 
List.empty) ?~> "annotation.createTracings.failed" teamId <- selectSuitableTeam(user, dataset) ?~> "annotation.create.forbidden" annotation = Annotation(newAnnotationId, datasetId, None, teamId, user._id, annotationLayers) _ <- annotationDAO.insertOne(annotation) @@ -608,7 +610,10 @@ class AnnotationService @Inject()( case Some(_) if skipVolumeData => Fox.successful(None) case Some(tracingId) => tracingStoreClient - .getVolumeData(tracingId, volumeDataZipFormat = volumeDataZipFormat, voxelSize = dataset.voxelSize) + .getVolumeData(tracingId, + version = None, + volumeDataZipFormat = volumeDataZipFormat, + voxelSize = dataset.voxelSize) .map(Some(_)) } } yield tracingDataObjects diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 21aafa0d73b..aabc0e6c8e6 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -37,15 +37,12 @@ class WKRemoteTracingStoreClient( private def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}" - def getSkeletonTracing(annotationId: ObjectId, - annotationLayer: AnnotationLayer, - version: Option[Long]): Fox[FetchedAnnotationLayer] = { + def getSkeletonTracing(annotationLayer: AnnotationLayer, version: Option[Long]): Fox[FetchedAnnotationLayer] = { logger.debug("Called to get SkeletonTracing." 
+ baseInfo) for { _ <- bool2Fox(annotationLayer.typ == AnnotationLayerType.Skeleton) ?~> "annotation.download.fetch.notSkeleton" skeletonTracing <- rpc(s"${tracingStore.url}/tracings/skeleton/${annotationLayer.tracingId}") .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .withLongTimeout .getWithProtoResponse[SkeletonTracing](SkeletonTracing) @@ -199,8 +196,7 @@ class WKRemoteTracingStoreClient( } yield tracingId } - def getVolumeTracing(annotationId: ObjectId, - annotationLayer: AnnotationLayer, + def getVolumeTracing(annotationLayer: AnnotationLayer, version: Option[Long] = None, skipVolumeData: Boolean, volumeDataZipFormat: VolumeDataZipFormat, @@ -211,14 +207,12 @@ class WKRemoteTracingStoreClient( tracingId = annotationLayer.tracingId tracing <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId") .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .getWithProtoResponse[VolumeTracing](VolumeTracing) data <- Fox.runIf(!skipVolumeData) { rpc(s"${tracingStore.url}/tracings/volume/$tracingId/allDataZip").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("volumeDataZipFormat" -> volumeDataZipFormat.toString) - .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("voxelSizeFactor", voxelSize.map(_.factor.toUriLiteral)) .addQueryStringOptional("voxelSizeUnit", voxelSize.map(_.unit.toString)) @@ -229,6 +223,7 @@ class WKRemoteTracingStoreClient( } def getVolumeData(tracingId: String, + version: Option[Long] = None, volumeDataZipFormat: VolumeDataZipFormat, voxelSize: Option[VoxelSize]): Fox[Array[Byte]] = { logger.debug("Called to get volume data." 
+ baseInfo) @@ -236,6 +231,7 @@ class WKRemoteTracingStoreClient( data <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId/allDataZip").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("volumeDataZipFormat" -> volumeDataZipFormat.toString) + .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("voxelSizeFactor", voxelSize.map(_.factor.toUriLiteral)) .addQueryStringOptional("voxelSizeUnit", voxelSize.map(_.unit.toString)) .getWithBytesResponse diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 1938155bb95..61513fc9611 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -12,7 +12,6 @@ message AnnotationProto { required int64 version = 2; repeated AnnotationLayerProto annotationLayers = 3; required int64 earliestAccessibleVersion = 4; - optional bool skeletonMayHavePendingUpdates = 5; } message AnnotationLayerProto { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 7dd5fa80389..96577b4d404 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -28,11 +28,12 @@ class EditableMappingController @Inject()( editableMappingService: EditableMappingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller { - def editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = + def editableMappingInfo(tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version) @@ -64,11 +65,12 @@ class EditableMappingController @Inject()( } } - def agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long]): Action[ListOfLong] = + def agglomerateIdsForSegments(tracingId: String, version: Option[Long]): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) annotation <- annotationService.get(annotationId, version) tracing <- annotationService.findVolume(annotationId, tracingId, version) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index c451cea2132..0889f7d5ed2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -67,11 +67,12 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin } } - def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = + def get(tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findSkeleton(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 3d7a17c27eb..ed4223553f5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -102,11 +102,12 @@ class VolumeTracingController @Inject()( } } - def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = + def get(tracingId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { + annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } @@ -186,6 +187,7 @@ class VolumeTracingController @Inject()( def allDataZip(tracingId: String, volumeDataZipFormat: String, + version: Option[Long], voxelSizeFactor: Option[String], voxelSizeUnit: Option[String]): Action[AnyContent] = Action.async { implicit request => @@ -193,7 +195,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- 
annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index b9541f4ee67..2a55b74ecf0 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -19,8 +19,8 @@ POST /annotation/mergedFromIds POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(newTracingId: Option[String]) POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]) POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) -GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, annotationId: String, version: Option[Long]) -GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, voxelSize: Option[String], voxelSizeUnit: Option[String]) +GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, version: Option[Long]) +GET /volume/:tracingId/allDataZip 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) @@ -34,9 +34,9 @@ POST /volume/getMultiple POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents # Editable Mappings -GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long]) +GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, version: Option[Long]) GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) -POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long]) +POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String, version: Option[Long]) POST /mapping/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(tracingId: String) 
POST /mapping/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(tracingId: String) GET /mapping/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) @@ -71,6 +71,6 @@ GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates POST /skeleton/save @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents -GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, annotationId: String, version: Option[Long]) +GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple From a34c7747de80845b0f7381f5b991aef3578f46cc Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 12:00:45 +0100 Subject: [PATCH 280/361] introduce skeletonMayHavePendingUpdates property to annotation proto to handle special case in migration --- .../Annotation_pb2.py | 12 ++-- .../migration.py | 1 + webknossos-datastore/proto/Annotation.proto | 1 + .../annotation/AnnotationWithTracings.scala | 4 ++ .../annotation/TSAnnotationService.scala | 61 ++++++++++++++++--- 5 files 
changed, 64 insertions(+), 15 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/Annotation_pb2.py b/tools/migration-unified-annotation-versioning/Annotation_pb2.py index 5b9fa64b93b..d8297d55911 100644 --- a/tools/migration-unified-annotation-versioning/Annotation_pb2.py +++ b/tools/migration-unified-annotation-versioning/Annotation_pb2.py @@ -13,17 +13,17 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\xb2\x01\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x04 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\xd9\x01\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\x12%\n\x1dskeletonMayHavePendingUpdates\x18\x05 \x01(\x08\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x04 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) 
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'Annotation_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - _ANNOTATIONLAYERTYPEPROTO._serialized_start=379 - _ANNOTATIONLAYERTYPEPROTO._serialized_end=431 + _ANNOTATIONLAYERTYPEPROTO._serialized_start=418 + _ANNOTATIONLAYERTYPEPROTO._serialized_end=470 _ANNOTATIONPROTO._serialized_start=61 - _ANNOTATIONPROTO._serialized_end=239 - _ANNOTATIONLAYERPROTO._serialized_start=242 - _ANNOTATIONLAYERPROTO._serialized_end=377 + _ANNOTATIONPROTO._serialized_end=278 + _ANNOTATIONLAYERPROTO._serialized_start=281 + _ANNOTATIONLAYERPROTO._serialized_end=416 # @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 3f28b04b822..fc5454c896a 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -414,6 +414,7 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Se annotationProto.description = annotation["description"] or "" annotationProto.version = version annotationProto.earliestAccessibleVersion = 0 # TODO different for merged editable mappings + annotationProto.skeletonMayHavePendingUpdates = True # TODO set this to true less often (e.g. 
not on single-layer, or when there is no skeleton) for tracing_id, tracing_type in annotation["layers"].items(): layer_proto = AnnotationProto.AnnotationLayerProto() layer_proto.tracingId = tracing_id diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 61513fc9611..1938155bb95 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -12,6 +12,7 @@ message AnnotationProto { required int64 version = 2; repeated AnnotationLayerProto annotationLayers = 3; required int64 earliestAccessibleVersion = 4; + optional bool skeletonMayHavePendingUpdates = 5; } message AnnotationLayerProto { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 0d17dcfda24..758439035d1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -45,6 +45,10 @@ case class AnnotationWithTracings( case _ => None }.toList + // Assumes that there is at most one skeleton layer per annotation. 
This is true as of this writing + def getSkeletonId: Option[String] = + getSkeletons.headOption.map(_._1) + def getEditableMappingsInfo: List[(String, EditableMappingInfo)] = editableMappingsByTracingId.view.flatMap { case (id, (info: EditableMappingInfo, _)) => Some(id, info) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 3d931cccb14..5d85b760a5e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -120,6 +120,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def currentMaterializedVersion(annotationId: String): Fox[Long] = tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + def currentMaterializedSkeletonVersion(tracingId: String): Fox[Long] = + tracingDataStore.skeletons.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + private def getNewestMaterialized(annotationId: String): Fox[AnnotationProto] = for { keyValuePair <- tracingDataStore.annotations.get[AnnotationProto](annotationId, mayBeEmpty = Some(true))( @@ -280,22 +283,62 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss targetVersion: Long, reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - updateGroupsAsSaved <- findPendingUpdates(annotationId, annotationWithTracingsAndMappings.version, targetVersion) ?~> "findPendingUpdates.failed" + updateGroupsAsSaved <- findPendingUpdates(annotationId, annotationWithTracingsAndMappings, targetVersion) ?~> "findPendingUpdates.failed" updatesGroupsRegrouped = 
regroupByIsolationSensitiveActions(updateGroupsAsSaved) updated <- applyUpdatesGrouped(annotationWithTracingsAndMappings, annotationId, updatesGroupsRegrouped, reportChangesToWk) ?~> "applyUpdates.inner.failed" - } yield updated + } yield + updated.withVersion(targetVersion) // set version again, because extraSkeleton update filtering may skip latest version - private def findPendingUpdates(annotationId: String, existingVersion: Long, desiredVersion: Long)( + private def findPendingUpdates(annotationId: String, annotation: AnnotationWithTracings, desiredVersion: Long)( implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = - if (desiredVersion == existingVersion) Fox.successful(List()) - else { - tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( - annotationId, - Some(desiredVersion), - Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) + for { + extraSkeletonUpdates <- findExtraSkeletonUpdates(annotationId, annotation) + _ = logger.info(s"${extraSkeletonUpdates.length} extraSkeletonUpdates") + existingVersion = annotation.version + pendingAnnotationUpdates <- if (desiredVersion == existingVersion) Fox.successful(List.empty) + else { + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(desiredVersion), + Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } + } yield extraSkeletonUpdates ++ pendingAnnotationUpdates + + /* + * The migration of https://github.com/scalableminds/webknossos/pull/7917 does not guarantee that the skeleton layer + * is materialized at the same version as the annottation. So even if we have an existing annotation version, + * we may fetch skeleton updates *older* than it, in order to fully construct the state of that version. + * Only annotations from before that migration have this skeletonMayHavePendingUpdates=Some(true). 
+ */ + private def findExtraSkeletonUpdates(annotationId: String, annotation: AnnotationWithTracings)( + implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = + if (annotation.annotation.skeletonMayHavePendingUpdates.getOrElse(false)) { + annotation.getSkeletonId.map { skeletonId => + for { + materializedSkeletonVersion <- currentMaterializedSkeletonVersion(skeletonId) + extraUpdates <- if (materializedSkeletonVersion < annotation.version) { + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(annotation.version), + Some(materializedSkeletonVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } else Fox.successful(List.empty) + extraSkeletonUpdates = filterSkeletonUpdates(extraUpdates) + } yield extraSkeletonUpdates + }.getOrElse(Fox.successful(List.empty)) + } else Fox.successful(List.empty) + + private def filterSkeletonUpdates( + updateGroups: List[(Long, List[UpdateAction])]): List[(Long, List[SkeletonUpdateAction])] = + updateGroups.map { + case (version, updateGroup) => + val updateGroupFiltered = updateGroup.flatMap { + case a: SkeletonUpdateAction => Some(a) + case _ => None + } + (version, updateGroupFiltered) } private def findTracingsForAnnotation(annotation: AnnotationProto)( From 4f90eaad2840332b62d0df31e86be009e10fe52c Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 12:01:03 +0100 Subject: [PATCH 281/361] Revert "Revert "require passing annotationid to some tracingstore routes"" This reverts commit f24d12357a5fd15a6913a157322582d81de725c0. 
--- app/controllers/AnnotationIOController.scala | 7 ++++--- .../WKRemoteTracingStoreController.scala | 1 + .../AnnotationLayerPrecedence.scala | 16 +++++++++----- app/models/annotation/AnnotationService.scala | 21 +++++++------------ .../WKRemoteTracingStoreClient.scala | 12 +++++++---- .../EditableMappingController.scala | 6 ++---- .../SkeletonTracingController.scala | 3 +-- .../controllers/VolumeTracingController.scala | 6 ++---- ...alableminds.webknossos.tracingstore.routes | 10 ++++----- 9 files changed, 42 insertions(+), 40 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 723e1e79633..1d42a0fafd6 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -401,7 +401,7 @@ class AnnotationIOController @Inject()( for { tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedAnnotationLayers <- Fox.serialCombined(annotation.skeletonAnnotationLayers)( - tracingStoreClient.getSkeletonTracing(_, version)) + tracingStoreClient.getSkeletonTracing(annotation._id, _, version)) user <- userService.findOneCached(annotation._user)(GlobalAccessContext) taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) nmlStream = nmlWriter.toNmlStream( @@ -433,7 +433,8 @@ class AnnotationIOController @Inject()( tracingStoreClient <- tracingStoreService.clientFor(dataset) fetchedVolumeLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.volumeAnnotationLayers) { volumeAnnotationLayer => - tracingStoreClient.getVolumeTracing(volumeAnnotationLayer, + tracingStoreClient.getVolumeTracing(annotation._id, + volumeAnnotationLayer, version, skipVolumeData, volumeDataZipFormat, @@ -441,7 +442,7 @@ class AnnotationIOController @Inject()( } ?~> "annotation.download.fetchVolumeLayer.failed" fetchedSkeletonLayers: List[FetchedAnnotationLayer] <- Fox.serialCombined(annotation.skeletonAnnotationLayers) { skeletonAnnotationLayer => - 
tracingStoreClient.getSkeletonTracing(skeletonAnnotationLayer, version) + tracingStoreClient.getSkeletonTracing(annotation._id, skeletonAnnotationLayer, version) } ?~> "annotation.download.fetchSkeletonLayer.failed" user <- userService.findOneCached(annotation._user)(GlobalAccessContext) ?~> "annotation.download.findUser.failed" taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne(_)(GlobalAccessContext)) ?~> "task.notFound" diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 6a8a2dba8da..118b001ab68 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -186,6 +186,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore dataset <- datasetDAO.findOne(annotation._dataset) tracingEither <- annotationService.createTracingForExplorational(dataset, request.body, + Some(annotation._id), annotation.annotationLayers, Some(previousVersion)) tracing: GeneratedMessage = tracingEither match { diff --git a/app/models/annotation/AnnotationLayerPrecedence.scala b/app/models/annotation/AnnotationLayerPrecedence.scala index 96ac3ec77fd..b5c94983966 100644 --- a/app/models/annotation/AnnotationLayerPrecedence.scala +++ b/app/models/annotation/AnnotationLayerPrecedence.scala @@ -1,5 +1,6 @@ package models.annotation +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing @@ -69,13 +70,15 @@ trait AnnotationLayerPrecedence { ) }.getOrElse(volumeTracing) - protected def getOldPrecedenceLayerProperties(existingAnnotationLayers: List[AnnotationLayer], + protected def getOldPrecedenceLayerProperties(existingAnnotationId: Option[ObjectId], + existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long], 
dataset: Dataset, tracingStoreClient: WKRemoteTracingStoreClient)( implicit ec: ExecutionContext): Fox[Option[RedundantTracingProperties]] = for { - oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationLayers, + oldPrecedenceLayer <- fetchOldPrecedenceLayer(existingAnnotationId, + existingAnnotationLayers, previousVersion, dataset, tracingStoreClient) @@ -104,7 +107,8 @@ trait AnnotationLayerPrecedence { } else Fox.failure("Trying to select precedence layer from empty layer list.") } - private def fetchOldPrecedenceLayer(existingAnnotationLayers: List[AnnotationLayer], + private def fetchOldPrecedenceLayer(existingAnnotationIdOpt: Option[ObjectId], + existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long], dataset: Dataset, tracingStoreClient: WKRemoteTracingStoreClient)( @@ -112,11 +116,13 @@ trait AnnotationLayerPrecedence { if (existingAnnotationLayers.isEmpty) Fox.successful(None) else for { + existingAnnotationId <- existingAnnotationIdOpt.toFox ?~> "fetchOldPrecedenceLayer.needsAnnotationId" oldPrecedenceLayer <- selectLayerWithPrecedence(existingAnnotationLayers) oldPrecedenceLayerFetched <- if (oldPrecedenceLayer.typ == AnnotationLayerType.Skeleton) - tracingStoreClient.getSkeletonTracing(oldPrecedenceLayer, previousVersion) + tracingStoreClient.getSkeletonTracing(existingAnnotationId, oldPrecedenceLayer, previousVersion) else - tracingStoreClient.getVolumeTracing(oldPrecedenceLayer, + tracingStoreClient.getVolumeTracing(existingAnnotationId, + oldPrecedenceLayer, previousVersion, skipVolumeData = true, volumeDataZipFormat = VolumeDataZipFormat.wkw, diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 39557adb1ab..c6ee592db24 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -165,6 +165,7 @@ class AnnotationService @Inject()( def createTracingForExplorational(dataset: Dataset, params: 
AnnotationLayerParameters, + existingAnnotationId: Option[ObjectId], existingAnnotationLayers: List[AnnotationLayer], previousVersion: Option[Long])( implicit ctx: DBAccessContext, @@ -209,7 +210,8 @@ class AnnotationService @Inject()( We do this for *every* new layer, since we only later get its ID which determines the actual precedence. All of this is skipped if existingAnnotationLayers is empty. */ - oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationLayers, + oldPrecedenceLayerProperties <- getOldPrecedenceLayerProperties(existingAnnotationId, + existingAnnotationLayers, previousVersion, dataset, tracingStoreClient) @@ -245,8 +247,7 @@ class AnnotationService @Inject()( private def createLayersForExplorational(dataset: Dataset, annotationId: ObjectId, - allAnnotationLayerParameters: List[AnnotationLayerParameters], - existingAnnotationLayers: List[AnnotationLayer])( + allAnnotationLayerParameters: List[AnnotationLayerParameters])( implicit ctx: DBAccessContext, mp: MessagesProvider): Fox[List[AnnotationLayer]] = for { @@ -255,7 +256,8 @@ class AnnotationService @Inject()( for { tracing <- createTracingForExplorational(dataset, annotationLayerParameters, - existingAnnotationLayers, + existingAnnotationId = None, + existingAnnotationLayers = List.empty, previousVersion = None) layerName = annotationLayerParameters.name.getOrElse( AnnotationLayer.defaultNameForType(annotationLayerParameters.typ)) @@ -293,11 +295,7 @@ class AnnotationService @Inject()( for { dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.noAccessById" newAnnotationId = ObjectId.generate - annotationLayers <- createLayersForExplorational( - dataset, - newAnnotationId, - annotationLayerParameters, - existingAnnotationLayers = List.empty) ?~> "annotation.createTracings.failed" + annotationLayers <- createLayersForExplorational(dataset, newAnnotationId, annotationLayerParameters) ?~> "annotation.createTracings.failed" teamId <- selectSuitableTeam(user, dataset) ?~> 
"annotation.create.forbidden" annotation = Annotation(newAnnotationId, datasetId, None, teamId, user._id, annotationLayers) _ <- annotationDAO.insertOne(annotation) @@ -610,10 +608,7 @@ class AnnotationService @Inject()( case Some(_) if skipVolumeData => Fox.successful(None) case Some(tracingId) => tracingStoreClient - .getVolumeData(tracingId, - version = None, - volumeDataZipFormat = volumeDataZipFormat, - voxelSize = dataset.voxelSize) + .getVolumeData(tracingId, volumeDataZipFormat = volumeDataZipFormat, voxelSize = dataset.voxelSize) .map(Some(_)) } } yield tracingDataObjects diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index aabc0e6c8e6..21aafa0d73b 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -37,12 +37,15 @@ class WKRemoteTracingStoreClient( private def baseInfo = s" Dataset: ${dataset.name} Tracingstore: ${tracingStore.url}" - def getSkeletonTracing(annotationLayer: AnnotationLayer, version: Option[Long]): Fox[FetchedAnnotationLayer] = { + def getSkeletonTracing(annotationId: ObjectId, + annotationLayer: AnnotationLayer, + version: Option[Long]): Fox[FetchedAnnotationLayer] = { logger.debug("Called to get SkeletonTracing." 
+ baseInfo) for { _ <- bool2Fox(annotationLayer.typ == AnnotationLayerType.Skeleton) ?~> "annotation.download.fetch.notSkeleton" skeletonTracing <- rpc(s"${tracingStore.url}/tracings/skeleton/${annotationLayer.tracingId}") .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .withLongTimeout .getWithProtoResponse[SkeletonTracing](SkeletonTracing) @@ -196,7 +199,8 @@ class WKRemoteTracingStoreClient( } yield tracingId } - def getVolumeTracing(annotationLayer: AnnotationLayer, + def getVolumeTracing(annotationId: ObjectId, + annotationLayer: AnnotationLayer, version: Option[Long] = None, skipVolumeData: Boolean, volumeDataZipFormat: VolumeDataZipFormat, @@ -207,12 +211,14 @@ class WKRemoteTracingStoreClient( tracingId = annotationLayer.tracingId tracing <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId") .addQueryString("token" -> RpcTokenHolder.webknossosToken) + .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .getWithProtoResponse[VolumeTracing](VolumeTracing) data <- Fox.runIf(!skipVolumeData) { rpc(s"${tracingStore.url}/tracings/volume/$tracingId/allDataZip").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("volumeDataZipFormat" -> volumeDataZipFormat.toString) + .addQueryString("annotationId" -> annotationId.toString) .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("voxelSizeFactor", voxelSize.map(_.factor.toUriLiteral)) .addQueryStringOptional("voxelSizeUnit", voxelSize.map(_.unit.toString)) @@ -223,7 +229,6 @@ class WKRemoteTracingStoreClient( } def getVolumeData(tracingId: String, - version: Option[Long] = None, volumeDataZipFormat: VolumeDataZipFormat, voxelSize: Option[VoxelSize]): Fox[Array[Byte]] = { logger.debug("Called to get volume data." 
+ baseInfo) @@ -231,7 +236,6 @@ class WKRemoteTracingStoreClient( data <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId/allDataZip").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("volumeDataZipFormat" -> volumeDataZipFormat.toString) - .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("voxelSizeFactor", voxelSize.map(_.factor.toUriLiteral)) .addQueryStringOptional("voxelSizeUnit", voxelSize.map(_.unit.toString)) .getWithBytesResponse diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 96577b4d404..7dd5fa80389 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -28,12 +28,11 @@ class EditableMappingController @Inject()( editableMappingService: EditableMappingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller { - def editableMappingInfo(tracingId: String, version: Option[Long]): Action[AnyContent] = + def editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId, version) @@ -65,12 +64,11 @@ class EditableMappingController @Inject()( } } - def 
agglomerateIdsForSegments(tracingId: String, version: Option[Long]): Action[ListOfLong] = + def agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long]): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) annotation <- annotationService.get(annotationId, version) tracing <- annotationService.findVolume(annotationId, tracingId, version) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 0889f7d5ed2..c451cea2132 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -67,12 +67,11 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin } } - def get(tracingId: String, version: Option[Long]): Action[AnyContent] = + def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findSkeleton(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index ed4223553f5..3d7a17c27eb 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -102,12 +102,11 @@ class VolumeTracingController @Inject()( } } - def get(tracingId: String, version: Option[Long]): Action[AnyContent] = + def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) } @@ -187,7 +186,6 @@ class VolumeTracingController @Inject()( def allDataZip(tracingId: String, volumeDataZipFormat: String, - version: Option[Long], voxelSizeFactor: Option[String], voxelSizeUnit: Option[String]): Action[AnyContent] = Action.async { implicit request => @@ -195,7 +193,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") + tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) diff --git 
a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 2a55b74ecf0..b9541f4ee67 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -19,8 +19,8 @@ POST /annotation/mergedFromIds POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save(newTracingId: Option[String]) POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]) POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) -GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, version: Option[Long]) -GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, version: Option[Long], voxelSize: Option[String], voxelSizeUnit: Option[String]) +GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, annotationId: String, version: Option[Long]) +GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, voxelSize: Option[String], voxelSizeUnit: Option[String]) POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) POST 
/volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) @@ -34,9 +34,9 @@ POST /volume/getMultiple POST /volume/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromContents # Editable Mappings -GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, version: Option[Long]) +GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long]) GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.segmentIdsForAgglomerate(tracingId: String, agglomerateId: Long) -POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String, version: Option[Long]) +POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long]) POST /mapping/:tracingId/agglomerateGraphMinCut @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphMinCut(tracingId: String) POST /mapping/:tracingId/agglomerateGraphNeighbors @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateGraphNeighbors(tracingId: String) GET /mapping/:tracingId/agglomerateSkeleton/:agglomerateId @com.scalableminds.webknossos.tracingstore.controllers.EditableMappingController.agglomerateSkeleton(tracingId: String, agglomerateId: Long) @@ -71,6 +71,6 @@ GET /volume/zarr3_experimental/:tracingId/:mag/:coordinates POST /skeleton/save 
@com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.save() POST /skeleton/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.saveMultiple() POST /skeleton/mergedFromContents @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.mergedFromContents -GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, version: Option[Long]) +GET /skeleton/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.get(tracingId: String, annotationId: String, version: Option[Long]) POST /skeleton/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.duplicate(tracingId: String, editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) POST /skeleton/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.SkeletonTracingController.getMultiple From 406d7ee9f083ead75feade17ce2b98918adff7af Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 12:18:56 +0100 Subject: [PATCH 282/361] set skeletonMayHavePendingUpdates to None after applying updates --- webknossos-datastore/proto/Annotation.proto | 2 +- .../tracingstore/annotation/AnnotationWithTracings.scala | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 1938155bb95..9e276a67a42 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -12,7 +12,7 @@ message AnnotationProto { required int64 version = 2; repeated AnnotationLayerProto annotationLayers = 3; required int64 earliestAccessibleVersion = 4; - optional bool skeletonMayHavePendingUpdates = 5; + optional bool skeletonMayHavePendingUpdates = 5; // relevant only for annotations migrated by 
https://github.com/scalableminds/webknossos/pull/7917 } message AnnotationLayerProto { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 758439035d1..ea7c1cc0837 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -115,7 +115,8 @@ case class AnnotationWithTracings( case Left(t: SkeletonTracing) => Left(t.withVersion(newVersion)) case Right(t: VolumeTracing) => Right(t.withVersion(newVersion)) } - this.copy(annotation = annotation.copy(version = newVersion), tracingsById = tracingsUpdated.toMap) + this.copy(annotation = annotation.copy(version = newVersion, skeletonMayHavePendingUpdates = None), + tracingsById = tracingsUpdated.toMap) } def withNewUpdaters(materializedVersion: Long, targetVersion: Long): AnnotationWithTracings = { From 0fecca6051892c35e0e1020f1b811d3f45c60228 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 13:58:29 +0100 Subject: [PATCH 283/361] migration: set the bool only where it makes sense --- .../migration.py | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index fc5454c896a..0eb75d6c7f5 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -73,7 +73,6 @@ def migrate_annotation(self, annotation): mapping_id_map = self.build_mapping_id_map(annotation) layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, 
mapping_id_map) - logger.info(f"saved materialized versions {materialized_versions}") if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") self.create_and_save_annotation_proto(annotation, materialized_versions) @@ -147,7 +146,6 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers unified_version += 1 version_mapping[tracing_id][version] = unified_version - logger.info(f"saving update v{unified_version}") self.save_update_group(annotation['_id'], unified_version, update_group) if element_index + 1 < len(all_update_groups[layer_index]): @@ -295,7 +293,6 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe skeleton.version = new_version value_bytes = skeleton.SerializeToString() materialized_versions_unified.append(new_version) - logger.info(f"saving materialized skeleton {new_version}") self.save_bytes(collection, tracing_id, new_version, value_bytes) return materialized_versions_unified @@ -316,7 +313,6 @@ def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVers volume.mappingName = tracing_id value_bytes = volume.SerializeToString() materialized_versions_unified.append(new_version) - logger.info(f"saving materialized volume {new_version}") self.save_bytes(collection, tracing_id, new_version, value_bytes) return materialized_versions_unified @@ -409,12 +405,14 @@ def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mappi ) def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int]): + skeleton_may_have_pending_updates = self.skeleton_may_have_pending_updates(annotation) for version in materialized_versions: annotationProto = AnnotationProto.AnnotationProto() annotationProto.description = annotation["description"] or "" annotationProto.version = version annotationProto.earliestAccessibleVersion = 0 # TODO different for merged editable 
mappings - annotationProto.skeletonMayHavePendingUpdates = True # TODO set this to true less often (e.g. not on single-layer, or when there is no skeleton) + if skeleton_may_have_pending_updates: + annotationProto.skeletonMayHavePendingUpdates = True for tracing_id, tracing_type in annotation["layers"].items(): layer_proto = AnnotationProto.AnnotationLayerProto() layer_proto.tracingId = tracing_id @@ -426,6 +424,17 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Se annotationProto.annotationLayers.append(layer_proto) self.save_bytes(collection="annotations", key=annotation["_id"], version=version, value=annotationProto.SerializeToString()) + def skeleton_may_have_pending_updates(self, annotation) -> bool: + # Skeletons in the old code had their updates applied lazily. + # Thus, the current materialized skeleton may not be up to date + # But since we are writing materialized annotationProto for every materialized version from every layer + # the skeleton must be marked as skeletonMayHavePendingUpdates + # We do this always, except if there is no skeleton, + # or if it is the only layer (then the materialized set matches) + if len(annotation["layers"]) < 2: + return False + return "Skeleton" in annotation["layers"].values() + def read_annotation_list(self): before = time.time() start_time = datetime.datetime.now() From d0248b4f3443f6b7dac6e4005df00de05f24a775 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 14:10:19 +0100 Subject: [PATCH 284/361] migration: set earliest_accessible_version for editable mapping annotations --- .../migration.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 0eb75d6c7f5..052649faba4 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -75,7 +75,7 
@@ def migrate_annotation(self, annotation): materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") - self.create_and_save_annotation_proto(annotation, materialized_versions) + self.create_and_save_annotation_proto(annotation, materialized_versions, mapping_id_map) if time.time() - before > 1 or self.args.verbose: log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) except Exception: @@ -404,13 +404,21 @@ def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mappi transform_key=partial(self.replace_before_first_slash, tracing_id) ) - def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int]): + def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int], mapping_id_map: MappingIdMap): skeleton_may_have_pending_updates = self.skeleton_may_have_pending_updates(annotation) + earliest_accessible_version = 0 + if len(mapping_id_map) > 0: + # An editable mapping exists in this annotation. + # Merged editable mappings have updates in non-chronological order, + # so accessing their merged update history will lead to unexpected behavior. + # So we forbid it. + earliest_accessible_version = max(materialized_versions) + # We write an annotationProto object for every materialized version of every layer. 
for version in materialized_versions: annotationProto = AnnotationProto.AnnotationProto() annotationProto.description = annotation["description"] or "" annotationProto.version = version - annotationProto.earliestAccessibleVersion = 0 # TODO different for merged editable mappings + annotationProto.earliestAccessibleVersion = earliest_accessible_version if skeleton_may_have_pending_updates: annotationProto.skeletonMayHavePendingUpdates = True for tracing_id, tracing_type in annotation["layers"].items(): From f4b6068321b20cefc3d6276fc1f1c5ff8a9257e2 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 14:37:58 +0100 Subject: [PATCH 285/361] don't fail on loading addSegmentIndex and compactVolumeActions --- .../webknossos/tracingstore/annotation/UpdateActions.scala | 5 +++++ .../tracingstore/tracings/volume/VolumeUpdateActions.scala | 1 + 2 files changed, 6 insertions(+) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala index e0aa026db5b..fd2db0280e7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateActions.scala @@ -72,6 +72,7 @@ object UpdateAction { case "deleteSegment" => deserialize[DeleteSegmentVolumeAction](jsonValue) case "deleteSegmentData" => deserialize[DeleteSegmentDataVolumeAction](jsonValue) case "updateMappingName" => deserialize[UpdateMappingNameVolumeAction](jsonValue) + case "addSegmentIndex" => deserialize[AddSegmentIndexVolumeAction](jsonValue) // Editable Mapping case "mergeAgglomerate" => deserialize[MergeAgglomerateUpdateAction](jsonValue) @@ -168,6 +169,10 @@ object UpdateAction { Json.obj("name" -> "updateSegmentGroups", "value" -> Json.toJson(s)(UpdateSegmentGroupsVolumeAction.jsonFormat)) case s: 
UpdateMappingNameVolumeAction => Json.obj("name" -> "updateMappingName", "value" -> Json.toJson(s)(UpdateMappingNameVolumeAction.jsonFormat)) + case s: AddSegmentIndexVolumeAction => + Json.obj("name" -> "addSegmentIndex", "value" -> Json.toJson(s)(AddSegmentIndexVolumeAction.jsonFormat)) + case s: CompactVolumeUpdateAction => + Json.toJson(s) // Editable Mapping case s: SplitAgglomerateUpdateAction => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index d977e0cd4f0..f7cb2a2808f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -169,6 +169,7 @@ case class ImportVolumeDataVolumeAction(actionTracingId: String, tracing.copy(largestSegmentId = largestSegmentId) } +// The current code no longer creates these actions, but they are in the history of some volume annotations. 
case class AddSegmentIndexVolumeAction(actionTracingId: String, actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, From f6d75543b52f968977ec0eacb27bb026af08a921 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 15:14:42 +0100 Subject: [PATCH 286/361] checkpoints for migration --- .../main.py | 1 + .../migration.py | 58 ++++++++++++------- .../utils.py | 24 ++++---- 3 files changed, 49 insertions(+), 34 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 2590da14469..221e1646142 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -20,6 +20,7 @@ def main(): parser.add_argument("--postgres", help="Postgres connection specifier.", type=str, default="postgres@localhost:5432/webknossos") parser.add_argument("--previous_start", help="Previous run start time. Example: 2024-11-27 10:37:30.171083", type=str) parser.add_argument("--count_versions", help="Instead of migrating, only count materialized versions of the annotation", action="store_true") + parser.add_argument("--previous_checkpoints", help="Supply checkpoints file of a previous run to resume", type=str) parser.add_argument("--verbose", "-v", help="Print for every annotation", action="store_true") args = parser.parse_args() if args.dst is None and not args.dry: diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 052649faba4..7947a72c9cf 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -4,6 +4,7 @@ import math import logging import datetime +from pathlib import Path import time from typing import Dict, Tuple, List, Optional, Callable, Set from rich.progress import track @@ -17,10 +18,11 @@ import VolumeTracing_pb2 as Volume import 
SkeletonTracing_pb2 as Skeleton import Annotation_pb2 as AnnotationProto -from utils import log_since, batch_range, humanize_time_diff +from utils import log_since, batch_range, format_duration, time_str from connections import connect_to_fossildb, connect_to_postgres, assert_grpc_success -logger = logging.getLogger(__name__) +logger = logging.getLogger("migration-logs") +checkpoint_logger = logging.getLogger("migration-checkpoints") LayerVersionMapping = Dict[str, Dict[int, int]] # tracing id to (old version to new version) @@ -46,6 +48,7 @@ def __init__(self, args): def run(self): self.before = time.time() annotations = self.read_annotation_list() + self.setup_checkpoint_logging() self.done_count = 0 self.failure_count = 0 self.total_count = len(annotations) @@ -78,6 +81,7 @@ def migrate_annotation(self, annotation): self.create_and_save_annotation_proto(annotation, materialized_versions, mapping_id_map) if time.time() - before > 1 or self.args.verbose: log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) + checkpoint_logger.info(annotation['_id']) except Exception: logger.exception(f"Exception while migrating annotation {annotation['_id']}:") with self.failure_count_lock: @@ -244,8 +248,6 @@ def update_collection_for_layer_type(self, layer_type): return "volumeUpdates" def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> Set[int]: - #newest_to_materialize = self.get_newest_to_materialize(annotation, layer_version_mapping, mapping_id_map) - materialized_versions = set() for tracing_id, tracing_type in annotation["layers"].items(): materialized_versions_of_layer = \ @@ -253,21 +255,6 @@ def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_map materialized_versions.update(materialized_versions_of_layer) return materialized_versions - def get_newest_to_materialize(self, 
annotation, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> int: - newest_to_materialize = 0 - for tracing_id, tracing_type in annotation["layers"].items(): - if tracing_type == "Skeleton": - collection = "skeletons" - else: - collection = "volumes" - # TODO what if the newest materialized is dropped because of a revert? - newest_materialized = layer_version_mapping[tracing_id][self.get_newest_version(tracing_id, collection)] - newest_to_materialize = min(newest_to_materialize, newest_materialized) - for editable_mapping_id in mapping_id_map.values(): - newest_materialized = layer_version_mapping[editable_mapping_id][self.get_newest_version(editable_mapping_id, "editableMappingsInfo")] - newest_to_materialize = min(newest_to_materialize, newest_materialized) - return newest_to_materialize - def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: if layer_type == "Skeleton": return self.migrate_skeleton_proto(tracing_id, layer_version_mapping) @@ -444,6 +431,7 @@ def skeleton_may_have_pending_updates(self, annotation) -> bool: return "Skeleton" in annotation["layers"].values() def read_annotation_list(self): + checkpoint_set = self.read_checkpoints() before = time.time() start_time = datetime.datetime.now() previous_start_label = "" @@ -482,9 +470,12 @@ def read_annotation_list(self): GROUP BY a._id, a.name, a.description, a.created, a.modified """ cursor.execute(query) - annotations += cursor.fetchall() + rows = cursor.fetchall() + for row in rows: + if len(checkpoint_set) == 0 or row["_id"] not in checkpoint_set: + annotations.append(row) if annotation_count != len(annotations): - logger.info(f"Note that only {len(annotations)} of the {annotation_count} annotations have layers. 
Skipping zero-layer annotations.") + logger.info(f"Using {len(annotations)} of the full {annotation_count} annotations (after filtering out zero-layer and already-checkpointed annotations).") log_since(before, "Loading annotation infos from postgres") return annotations @@ -505,7 +496,30 @@ def get_progress(self) -> str: duration = time.time() - self.before if done_count > 0: etr = duration / done_count * (self.total_count - done_count) - etr_formatted = f". ETR {humanize_time_diff(etr)})" + etr_formatted = f". ETR {format_duration(etr)})" else: etr_formatted = ")" return f". ({done_count}/{self.total_count} = {percentage:.1f}% done{etr_formatted}" + + def read_checkpoints(self) -> Set[str]: + if self.args.previous_checkpoints is None: + return set() + with open(self.args.previous_checkpoints, 'r') as previous_checkpoints_file: + previous_checkpoints = set(line.strip() for line in previous_checkpoints_file) + logger.info(f"Using checkpoints from previous run with {len(previous_checkpoints)} entries.") + return previous_checkpoints + + def setup_checkpoint_logging(self): + # We are abusing the logging module to write the checkpoints, as they are thread-safe and provide a file-handler + checkpoint_logger.setLevel(logging.INFO) + checkpoints_path = Path("checkpoints") + checkpoints_path.mkdir(exist_ok=True) + if self.args.previous_checkpoints is not None: + checkpoint_file = self.args.previous_checkpoints + logger.info(f"Appending to supplied checkpoint file at {checkpoint_file}") + else: + checkpoint_file = f"{checkpoints_path}/{time_str()}.log" + logger.info(f"Writing checkpoint file at {checkpoint_file}") + checkpoints_file_handler = logging.FileHandler(checkpoint_file) + checkpoint_logger.addHandler(checkpoints_file_handler) + diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py index b2cd81e092c..91def43fcba 100644 --- a/tools/migration-unified-annotation-versioning/utils.py +++ 
b/tools/migration-unified-annotation-versioning/utils.py @@ -6,34 +6,34 @@ from datetime import datetime from pathlib import Path -logger = logging.getLogger(__name__) +logger = logging.getLogger("migration-logs") def setup_logging(): - root = logging.getLogger() - root.setLevel(logging.DEBUG) + logger.setLevel(logging.DEBUG) formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(threadName)-24s %(message)s") stdout_handler = logging.StreamHandler(sys.stdout) stdout_handler.setLevel(logging.DEBUG) stdout_handler.setFormatter(formatter) - root.addHandler(stdout_handler) - - time_str = datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f") + logger.addHandler(stdout_handler) logs_path = Path("logs") logs_path.mkdir(exist_ok=True) - - file_handler = logging.FileHandler(f"logs/{time_str}.log") - stdout_handler.setLevel(logging.DEBUG) + file_handler = logging.FileHandler(f"{logs_path}/{time_str()}.log") + file_handler.setLevel(logging.DEBUG) file_handler.setFormatter(formatter) - root.addHandler(file_handler) + logger.addHandler(file_handler) + + +def time_str() -> str: + return datetime.now().strftime("%Y-%m-%d_%H-%M-%S.%f") def log_since(before, label: str, postfix: str = "") -> None: diff = time.time() - before - logger.info(f"{label} took {humanize_time_diff(diff)}{postfix}") + logger.info(f"{label} took {format_duration(diff)}{postfix}") def batch_range( @@ -48,7 +48,7 @@ def batch_range( return -def humanize_time_diff(seconds: float) -> str: +def format_duration(seconds: float) -> str: def pluralize(string: str, amount: int) -> str: return string if amount == 1 else string + "s" From a0ff64ebecdd5c37023288e0591a50551250d9d0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 9 Dec 2024 15:17:52 +0100 Subject: [PATCH 287/361] cleanup in migration --- .../generated/__init__.py | 0 tools/migration-unified-annotation-versioning/requirements.txt | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) delete mode 100644 
tools/migration-unified-annotation-versioning/generated/__init__.py diff --git a/tools/migration-unified-annotation-versioning/generated/__init__.py b/tools/migration-unified-annotation-versioning/generated/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tools/migration-unified-annotation-versioning/requirements.txt b/tools/migration-unified-annotation-versioning/requirements.txt index b9ccd512c41..db02b494a13 100644 --- a/tools/migration-unified-annotation-versioning/requirements.txt +++ b/tools/migration-unified-annotation-versioning/requirements.txt @@ -1,6 +1,6 @@ grpcio argparse -psycopg2 +psycopg2-binary protobuf rich msgspec From 2109a68c9f35cfa101be61f96e50845876f2613e Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 9 Dec 2024 17:44:05 +0100 Subject: [PATCH 288/361] pass annotationId to routes that require it (back-end cannot find the annotation on version restore, though) --- frontend/javascripts/admin/admin_rest_api.ts | 19 ++++++++++++------- .../bucket_data_handling/wkstore_adapter.ts | 4 +++- .../oxalis/model/sagas/mapping_saga.ts | 1 + .../oxalis/model/sagas/proofread_saga.ts | 2 ++ .../oxalis/model_initialization.ts | 4 +++- .../javascripts/oxalis/view/version_list.tsx | 16 +--------------- 6 files changed, 22 insertions(+), 24 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 71b4e8620e6..f80da9b70e5 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -719,14 +719,13 @@ export async function acquireAnnotationMutex( } export async function getTracingForAnnotationType( - // todop: revert to APIAnnotation - annotation: { tracingStore: { url: string } }, + annotation: APIAnnotation, annotationLayerDescriptor: AnnotationLayerDescriptor, version?: number | null | undefined, ): Promise { const { tracingId, typ } = annotationLayerDescriptor; const tracingType = typ.toLowerCase() as 
"skeleton" | "volume"; - const params = new URLSearchParams(); + const params = new URLSearchParams({ annotationId: annotation.id }); if (version != null) { params.append("version", version.toString()); } @@ -1451,10 +1450,15 @@ export function fetchMapping( export function getEditableMappingInfo( tracingStoreUrl: string, tracingId: string, + annotationId: string, ): Promise { - return doWithToken((token) => - Request.receiveJSON(`${tracingStoreUrl}/tracings/mapping/${tracingId}/info?token=${token}`), - ); + return doWithToken((token) => { + const params = new URLSearchParams({ + token, + annotationId: `${annotationId}`, + }); + return Request.receiveJSON(`${tracingStoreUrl}/tracings/mapping/${tracingId}/info?${params}`); + }); } export function getPositionForSegmentInAgglomerate( @@ -1961,9 +1965,10 @@ export async function getAgglomeratesForSegmentsFromTracingstore, + annotationId: string, version?: number | null | undefined, ): Promise { - const params = new URLSearchParams(); + const params = new URLSearchParams({ annotationId }); if (version != null) { params.append("version", version.toString()); } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index aea155d7861..172a31fb2d1 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -101,13 +101,15 @@ export async function requestWithFallback( const organization = state.dataset.owningOrganization; const dataStoreHost = state.dataset.dataStore.url; const tracingStoreHost = state.tracing.tracingStore.url; + const { annotationId } = state.tracing; const getDataStoreUrl = (optLayerName?: string) => `${dataStoreHost}/data/datasets/${organization}/${datasetDirectoryName}/layers/${ optLayerName || layerInfo.name }`; - const getTracingStoreUrl = () => 
`${tracingStoreHost}/tracings/volume/${layerInfo.name}`; + const getTracingStoreUrl = () => + `${tracingStoreHost}/tracings/volume/${layerInfo.name}?annotationId=${annotationId}`; const maybeVolumeTracing = "tracingId" in layerInfo && layerInfo.tracingId != null diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index 0782a1b8535..e67cd4aadf9 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -463,6 +463,7 @@ function* updateLocalHdf5Mapping( annotation.tracingStore.url, editableMapping.tracingId, Array.from(newSegmentIds), + annotation.annotationId, annotation.version, ) : yield* call( diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 2ce93a79633..6f93c64d76d 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -1281,6 +1281,7 @@ function* splitAgglomerateInMapping( .filter(([_segmentId, agglomerateId]) => agglomerateId === comparableSourceAgglomerateId) .map(([segmentId, _agglomerateId]) => segmentId); + const annotationId = yield* select((state) => state.tracing.annotationId); const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); // Ask the server to map the (split) segment ids. This creates a partial mapping // that only contains these ids. 
@@ -1289,6 +1290,7 @@ function* splitAgglomerateInMapping( tracingStoreUrl, volumeTracingId, splitSegmentIds, + annotationId, ); // Create a new mapping which is equal to the old one with the difference that diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index b14139a20cb..f864ed035a2 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -229,6 +229,7 @@ export async function initialize( const editableMappings = await fetchEditableMappings( annotation.tracingStore.url, serverVolumeTracings, + annotation.id, ); if (annotationProto == null) { // Satisfy TS. annotationProto should always exist if annotation exists. @@ -274,10 +275,11 @@ async function fetchParallel( async function fetchEditableMappings( tracingStoreUrl: string, serverVolumeTracings: ServerVolumeTracing[], + annotationId: string, ): Promise { const promises = serverVolumeTracings .filter((tracing) => tracing.hasEditableMapping) - .map((tracing) => getEditableMappingInfo(tracingStoreUrl, tracing.id)); + .map((tracing) => getEditableMappingInfo(tracingStoreUrl, tracing.id, annotationId)); return Promise.all(promises); } diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index 436884d01bb..c135dee1bc5 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -63,21 +63,7 @@ export async function previewVersion(version?: number) { const params = new URLSearchParams(); params.append("showVersionRestore", "true"); params.append("version", `${version}`); - // todop: do this - // location.href = `${location.origin}/annotations/${annotationId}?${params}${location.hash}`; - - // todop: remove this (it's only for testing) - for (const layer of annotationProto.annotationLayers) { - await getTracingForAnnotationType( - state.tracing, - { - name: 
"irrelevant hopefully", - tracingId: layer.tracingId, - typ: layer.type, - }, - version, - ); - } + location.href = `${location.origin}/annotations/${annotationId}?${params}${location.hash}`; return; } From e688945874c31e96d8db13b958c9505f99294941 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 10 Dec 2024 09:29:53 +0100 Subject: [PATCH 289/361] also require annotation id in volume data request --- .../tracingstore/controllers/VolumeTracingController.scala | 3 +-- .../conf/com.scalableminds.webknossos.tracingstore.routes | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 3d7a17c27eb..6515e3c6a8e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -210,12 +210,11 @@ class VolumeTracingController @Inject()( } } - def data(tracingId: String): Action[List[WebknossosDataRequest]] = + def data(annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") (data, indices) <- if (tracing.getHasEditableMapping) { val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes 
b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index b9541f4ee67..160d0f9b779 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -21,7 +21,7 @@ POST /volume/:tracingId/initialData POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, annotationId: String, version: Option[Long]) GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, voxelSize: Option[String], voxelSizeUnit: Option[String]) -POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) +POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String, annotationId: String) POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, minMag: Option[Int], maxMag: Option[Int], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) From bbdd8b7bcd8f8b87b07d802d9764990ab7559acb Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 10 Dec 2024 09:59:55 +0100 Subject: [PATCH 290/361] fix annotationId param for data route --- 
.../bucket_data_handling/wkstore_adapter.ts | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index 172a31fb2d1..c1e4662b854 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -101,15 +101,13 @@ export async function requestWithFallback( const organization = state.dataset.owningOrganization; const dataStoreHost = state.dataset.dataStore.url; const tracingStoreHost = state.tracing.tracingStore.url; - const { annotationId } = state.tracing; const getDataStoreUrl = (optLayerName?: string) => `${dataStoreHost}/data/datasets/${organization}/${datasetDirectoryName}/layers/${ optLayerName || layerInfo.name }`; - const getTracingStoreUrl = () => - `${tracingStoreHost}/tracings/volume/${layerInfo.name}?annotationId=${annotationId}`; + const getTracingStoreUrl = () => `${tracingStoreHost}/tracings/volume/${layerInfo.name}`; const maybeVolumeTracing = "tracingId" in layerInfo && layerInfo.tracingId != null @@ -126,7 +124,13 @@ export async function requestWithFallback( const requestUrl = shouldUseDataStore ? getDataStoreUrl(maybeVolumeTracing?.fallbackLayer) : getTracingStoreUrl(); - const bucketBuffers = await requestFromStore(requestUrl, layerInfo, batch, maybeVolumeTracing); + const bucketBuffers = await requestFromStore( + requestUrl, + layerInfo, + batch, + maybeVolumeTracing, + maybeVolumeTracing != null ? state.tracing.annotationId : undefined, + ); const missingBucketIndices = getNullIndices(bucketBuffers); // If buckets could not be found on the tracing store (e.g. this happens when the buckets @@ -156,6 +160,7 @@ export async function requestWithFallback( layerInfo, fallbackBatch, maybeVolumeTracing, + maybeVolumeTracing != null ? 
state.tracing.annotationId : undefined, true, ); return bucketBuffers.map((bucket, idx) => { @@ -172,6 +177,7 @@ export async function requestFromStore( layerInfo: DataLayerType, batch: Array, maybeVolumeTracing: VolumeTracing | null | undefined, + maybeAnnotationId: string | undefined, isVolumeFallback: boolean = false, ): Promise> { const state = Store.getState(); @@ -215,8 +221,14 @@ export async function requestFromStore( try { return await doWithToken(async (token) => { const startingTime = window.performance.now(); + const params = new URLSearchParams({ + token, + }); + if (maybeAnnotationId != null) { + params.append("annotationId", maybeAnnotationId); + } const { buffer: responseBuffer, headers } = - await Request.sendJSONReceiveArraybufferWithHeaders(`${dataUrl}/data?token=${token}`, { + await Request.sendJSONReceiveArraybufferWithHeaders(`${dataUrl}/data?${params}`, { data: bucketInfo, timeout: REQUEST_TIMEOUT, showErrorToast: false, From 9237bda54c6e3ac9e4319b4b0a48188c5ebefa7c Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 10 Dec 2024 10:02:28 +0100 Subject: [PATCH 291/361] use annotation id also for access request --- .../tracingstore/controllers/EditableMappingController.scala | 4 ++-- .../tracingstore/controllers/SkeletonTracingController.scala | 2 +- .../tracingstore/controllers/VolumeTracingController.scala | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 7dd5fa80389..925e009ea7d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -31,7 +31,7 @@ class EditableMappingController @Inject()( def 
editableMappingInfo(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { tracing <- annotationService.findVolume(annotationId, tracingId) _ <- editableMappingService.assertTracingHasEditableMapping(tracing) @@ -67,7 +67,7 @@ class EditableMappingController @Inject()( def agglomerateIdsForSegments(tracingId: String, annotationId: String, version: Option[Long]): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { annotation <- annotationService.get(annotationId, version) tracing <- annotationService.findVolume(annotationId, tracingId, version) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index c451cea2132..6cc563ef9aa 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -70,7 +70,7 @@ class SkeletonTracingController @Inject()(skeletonTracingService: SkeletonTracin def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { tracing <- annotationService.findSkeleton(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 6515e3c6a8e..b47300b2ade 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -105,7 +105,7 @@ class VolumeTracingController @Inject()( def get(tracingId: String, annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { tracing <- annotationService.findVolume(annotationId, tracingId, version) ?~> Messages("tracing.notFound") } yield Ok(tracing.toByteArray).as(protobufMimeType) @@ -213,7 +213,7 @@ class VolumeTracingController @Inject()( def data(annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") (data, indices) <- if (tracing.getHasEditableMapping) { From bd1fd1b49c39ca4ed41fc434c2c9a41aedec17c6 Mon Sep 17 
00:00:00 2001 From: Florian M Date: Tue, 10 Dec 2024 10:14:48 +0100 Subject: [PATCH 292/361] fix switched ids --- .../tracingstore/controllers/VolumeTracingController.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index b47300b2ade..20af5081f6d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -210,7 +210,7 @@ class VolumeTracingController @Inject()( } } - def data(annotationId: String, tracingId: String): Action[List[WebknossosDataRequest]] = + def data(tracingId: String, annotationId: String): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { From d92472bacac72d579a055670fcd22d85b54e01c4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 10 Dec 2024 12:37:58 +0100 Subject: [PATCH 293/361] adapt e2e tests --- .../backend-snapshot-tests/annotations.e2e.ts | 14 +-- .../annotations.e2e.js.md | 102 +++++++++--------- .../annotations.e2e.js.snap | Bin 16858 -> 16857 bytes .../annotation/TSAnnotationService.scala | 1 - 4 files changed, 58 insertions(+), 59 deletions(-) diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index d678363df63..c8ed3bce4f7 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -77,14 +77,13 @@ test.serial("editAnnotation()", async (t) 
=> { const { visibility } = originalAnnotation; const newName = "new name"; const newVisibility = "Public"; - const newDescription = "new description"; await api.editAnnotation(annotationId, APIAnnotationTypeEnum.Explorational, { visibility: newVisibility, + name: newName, }); const editedAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); t.is(editedAnnotation.name, newName); t.is(editedAnnotation.visibility, newVisibility); - t.is(editedAnnotation.description, newDescription); t.is(editedAnnotation.id, annotationId); t.is(editedAnnotation.annotationLayers[0].typ, AnnotationLayerType.Skeleton); t.is(editedAnnotation.annotationLayers[0].tracingId, "ae417175-f7bb-4a34-8187-d9c3b50143af"); @@ -178,6 +177,7 @@ test.serial("Send update actions and compare resulting tracing", async (t) => { }); test("Send complex update actions and compare resulting tracing", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); + const { tracingId } = createdExplorational.annotationLayers[0]; const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 6)); const treeGroups = [ { @@ -197,9 +197,8 @@ test("Send complex update actions and compare resulting tracing", async (t) => { ], }, ]; - const someTracingId = "someTracingId"; - const createTreesUpdateActions = Array.from(diffTrees(someTracingId, {}, trees)); - const updateTreeGroupsUpdateAction = UpdateActions.updateTreeGroups(treeGroups, someTracingId); + const createTreesUpdateActions = Array.from(diffTrees(tracingId, {}, trees)); + const updateTreeGroupsUpdateAction = UpdateActions.updateTreeGroups(treeGroups, tracingId); const [saveQueue] = addVersionNumbers( createSaveQueueFromUpdateActions( [createTreesUpdateActions, [updateTreeGroupsUpdateAction]], @@ -215,8 +214,9 @@ test("Send complex update actions and compare resulting tracing", async (t) => { test("Update Metadata for Skeleton Tracing", async (t) => { const createdExplorational = await 
api.createExplorational(datasetId, "skeleton", false, null); + const { tracingId } = createdExplorational.annotationLayers[0]; const trees = createTreeMapFromTreeArray(generateDummyTrees(5, 6)); - const createTreesUpdateActions = Array.from(diffTrees("someTracingId", {}, trees)); + const createTreesUpdateActions = Array.from(diffTrees(tracingId, {}, trees)); const metadata = [ { key: "city", @@ -235,7 +235,7 @@ test("Update Metadata for Skeleton Tracing", async (t) => { ...trees[1], metadata, }; - const { tracingId } = createdExplorational.annotationLayers[0]; + const updateTreeAction = UpdateActions.updateTree(trees[1], tracingId); const [saveQueue] = addVersionNumbers( createSaveQueueFromUpdateActions([createTreesUpdateActions, [updateTreeAction]], 123456789), diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md index 48087c9ed20..cb8f480c3f6 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md @@ -1008,7 +1008,7 @@ Generated by [AVA](https://avajs.dev). url: 'http://localhost:9000', }, datasetId: '570b9f4e4bb848d0885ee711', - description: 'new description', + description: '', id: 'id', isLockedByOwner: false, modified: 'modified', @@ -1544,7 +1544,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3727, z: 1545, }, @@ -1564,7 +1564,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3727, z: 1545, }, @@ -1584,7 +1584,7 @@ Generated by [AVA](https://avajs.dev). 
interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3727, z: 1545, }, @@ -1604,7 +1604,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3727, z: 1545, }, @@ -1624,7 +1624,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3727, z: 1545, }, @@ -1705,7 +1705,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3726, z: 1545, }, @@ -1725,7 +1725,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3726, z: 1545, }, @@ -1745,7 +1745,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3726, z: 1545, }, @@ -1765,7 +1765,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3726, z: 1545, }, @@ -1785,7 +1785,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3726, z: 1545, }, @@ -1866,7 +1866,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3726, z: 1545, }, @@ -1886,7 +1886,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3726, z: 1545, }, @@ -1906,7 +1906,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3726, z: 1545, }, @@ -1926,7 +1926,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3726, z: 1545, }, @@ -1946,7 +1946,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3726, z: 1545, }, @@ -2027,7 +2027,7 @@ Generated by [AVA](https://avajs.dev). 
interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3725, z: 1545, }, @@ -2047,7 +2047,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3725, z: 1545, }, @@ -2067,7 +2067,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3725, z: 1545, }, @@ -2087,7 +2087,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3725, z: 1545, }, @@ -2107,7 +2107,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3725, z: 1545, }, @@ -2188,7 +2188,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3725, z: 1545, }, @@ -2208,7 +2208,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3725, z: 1545, }, @@ -2228,7 +2228,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3725, z: 1545, }, @@ -2248,7 +2248,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3725, z: 1545, }, @@ -2268,7 +2268,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3725, z: 1545, }, @@ -2377,7 +2377,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3727, z: 1545, }, @@ -2397,7 +2397,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3727, z: 1545, }, @@ -2417,7 +2417,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3727, z: 1545, }, @@ -2437,7 +2437,7 @@ Generated by [AVA](https://avajs.dev). 
interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3727, z: 1545, }, @@ -2457,7 +2457,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3727, z: 1545, }, @@ -2538,7 +2538,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3726, z: 1545, }, @@ -2558,7 +2558,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3726, z: 1545, }, @@ -2578,7 +2578,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3726, z: 1545, }, @@ -2598,7 +2598,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3726, z: 1545, }, @@ -2618,7 +2618,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3726, z: 1545, }, @@ -2699,7 +2699,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3726, z: 1545, }, @@ -2719,7 +2719,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3726, z: 1545, }, @@ -2739,7 +2739,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3726, z: 1545, }, @@ -2759,7 +2759,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3726, z: 1545, }, @@ -2779,7 +2779,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3726, z: 1545, }, @@ -2860,7 +2860,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3725, z: 1545, }, @@ -2880,7 +2880,7 @@ Generated by [AVA](https://avajs.dev). 
interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3725, z: 1545, }, @@ -2900,7 +2900,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3725, z: 1545, }, @@ -2920,7 +2920,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3725, z: 1545, }, @@ -2940,7 +2940,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3725, z: 1545, }, @@ -3039,7 +3039,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 9120, + x: 9286, y: 3725, z: 1545, }, @@ -3059,7 +3059,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 8120, + x: 8453, y: 3725, z: 1545, }, @@ -3079,7 +3079,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 7120, + x: 7620, y: 3725, z: 1545, }, @@ -3099,7 +3099,7 @@ Generated by [AVA](https://avajs.dev). interpolation: true, mag: 2, position: { - x: 6120, + x: 6786, y: 3725, z: 1545, }, @@ -3119,7 +3119,7 @@ Generated by [AVA](https://avajs.dev). 
interpolation: true, mag: 2, position: { - x: 5120, + x: 5953, y: 3725, z: 1545, }, diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap index 8a0b4fd925d493beaebeb156e2226ecc6b5a7516..42e2f533c0cd7d17c90440cc6fe13242b6535bef 100644 GIT binary patch literal 16857 zcmZ^JWmH>Dv^MTi+@-ifaiuWgN7zIOk}y z|LE;Faa*%Ubt;m_Za-Z-2EzYb$Y%w9R#XcAUSw)5xYRuTKxrwotnRUA#Dt@tLGwf( z2tTB_ts0owsx=qPNGM_U(F(N=Afi)`X64R^9e<{NKBPIv>nx+Crj{AYvg4p86!M@M9S(EDdK4;suNc1_2Eb<56~*s#73LnCF=g zZp@HWdR^2#4#$sRc8*R>RQzhl7m}x9q&!L%kaYtkp6<})%XQuYEQL~9U#_{lMXh^} z#56xcS_O+{%~hBQdyc+bUPX2GeO$TX+!}Kpqh5{YsBqQjI*$%D%Qx*K_rC#VB;3+| z5KxA#%3{DIb@jP(l}An)c=e%3ikDS{SHi)>*J5`6W5?*h##)1?TehmrqnmWcDEpyH zKg8!BslLxk+PgNss?yHx0b^|)9X)Nm3|&3J#UrkX$&8GjNA-U@bCF0eUt;J>J$;ss ze~qzEc@Z1zi^j)$dRA)I`jQ%AbQqa$dHUHEv^bx)@3|#vYCXPkJQ!7Nc|$7U(;3!dd6;)yE9mj(le$4zEy zzYaAYwoU5Wj*){D_Jw=ut?pZ*wu3buhCY#fNSjbY zaTT(2k4p8I_1q7%ThO^n=)MJ_Um(hn$ z6J5<)yDH*Le5F1D-}}SKvX93z+E_Ly1{`Qq6XO=xX^gLR1T6|w79Q}9C?2E-%`^xO zh$FiS4D6x#zwaGp*eVTOMxdj5?-I$bXk;B}&uHdM?5;uVeipLG!(kv}6WLD*b+PgD zV`1+UFa}hK&!VuiWV;iprZIR~7GUOTG|i(-=e)Oz<`S3?iWtXn$U`Nm+0@TYVPa}w zIuq;&rQ}i;|1JSoSngTxb;OiJ34wC2mmUbaZm|+9f8%{j&-s zKBa!KKMx#=LAN#N5EMe~J^*#CtD4X>{2QP-*(emRs>{N^7+|vL>zUKBJh}b%lXPjP z=0Z}l{^lXKQY(irwZI}jEp6VyE@Gual#04xCXoWz-qB^7)G;?}QH*A2li}xC(Ifl! 
zL)-Z7+3o7A&jv-sG;ae>_3_~~HptU64n#elzc|PpnAK4Z0xy!UKHI}vvzwju;LV-H zCj?iXi{7Nhm5)kgWtG=uZ#noEBqFg;UNH3Nby9x_jrYi@LFO5VE1!MXYwElM`@Yp1 ztugO+tyA7TDf&3(TsV6NxMVOb3G4fWpdt~{G=v=&Nn&YhDVeHo!zF zIb94p%@d_PRAS7OO-q^TG3IUD01I`;w>8IGlft4>LYi6ws()fqPJKi~RZq+_d>Te5 zXAMNgWb4oWWuiF*v2Olp@VHnXrczhtD32U)D*+;3G={G7s5TcaFWz5X50uld(z`=*_6!PcJI_seTR6jia~>CH=oQ~`{h^w; z`#8{0qn>x-+~;i%ukWh9sl+KIlUpH4TwT-Z>+)xV&Jm7YDazHfzy&NVV<;KU=)w;o}ne@BMb`17zb6m5qnv zckM6Ui5d=UTnGS}U$`|a6r&j7L2`s#m=E=KpvX!>Qgj0 z78mDvU1hLOdY~_nPx648Att8Kl_92E8&V-^JSx9C@k?~|I!-i)g(ctB*EZjEerv~; zRcU0b;iUr5>=1;{I$!U2A-kr-pNBX1Np`H+n@s&zSfj@arLFACD6I0wG?Gyo2&{QLx=Bfl3 z8DlU7wrZwTI1>>uT$LFMAP*u=B{BrUf zpE|b1PN8a#EjOn46O-9gI4l*n6}085fAdYJS&b0#8^vD`(LyVX;CqR!9jVbLU>m6@ z7p1C!1JE8j-x_%GNp8&77(#$8=>jy|{hHdhEy0#!!>iNW`o=_mDazm>kdC>B#rCez z+_!=!oof_pzOHEuYt+bE$vgejnp4s`>ea;B-#fi-0WIDYP>$OWmy{~y98u*8OT$%^ zR~q|$+Vm4oX&rie$hNxc>py}gvu>^Do$e}2$fXf(lLlEsOm{^nrFcIUVh3MV=JD#! z{|v-v^nsUhWG4HG=}h+hGB{6j#0c}ifC10$(5!Oz?F#Lp9^jPp-P;de+!6Rr4!zQ* zQXxo~=c$k)LL+@X71UZiKqMB=N3br(y&0;BP=etQwGefAG@6iEW%Q_gojS9xg-TH^ z)J3S=$to6&QV7tE=Ds|zEeNYe8*#C&AqBO*QNHQ=i0$VLrQm)t#76%*?`iy6*lLFE z;xkhjk>>-;$9t)XV3L}c84{PuuZ-C~fxeRpri9fa&f%oEg~{OnQbqB#Qg7;SZyO)n z&=mf76gBmcTZv|H(uSGd(>-Db2943jYz1PJI3*CgZT+T55t+%RqLy$aTgOkx^3*}r zWRwi7pE=RDq4w|Fc?$Fy5OE;Cd${T5y*jxu`MIoScP!CpXcS_7h)QKo=?@OvL**1Y zyT;ysrw|OD5>^NVpC9G%X3Ku;vkhO?*hFV@{yg|ng84%5hF>lT<327W=&m-9BD`-3 zCvO6f(AA?KffUaD0RyBq0jMY*;39og6xeW%Hk^R98M)d)HaR$NAqdK;2o;!{^=rg& zZe;O&?1&tws0`wbPRa-#NGOCk=bCTg3M!3UR~Pi3eUXB*5dsmZ$F$jeD@EU*k;kLa zGce#y-XVTSyL%g?`2CX-gPf1iJpM7@cVh=0no3Y3DsAg~di*mb>;#PjG#aNYrorFx z809f;T9G}&rnU&lw~6xpyaay@$nB#P7~W?Pl0@sgN2jU_O2qSho)^FXTwrF7r`{s;_C;$b_yrzaNt zMTv%r+x4asX9c$xafLaXNwy88N)^zDGJQWSgY`+ANFPv*m1GEJ=IB&IovwvQkT8Lu z2tw+HL#jfZi-0If-&&}G&=a{7nKK~hk(x?~3?%u|rQP<0oy?h+F``~?A>)OLlb#BSw6sth(2eBak))8+3d z3S{rNoqbofgEB}LugB8uH5@3~BPmW6nIk(6AxTE5Wxhuc0-E4*r1GF|YHSQ$tL<5rO_@_%g>(1mtvbs}{s0fdO1Jf6Yj1JIBUbM&ze?gjmI20)v$A 
za8-r3=Q=2%%8XA-7|E1SImScv6vln7SXt<~VVa!)Lky;~Isn9XqCs%%)<1$Gk_&Mn z3U&(D6W|gVinZ@t!@>W@+UxwG=im1QR?3%80;Kt!}biP*`ShTBn~->c{uHRWUPCi^eAS4ZYEC#{1NcL@t}+TJ{}gu&N|Jxt*va3x zm49ORUI~^MauN4UoIhFbE#3X_(&^mc;I)hp;w7iXrzmBiU2#HA6;r5C%4E0FeO!#4 zuwe2B^F&ig?oh`MW%T+suob@I*sjpsUN?=YkbIgIk)Dfr*Q@h&dZ&wKFb`F)L&Sv} zAPmF(RG+rkxr8P&mLs7Vr{?|EVyv&|JYLpH*}y%}E}_3=$i~-S?M=H9CELYYngM#J zA8kX)zI?1;NclNvPQHgIja~$Ep9zd5D{4+%Pm_tVr~=D7JhBF+H*GV6iBw=J>3{hm zY!_G~m?=n^nP!gX>C@y!N2fZws~xF>0wkEz_0qD}dN9+iDUo6NotX_@`iH?q z?r}3g$Dj>dJVOjYZ3&K_34Cs~Wj}@~8srk_52?=xrtk=I4sZ)-@ho1AgZVY4ZfE6S z&Fr6Ks1Yl_&8fN+WcV2WFzHy%(LbtdyhAe}Pg=>779QHI^A-^lrK-)>|5mjZn<~gD zB;HNNapIr&Dd2i6L!Gxp z1b8BY!;1bZ7#CEsHsNF66{Cdn2AslmOBJGgQ92TW=TKg;_T_nmhA!$M#YkFarKN?9Nh0mT9SWCGGDPOo;`I+ znt|0bA?H6riNrX(t8mX-z!JjuQ)rAf@mP}`ashvNxO{FZDJ0w+>1inQsl{BFON#MWVrj)ZnC(i*Sew}>9W3A$6`1kgYV8mF zOw4it{GF-L9ou_3{@`>RIo)EeK)q@))U}V0PGV8UB%PA@fAV^4eN3(=A7VttcfCc# zsCrOi&T&vuuc37{H_KEm-Oq*;E`eg6b?mzDmh@3z?B$8BzZJe+=%Vog zdY;y-x9Jf$8`*~ov51-_M8--eDv#u;19~u}g@KDWX%kH=I5+m9(>Ql=L3ZWiKf-8Q zVh-*`3#W1;e-M-ZRwc*4QIIDklV9;`pXMRE8g>#nph4~t!>7N#bdtfB-|BcM!i3o& zm77{Mqb`lMdbEH9l<2Ye>6G7Myh)XE0+Mm-q&##)^z@(@l-RS3L{1#Ae6VInXZp}p z%)yBEHPHN%Wmxt6AiZ;z$T+m7&Zo^+u38KGY$#Z3S$d-A@5{^d(UN8lFePBdU2XYg zdf5c(XW+kuWoi6Yf5Tq^k6yC{tWH(e0oEz)q<+6)9Z?{R;d=TM%f^HLhUhfKHiq5T zh+8q&7NN9q5>IIr+Tpvx9XB4%oqKl*i=}B*;j>7X{DsU3>?<;4mDMH8=Z(Wov9(;s zutRW(qi~@FSgGTkqBNZ!pV=N6I~$duB~$uVL)nFT%h2Ss5bK_*5thK0=uN@>76>Ox zmds%{pKK90%?SofsCAqFs(gdiy4;ff2xQ?8z8MPvHl~KM-yhS{P!nPY>(@sF(&Se} znEJqa@$|rnKBWVOz%PEnN)e4_fq)aJxRfMSIfDSXRXCynrIm~K{V^A^nN?U14y+q7 zBo5CKS7gwUF`cpmjNw2DHDK%|=5}V==M|ub+k14irjugKsuo!W#9yVEv#JMzB;U49PmPym!91%9Z*Y-q=7L)`$kFy>Bft zhx^~pI?zu@zlfB^)x;xwe~f`mO-A#hdmX8V8y1Ob@fFRo&kl&s1#3a&{}j>aNoZ{X zplfIT9NCC>Oa#-%=jYPGg&O+#K@I8dq(d@D2OEEP!#FL;LVOZQ+*=1N>tJ^D4l?2) zLW^hut^hzvIu}+5352U>3oRMtjv^`CGUv>yW5`ybE-uNkZ4LShvL)72x@&l`2DOoJ zOtmueega8i8ZZObc>}i)LT+Bkp7@t-~%Kwj#25 z2?djU;SG~Lv&?y`1FcWJA2Q7gsel=r(r4N_dbiu5z8q=@p6vW`Tsom-7UX=#BIu%H 
zD_CpqFqNsyX41D&q_&k&-h*G#D}pSW<@dt$=%^)g+Pi zs{U!@Ro?_hO(3?|`EEcpRDGv+5RGNiKpGD$S&>F#@e(RIGF!cs#}pkO3{ZFR_<33E z+h2#Z^7qmN&2@asSo*W<7h27OM+Ti>tm&^ip1&VMjiKvd*|=C`x?3Ktz#%lHb;$8y z*ut)`{|H*q+5lm}mGm-Rl&_IZOnqr*@XbFv-_A8W{K&?UZDe{O1d1DhpEd))cj8T6 zn2_=$H?QtV;fPv8ibB)dwuE2cA(s781Jp(H1`gOI7FSRIvzgt|7IQxDLA zwWSR<AsLxD1$gaqck%CF4KKO>pB zza}t?-I1z^3-D+D?RgGo?pp{l`gk`{jYESIkE%gg6)`4mR1;y}UwZ8gtHyhwrO}|Q ziy;6@u^~^`W*tE-qOv2uy&X-%yVvJl&;AWMgibJuA8!wB2t^eG5JqZoLM z`Lvw5-k|{G-&!`ZX$vfTxmK#)f=7HrdU+6jB1z|PKMhz5rXosO(T0>ln&uXrxO3s5e+p3sa1 zj7-?et}q-%xYqOhs$#wwQd$Q3z4#87XO)9$WHsDW8NG7eqNuFIDVa=sfH`9>b1HtY zAqk`3gplJy&H7o5r;vE?6n!~8$LRyl%t6eiGuB^xY{~P?O1!p?nn5LO+N!#(p!;l8 z1Fxt}XZ(bWeP6e6Zrw5upTOP%Wnk~J_{Nz#n_s)QM6_FO6E{8M5K`(s5lL_Zk{sa1 zc6yYbQfE^bHC{h<+q6EnWEjy-P7F`-aPHeK%t$!Z0au zn z-x85DDCDD{HLjLU9=GaGu~?oJ1}d1>3-I9{_trUwYBgZVb>2w(>2|f4pwP9tHGp{P zzeO~E0cE)t9Xd&nraDWo2ENWmvjnboC&d|0Ixh$_2x(EYmX{QK=_XLKwkD$U*k~J}R@}D2nFZ=%ujH04Z z&|5}A;g~0K9o9aukQ}uw`9k3142vmbp+++V3Twbt#Zp+|F4y+`-}Z1N*+;e>fd4Wo za}C4j;djC~0#dzI{!f*16 zGM{Zj>-R((Bh0>0&$P-xsZs_MpfsW}9*l8hmxe!ZtdxeIadfIsv)BAY`$kPZ{;H2^ z2^s)JM>$s;3^6-*a&~@mx1vi$ptI?Ez1D?>W+l%LR8X!|0R04EGPvpl{fw1T>b(Yz zK~6jRjrx>+=Ag}!&&%eegN0hX(sSnlI~8+h*FRmd8w^=3N`Tki1y=(P^yVHo`C5zY zQUg6cLC*-5#3Z-Rz!noaCx}_OA+>1Z?8W0im&$e)`jqqM!@y|Tsl}m5I15Eh^Hq*y zm>6kNBJf@r|8-t2SCsh8JlgH&@a^YZ9cXUnKU0ZS1XS>>Y73%fymV(57@=QefC$ZB zi#R#o@J#?Yk5h^F5a1 zPx@f?%iN#&uBXds*%FCwg_&%~s8ZPlV{=@vG(P2Ol&DPTvvh8u^k~4Evqu;#4_>-3 zf8d?33n;1^fAc8jljt-!))r`u-DxiPD(H|0{qP@!$}0caSTk|oTp*ZQIYuQ+{hcJ| zT(fF!FAAG#SO#X30@#9QH#ROyOsF5 z%~Ltpo-Ro+0nU$vuiW?i=D1$rP8(mi?wb!9lzf%&gW{xolY&mi7YcBDI{U2AO+NQ| zxg_8QpR@zfUep9(L3e$?(C?vC{#IIR@6B9ODfdU02Jo>zs3_G!2OV+EK0aIEhU%3q z%rS4e3rT{aWgaDxmKR{M_ua`5jZ7&&L{Q8_#w7WNqdA%C^*lGnbV-YOF;(f0M3l@j zRhWsXO3rd+e@Nl|PvxXYvf)~7R&Po|j1Ncob`dT-r;6!nVVIMI*=t4xZeoVR?{ z+fLCAldv3$znFY7h{DVR1Ur0BfSli)5n|cNOat45kQ0<&?E{k7uHOfoNIRF1E=(aY zFz)T}Kt01ZPL4%vDgr~k)&hD<-N&_(n%|MlKsFotLWL<{Qk6`ycxGR703}$aoha*s@7TYelWj!dhNv_b~HfS3RU%) 
zR+J!-r_ok)($=i5mt#R|LfzqioApF4!5bX zaSuaD2KDdl?>$|Mh3b|BtDd}#dJ|FpDozoijHZEQq-&*(m7odCPd`@` zAW7ek&nWzc#LV9G7FCiRU=pW&gjAubMZ)>T2Wuk*DO4F~&JL5u>ZMQj$o=qOAD0or zi7KrOoYsXr63>)}3z48z_!^3<$sUP+pDWX~{=_k%7A5x)oW-H{&#FSTk&N?;B1e86 zR#wH}l-|7@$0_0O4%w36nAnUE29lS2tj5a0#~N4wM{p5VZtJ-!4VG&18%{Tkhy}sy ze6;I%i1s)C7v&07I%b1zA>1VOh_rb~zF;R8t}oV}go*WB5W}bX*Az6rD)5jK%$2e? z7ww`7e6ZOD()=KMmQiYQcIO21qO>iS(-h}YC#&Q$EYbeFh^JF>-sD}a8B^&k%0Q#3 z8RIqAlXdA_!M{ac`_JwO;3K|~kLOo0G;{=W74PM!?)&~&P&pNF^LSKneO{iB6yO(G zT=-{2uAf8y)Oy6SCFWHMs{DR*qMxa(1?7gKBYv8hl z_uh>*1d-ujZU&vF%=CHSVJ>=a%slJrUd&9*S-ca}TVsuhR(-sg(RPZ#CvxugH*y@R zwk`?UrXcP1+kwsW)ep%DeQ~^hsd4&X;NXPa@}Z)B0xRRTu1?$OVw}+V-{-@vCxVio zWD0`ryCZmGUMbqyM9lMb;qTJ7f{HI+Y^z)U>6FavCu3#!UbB@{RwW0bWwJ}V0kV^L40yQ~sTr6o|Xjt+Ng7I1!8t_z>~#=(gV>zVIXdC`4F__C<*mZZA#1kT62{s>vX!7u{0o}VEi6OzFS}YS5x!UnMM&3`10}i;uGfb|T@?K^ zJN(BncGw2-y*e>>dTg%OKq0cOC(0~C&fy%jQ33K~spEI`zjf4|^yuer6spGMi>85w zo)9V>@8}7+v96RHY%fnJ-|HoF7c8_3)?s$fLRJ^R$BHlf2NQ&sUL!7vpEMa!5CE%s zKjkTnGZ~W_EXqV@a&2Ap`cx7uyJ(gJ^1Fua1}k%gjiGAmBCwJU;G)8m5qpghVs))I z^kf%4@`g^?Op(n>B& zf(%wN^k^LPi_YISIn{zRf9I|LddqsJskeHIbLHX*l^&Mx z&!$xl9nC`(b-Y_9OgdPlO3L3?vFbzEQy1UL`+Q7K1)iI$x=? 
zWn)q~3=+KT%1lKbI1GvifQ7MHCc+b2itRFsK_->Q;l_X?E`NX0-UAuhV0vO2j2LPR zH}pX<^ueK*YY{wRJ-2T)|1RHaPexz1SkJnOh6tO9QPJk$dQ3*YqBKkb=fuK6iaDU) zVvt;9hiTyY(`eBYqB*8I8?c#h#SF>eaXQqHq5A5Vo|HaPC_C^%mHrD)T{bqZ7c{>M zaBb!v%oBVd(~*S*GO@##=pe>Yv#w_gOi*7i#adT|GRW%r)QQ%oE%;>)eyM=vvIXUv z-S&LEuOUX8CbCV=8Yi8OSiQDNV+#sbUkd&yDe6wU!G~W6qf@ivtHasP1XuDqaz;=+ z9pGnSc57yy1UBMGvm>7bXYwg{KZ1>3 zjIF;#p&iOIJ`HAu|2~m-lkam!bG%;e#Y4A-VN0LxL*6m*0t3__GtA))*n8bc7D%i- zz`V?X`4<}vu>`i5W>3*NYCPVUyr+ZY4D9X@&d`0j%psZI;7+}iK1Ci0Uv^vGDCW}B zHc#W_j{x4Nv(i%^&(*xl!80|en9glr*QtXVw6?;^lI7QV`lHit{_JiXJYzJaf5{hS zXrbn4+dRmRp4pw$c*c13nCsbE+#Dj(~rKbar zhBw~N77Zs=B*hk(Ly8 zEMl!E@!sTI&gdYGTN>PN10=lOpSQv0OzuOt%c)V)@WUt3!R-+!<~?`BvHgFF`;vZ+ ze=>kN_S?Jnl?kiA76dg~e&%E{e;;aK@w2|$+rlrtYK}rrK%GJ`35p>1c?yNF)&&-j*@>}D0{7F@!EQtW061%xjv6WE^1t)(dAuOFk?yI zS5tPDMxP$X&rugBWMaM*?tlevg-b%_qZZg*6MK5Z@}>Z-QU+(|c8(%1^WOfx#uP6P z56=`YN1=hXUW(;{?0GAXdIqcKZNb~73rDNG5uR3-Kl{ul{~GgLmWC%M$wm(MNB@1# z+fFRXb3-59-(N+)vlQ`wzsl~?+=?RG^U0e}AD=erjOp_7u9x8uuO*#`2wmT_y8&&j zEnAZQTu|{_a~(Moe8F;dXXP_sw4MSkyVOidY4uyN)2O}*y8b4uk&CEjt%T>n72N0L z$qK5Sef#O?m5tsqx}E^Tr;Vp+@=Zy9n^zw%=tyT*SD=ln`;TI}o`>7}u7?8hO;@S% z*Y1Fwx1YVPqvZk}CNj%K4Zh#Kk1*GXs7I&#IlR9qWE4~^8%($<#B}>!uCx?QZ3R2X zSti7~F@78SaYwqpiN@W)^ z$@i|Vhnbg?{j*E2>*sgf4|LBflm8~+mRl$L1nkwcscW-?iotc&;00t{76sf9T1P$( zXTHDhBRWrX{BZv@ku7y~z5Zw#9fb!x^go6}8sU)~+;;{1w*KA~aBU4wfD$dKh{!f| zS>E>HsQZg_h&%1Y*U8Bf8aPy+)Y(KrFzcwPF zq%tD-^$C-cYMhUac^!Hqlau|4vy)mkaEMw)q{?u3e;-vJZe3p=9_a%-?AegheZSoU z`yQ{Njg2p4#A$=koS0b4Xz}YQnWEM}B5F<1L3S=rDh5-`P#_VH&mfJazX35FsMTW} zFnblFc^^a6y2#gSqMgCqVrGWhQfqB4BpAQLn9h|<$X#TD>!3fBxCr}NeThNyra0iyC0) za#)vvwvdW_fH${#UDTY(3R9c|9A_|buZd!GhK8GT>g#1NwmjK#OB!O+Er$e-vvgsAj>}wKaq!jQ{xq-d*(d;KT$opArd-^k~wydq{`b! 
zy6pp{+~NK?se7oUcB7=#t4O18?pi?~+b;8B1L@8UNY*~!P&qN3fnizUVAso93S@v5LfM-IB}H}xMGZtzcA!ddT+u!o>96Nx;*4}W91PRiyIE&$k}T~ zGnzxlzy+kUo$EE5F`E1+M&xk|lc0i#>FC!_Tvg%16p#<8tmVt-&p$Ssv6}F>N3JF| z{UGQq1qJ5porQe6ri4~1`l}OrXg8mda`&%dT!;*6C8nH{>Isc;9dwt>;Er!X2#51B zxPlbMN4ucJBBy|%FQpts6IW{P{+qa#UEn#UuN`d0?nK3VrMdW_wlsOX+V=$DaJxx=5h<3>C(D0p3f%-j%HppCp;$5^7}9Fk%C zfH^m;mo2oX^islhM8*xsYcjBcRc3-}7n)90w1`zv-}IftME*g7BypQr;Kyboq}QOc zg5-sXqsP?!Y181olO)7FYC>4{g;rtrZY<%TUDlea&NV_@XZ6{0m$eOiyh_Ch!h_nYFAeVu&su`SznO?Z59oiN`JI*ELYm{;aRzD){kQPYN@? zr%m05wb9^=s72Ts8ZWwQ8L%R|FXc+{lWj{5m~D*KjLEF^PYgQ?kuUP8;eQ7bv-M~6=dqq+O{}zVp70&rZj89s zN=%(>+Zc>E?olTwF}GP&)MFd0nY2!vyez_#X5j>4OCwnsA>s{-Q2yll2K;55Pl~!Y zrYq2>5xtSteQAg>@LmsJ5jlbXeH3Cl1%yGa_0QY!FKGV15a35?O_5g;ntox#P(W=N z%{(H}6Jad`bL!gk{CMM@!d&Qi5N#srBQmLJl#Xl&Bhiy>xQ;!W&=8A&nSHdx1guxg zqbTAdIIH_O4u+a-~-Z=Fe0sbm>(e|28FGC01I2tq!fi&Zu3tUDYN^?c8odh zu6?k`>4oTLffr(T_N|SC0O-^3q0$gt;<4PuSoNhR&!@56{!@+S{qKFl z=w{u1wv$2Q=)vU-#@EB&#&V02WQ?yz`&5=B%RBDF;%}4c0z4kV@<_&VJCs;lLik>6 zCq4eM7`ODn(U!aaMQh=x)>rtSU_-cSOSo!#U5`sdvKu@)K*#4goj=GoOyDIZQC3VO z!Qc4Im^P$ob4F`iNfnlA;Rx1ORVpwa!l!%`7T~_2lT+AxM6M&<8L0N^h-F8)vK_B1E%DnJPoi`|aMt9l)_ZM5%m zMU?|&1pbLi*c)IflJxek8B;1zPBl%EB+eh9bq_-yHO+e7Usvz%5&aogU zBaa>IEpGxzw>Bqk9{M2kyxH`uJMZy9{-^PfT!lHmq~6*R55o@4P*kq8K=1Wd0$e8u zey?{s0z)UT7QLZg;p4BByfbgX@NuEO1OyHVr=_#B6ddoe7O#d zckOQ`DYXw0E_WCLt^%rG?@L|pZG?MsaV_m4?D(lgDey>FL+sR~7fo4*X49bjI(laR z*lI2caPT#wJLG32a}JZ!WQ9inMqXCdc%ojA1?o;=bl-ykvKKF#hGJDk5m(wGY+X-iv6<41KDa^Be?Bxn6aV3%iHA<|0iY;-dR5fV@L#0Kx&7lYMOV-; zq-;Y3C6WOTY%4#+UIPS6k^w|r==*PHhV7IK!H0U)xmZllXsQdgq^31pNakgT-)Dv8+|zE|3mVL^ZX%q2-9=eLlccm|b!Kv69GI5Y)X< z81{)z?f#AttN>_W3rtb}+s<&`&)}~^Y|cvb%&*Lu&qDZ0%SFV^7JW1fXkzQO*e_Z7 zB-%{gz#gsiG%71xwzXz-Aa0z>yZ{iZNe8o|w42`u^F#_f9W2M|^WOfr z2_=|eu`&2t`M)P-*g7+ygN@Uag{{WY0?C65NGHF_`KOs;kFSo}K8_e{_}fkd#VIP8 z)UGjg+`<6QFl#8H^c3%5u&LuWG{SR+D{6N3C`QO7b1=H1$&KA?3)uv^sRo^`fC1U4 zfE-3WI1}Ua9u##~bVWaK+>OwtxgwIYfUVS5nQ{U6)!`>u5e{Nz&D7_YbfI0;b)58} 
zs9dIqZJZFo*9RJ@VyqBr)ZTU^BTQpX>t|I*bIg8oOuvT60|TU5D{c7ns>X1!+G2Z> z{2iGP07-8bR{0{It&qAYu23zslU5t`!U$KwaHz;|=-_4dyNlHpWwkjnYaMWWd7$X| zps0?(TcH?BxGw5v8=nt%-3?t)7pc1m_H=zze^qFPk7e-_-^+6 z=#p`*PsD6d+BM|HNyfKDI0UB&^&R1v!L3c{rp?a5{`TnMzR2VEy#C~(ywPFh)}97B zqO-tVrPd&y1ox01@EW%TUgQ2km~9A;woG;OQ;}g=tB<9dJ&%HGkzRt62<}fCuJPXx zeSP7`Cb>-M=KLq`Ybkhr%{x3jnM+P#b^V_sSN8;Gv@~UT9!I0@A^aikFVoUJ0b!0J zKjeFC;JCZf(^p?9xXN>Q>AXcEBl0SsD1v`JiqjMYkDxy3o8w(LymnpRJ=pjA zByUQx!IKbg!?Y&V3y;-;7;cPRQ6zLZc{86-25lt(?!~`R??aSzt9@mXmpLANc_^--XSK=mf-2IJpR|n4zp|VMV@0uyqkW08X8HjfXy7Q9{e_}=B_Ci(WY(-G|j?kJ_h z^B(*&6z=j11DrshWz;#p3Q6fT0G`;Qk?F~tcZ7R9$D{IHiDZkkRjOgzxJ((} zK?ltl!y$}sn|>)1R|(E+HPJVV5RzeZPGedQ6X`z6sha3K5cHkSB45rP`~cLD^L-8F zoPzH*R5erSYi8BLzyhU_1u~D$qH@k2kuzk0azzLEE_SC3P6xniPHp7I#Q{ShD}vQ8 zqrbS4>!VIh*lMFDOYe=C$9EOR-P!%9q}NRUtBWg*wf2{Rc*zj>;bGC#yq6l z_`YBavIzj#L}$@E$F2k0#H?u(RYKvCLN(JNHj@qjw&C$#N8OHxbMwZUTSz&k^nTb} za9eP)fQNwF&>eA^RMZ;Vj0cp<*%G`=y3&Sfv4&*%b*Dl#b6kG>pLo*(ctxBIKH*@T z%cBliiN^IhzK}JJSG&1axiwgqX2SwqYZ)9<3L54gmstk3(?&1JmHayIksrJIID2&! 
zXk?0i9w@4>*QRd&kQ>M*fW-8vNv00Sat}>g1^bkn9Mix=M)wR9bWN691i2fu}=Pg&jW>IC^CEFSt zPNnHHB;>xr402944jIPv=!Q~>7%BK#cBJmv&t9#t{BjRR)quRw53tQlb`0Omg`~1{ z$`Rjj7#Yy8u9KV{vH1VMe3?Zt`3Ac4e+b-zPV#kA*b5Db>%5>0CvIq_Quv9;sR`sS z;)ulk*36^|EvG1SjrPlf_Ln}f$Bk%ZP-xx0uSCC+YSJsemC|tU9YMyW6&*u1jJvAh zq}=8a2+2bC4Se8hv9}qkd)$%b|S`_q!*V%nDX=$Dl+# zOb)*@UU$t(o9h4V$yUcoplPAG8w7=| literal 16858 zcmZ^KWmsEH5N>gY;ts`K3Ir%xic=hdYjJH#arfX*thg4p;7~N76sNdLQVJ9+p%k~9 z@5*zZ`{U-vWV18-&g|?t$-Z+o3PuVq3_NW?AD!QOzwi~s$9VRbO>?&y8`k+#kJpjL z6>Uyb==G>LMp>~jE}BAjY8NhZBJQ*Q`NB9U^s}K?z8sO~8>`Lc=$2koS4$=DWcU5L zBe{5x%pUz>y``aRG}~Uf^m94+i+{wWF@bG6)p&*FOrU?|eqGY=6x3&~)fWCAynI~g z@n=77-Hh)p`*J&_TSd389Q&ntCtNLe0Yx6hQoY*m8|>Z=@Pa$5`?q%6g9;mOu`aR% z{Vb0V)w9|bjiJGqMc< z=sMXwt3oWk25_b%k{k$1#%I!n;_zPUVHp9EfJ5#8hO$_@kbQ|-1PdUzGL~K4Eu-!2 zgt+v0Upj?Mn^}XJPiw!P2wI?rq)pA`353LuY`cC%B~P zB60*_sXP0qbNg*<)gdP9Cgr}RCGtKp;W#ofo_OR}0fVVJLy=c!E7bbaU>Ghduai^5 zZkcD&}V^ zt=QHU7ySHHd#O&nj8~5e!E1`+0v!BAHy`ICU;>VxJJ%4G{2lKFYCwTe92#j~L=`O< z_WyqFyf~#+WUgFCoP90a4W05xHKSkt06hKR-#2~Bd-if9px5_363s1fyl>i3^=MO> z#jjJNooAEcZ|8Tn&pZqMCq;Qn~eLxw+MRU*r@3b#qo*m?#-nk~tI7b(%_)p{VlH2Fmbl??mO3t%as~yzC;@DYo z*FwU5AeC39&K9%?(x)geqO}B zEq}d_^fMTtms=5PZK)}RZp#`^Zka8(`z_s`njCp8OWOAZJL7QDqVB27HP_E0d>s%@ zKaaZ)8tu#L!Va+LlH*L+@$vBz@6mAou%442_Iu5%&OR;WR+-()CG^EtM|?6n6P7+@ z)VzhBI#oYXI!>l>LqcsGLY2*ni>*HuTI$*s|Ei|&oP8y$OzAUyi~J*jYF#wmHuyAh z>RjX4_yq2?g$z@zADsYCn2Gd0^)Cq;4nBbg(DBm$6{>GB<~FMXL zTUlH6P43oT1tzThid-XOcQK7)dRy4%s$8W5-1qF5Ml^bLJs82{quEpIrmZZF_?tc~ z8OL`>sb9MJE{ZR{GLt^QHuiAa7TmH?(q|dIdry!pydhi#e3LFER|QBh11|%&5BY)C zb!$HwYUQl9YX9kP44*;Rn3z(*oj+}JOn;9o+RAH22qkD=mpSj{+BVZJ<&{ou+|1iQ zk2#uzzZ^PnT;ahrhlPx+P1sY)>*V1~B6DbqM2EJqfHj4NF$d~WeMz@j&?|9;bUBd; zuz%rp-XOfrZYDH*^HZ_tE2qET>c__%987O6d++Nb3nIr%DPwj69LQTg{yjhb`CQOD zoJ#*0+vAgFdS=ng?W~ykTrcL@z?Z2*G;@cm9LXVKo3ebG*_^_hOf`G!rCgZfpjvthz>>ni7!gW!NRn#&lL`CzC z@>{aV85tjc3+$6omp+BW*f-`*n5fb9BL0wjq<^nwa1jyJI^3DwLLi}P>{~GeYW*;u 
zqeN509alB+jhY=crz9y(+DJJpYJYBU*4`EyERtZUb+LsM<&P>a8z=R$1oY}Wrb$5&w_l@C*_2+QRmfNi$<$RyphZ;xD484B6T84=vx~Bv*f}?AY@K zQD__Kre#Pp%Tx(>&i&z@m=O9gEjY^aqsZh;dU$M&ldcLpGG?XwZA?rLVJ|wgV^4oY z#U;%mqwTxrlBp_4lFOGo-UjFDZ#)nc|qnBxIDZ zZVIHvo>~8$FxgRZif~y8oPHuQ#OCO~MA$U^oa3$}NLS>}s?5Gdp{@1{M#EoUc@1CS z%%5$kV+}@b(+(_7Q=4+El$ODiQSQ|tDt{Qdyr~VyKC^xWpP(MpMcIf7T-G(Fsven0 z8LOl+rr{15#a3N|kE6_Y#Qncx53Jdk*k`(1vGQw2my;t76VYj1? zuLl&;ESuxC+tH2s21CEesCLrEW}nBf&P@Nj>8D6{jvK2Lh8VVvi1jv7qSdO(?jGAA$1KG zZlQHnq&Mqa?n3x9+Pg_uaH`vp$H*q5ZUdcC72aJ!tA5NZeujl1kn3ge{J2=YRB3% z0iY`63=W6xz@~7U#OINRZ^$hKxrO70`?oMVbKpSoU^XgqTK~?`gw1V|^lyqEf}QG& zV&1KPd-pyG?4=M9H|=)#6G~3u*ci*o?&zQXF41Cc*s!n-~7U(=mbjPysuaZ%fpG6OfTsekFTrTp2N2&eGXc*S*55E7ET@9gwmoqvd0h^iQjLm z5J^X@J46BCH;#Hs^tr~ilg>(i=*OpzrMI0OKR%IuEY+|pCID_`c$HqhBg=T9ZX82l zX}yp1KL)nwzWy z$`sW1$~-h;yf>|qB*`EoIfQ>;yswYK_^0~?tQ9D!Lb7~-ctskURoeG~`3PYhs3!cn z5zQ#xMYrW9yK(gym>$lGA>$gJN>~DT(~;B4jvXUyt(86%cdSw^0vm(XOPE%7p763+ zypOI*MrM)fYeE(j$Jld}oX?B1ez7D5a&p#EZS?Z*&ChPP9Hj>yc)dTQ8GK*shX1Wvm7uoG!)^!Os+RV} z+f9|R?eUXYhK%i|Z4-gqVF%p*k5y_1YP(bf@Gj_Vht4LpVgxx}g(LvCLXIlBdg>t) z_^0Lw96p8@k@P`XIMsRYqd3a&qxcoai~TNe=nFif2s}t_f}cdb)4L;ch_~9=c3n*U6P8O5-DzeIa z!Yx)&Bbk4TZAq4sVPi#R$PQgq2Yn>#{fv1qCd_B5JFDZ^EKa*I2J#mv$l2)TR4f}! zm*KnV3}@tM1_b0usZc*IA&Ofaq-1lmpf1kBHG50HF2CgYrAMg)@OU=!U(=S&l32&M z9!MHv{kS@)6z;3)fB{k4T?LHxKFvwoM7NG*@mFtr;z%(Y`T($1PS*3@{>`C?qk#mS zO6aUqAg+aY>(3quTrWZG3BiH zZ88e!l~qdL#l4Fvze@E;OPcHWF1U{uiMuD`Z@S;zzJe8?JHZ%lpeIQy+4l7~{I;*E>zH-@!X zL+8u*!a{t%_@5#amgg$Gy`S-)-o})=kVZDAdwSAkRzmm0B)N$GXhs>&g?l5G9!1{1 zIuL%vXjCyor59YSd7tjxCBn$01}0HDvlaPZiJc>AqlwKw_WT=38ngl5H0iHj*Sd&! 
zk0UQ5Cm)JR49T`r74-q;4#D z*xP1?iCW`NO4m#$Z=CUvE`Xxi)E?McI2rk~8d4 z@xg>qJ(eakV!$94w*SgAW}4S_(Py(NG2*mP+h}q&42`Dlv^~%zCB9frf^^>-wv8{Y_rlJ*mW8db&+?zf8anxGj4#a8y0xJ6t{UCnOdB zp_N)GMn9~lT36lL&{^JZn=2+2f72!At{&M)+20A9`CazrqlYftD_-vcB5l;_dqhpJ zD#tW-LIuSA$FZ4}s3~uGAZV2UQW4ZNTF7nnSu@#ThiZB(s;mp_u#Qo^(j#(2wvxX% zsaz@pWr9xRH+F69>rLn&}weQQv)8l7``S`&2+utj7-roPvuD_B_NT?wehN zr@f@CuSv_opJ2lQ=jkILhFvcx|D-Z71WHiKX=C}&B_%O1m}O#5%nxQ^Q&&R!T#zbs z-;{}owKTvk2M|HVlsE>4wNJ#-^N3mRYcVenw;TqUsLdPf`eOoOAG?M(QHlaD@rf-r zsTmF%P7}f~Y6<;WG2}+Wi-UAQ)gcG73w`E+zi=%t1gxVn{r&u!6!K_S%)i&g)-52D z=(dcb^yY_6=D)=?S6(!y0sv2;c3Pbef_7R>(Ww9j?(caTVA}$lmjAv8Kury&P1-$i%$#lAC|lNed@> z>E7PiTy=SpbDm|VQ`lG$r#nQ(Rxbi8eaEi3@$!wxhI90K5;C5YpOeNZGKRAn0A1zm zGTz{0)4IfR&zJ3m$X|LdeY^THsOuPPZNtsqA}KW4-zw|dohG5u^Xl~X`cyYbZmQN@ zz>E9-siy>R(@&Nu!ksCIU~Nb~@Z=RbX<9%SenfHFvASrleM$M(wCVuT)d3g0Q=ReFiDF9}fZ=lG@pTDk7AGxA$GvRWRSKh}%_bykbPX&%H zokt|OILG2X$4|lSSe}qU0;=mZbe^Bz`MjQeNT;kcwJoGfc|N@R>?bkbaa&}V-M)_U zhgab=@1vCHH6c)qmvihG&jAABk}z!QRhv$3L4pqFtxjl_c--6Q*>~*;# zLYyQUzWeZiZqj)#h*tj@hGF88@N5ZIonDbjLSk*%-icfkMIh>Qt_Xs&{+UCK-P%C9 zVn|>Gm1{dFmdbM$Mqs9kCB78@%=+ePbCm+G-|KL&BFfDh=^mb-*#v$WXwnE4_=Knv z{Re7f`ZG5~A9&3n5)4>*HbkWB9xjDZ;2CaChzx8)X3gRnvHeoPu+E~hjUnw&ify_N z`n&NC^Qp1Tkv!=oG??679O_HHu?f6B?0!JAt_h?F22_*u5h@6EBK0EC3C z)ZxWXwjuKevjLGER+#yO9m)H}hdpGreT`q6ee69mc^6-x?%TP$iQT7i>Dh)O zSh@Mr0(Yr-RN0%(r5M#tvrDSYudFg(eV#%UI_2Vz#J%~{BvFLOt}Ts`r~{x((AuWk z7_-v`N~W@!h5cZ4C?;GNu|IgBgA;n~8Wf&)O3N5r8bu+iN&b?%kFet}ms7+1kZoX> z1qn%f=ctOxSCW6oTqbYqQ9O4t_{+fxR`&|c-*y>LqcFhWTkPxUnbpF0{67@kfeWD< ze2cjAN-Mmkd;L;u{^;f!12)k@NzdO$o^hIZL=GvhXspn@h+8e_leeiG(UN`EahUIz zIDdSlHNciLPR5qwYR0U&xE%U!K6D8TuF*~cdz^uSWMpVbw&TRygRl%-=B9+XA%$T@ z;qvhdzIn8cId-vHjw`IV=9x&Ym!51c(_%vbj$JT93H)09^1fvT5v1q1H1 z#Je`Jf2_ifaGwF0qZcNgr^(@{91LD^MC`0}%pQoX6V~Hr3Q-Y82@Ufg-o}&JtLt(= zdB~14;ih=~887s(ICw#|aA!)3%E%rdvJ$`FD1rro4XUMEszI;=HY#HWsu2^TL0TP% zw-2m8--?zGe%7C`yvTYy(=oC%Xjty9sodH<>stHgxni7FGoxo>Woo47Se*IUP3TAa 
zXUfdoy{uhj?A%9d2v0FJ)7wBx(zgkz_;2%2ZLed2Z0|sS{)5p$K~mxjn33$oSDx21I{c^P&Td4WJdgD>Od!uRTVGUc9 zsOD9usG7R3<+DutWh_aad+opw8@11Fi(pRWqd{M>8tbf~p4x_qf0I7lSv0H3OKW!KAC^611$^_l*2O5YIU-yh$2 z0&efX0iMPNiur5iz|wN8PDP_r+2G0hx}G5U@aFR!=U7!SP` zv!wDEJ%+@!MzhlpwrJ|-HTeCy5ps~cmLogb$QO5Sy2dT)+<+wQcyVZiO&F zpQJvidD%k8Y`02G4wit&=1!I(hH(n<_Cemqv4Q8viK06)Nd z$(n91J7Z%o`x#X*7ZU6FsavOL$cb+`CYL%=6|X28&bQrdg~bNc{SxU#Sx^#L2Sm2; zzN6Q1578Mg;AuO}>sCIO=ca2-N05S|%>sUh+ zW4BuyjpM0WHcMXaR@RL+LW42bTc=hTb7%>v$g=0eXj7x|9!(* z^0+vrzg~bed@8NH2S+Y)_JBnv8nG)ntS?(XqnOO}_#CZ0^+Z&EhSMCm(5M}X zm`Nefz*k-osMhuUU+v*aYD4ZGkn;!@M+~d)|82i(VnFrJG|sFoBD5}+L^@mF$o0m{ z3)=gaPh;VSQz|hOwOuIZ<2`J}5JUW8EM~=V`n>|jj*BmnKHJ1&a%q6HY#d_cy^6j> z;_>FxMNaXqR7TPde@fSIf3E)6e7|jo%);k)&1s6&09aJV_Mp=r-Cl8=or`AQ%E>$Q z``yqGS=2{Z()k->Jk1Yco=Lyk82aXBY9XQ3>$;Qq-I99bu6>VOvB-U>pDBrFdPHIR#Emg<<+2dLiPj1O=UvS2ElK+?m{ zF`<)o?P=vP6=uly+JS9Ue`aClX4l#THBzI*?!1AK#En!{zgif-OzQlGH=!LegnfN@ zEn7dUSoj_m*unH?W~enVPGV3PgCf55hefW&Q&Q{qXI%7fe2fjZ@FpVoHnZ3|?nDyg z6$A|yrQ&a1z*PUK#xVCaa}J%R1k{ZD8s|b<3t5jRClMjYwD*1JyT7C+Lo73|K|sI@ zobylq*FiQe?~QE6P8>xG-o@5UAtPu*2{+LNOH_Sm5wVw#4hJkfEe8{Ih3%A{;FXAj z+WS5O=@s@nCZF-`St^eYi%xsd-oI0x6P%U0Of?v%;%!nkZUB#5%|p+tw1-_C6tbZD zL90*l%&ud_!d!=~ZI7nEeCyAeUe|15sFpzwNSnCS^eu3~U;C#K?RWCI8liM_Utn&& z?&DL_xdmAng;|zeE)HiuwOTFBB^>tm+IgAC$|M`*G%LH7p5k%k@+D_0C8BMB5>@t| zvxjp)780jEPYPKl@0>R(TD_cP4wgUc` z3)mysw{nw^D-def{0F)p>kT>seLMC10u~*q{0Aa#jtP-;T7UNl@@)3m8M>KS(jpo8 zVKG2WqS`^h4janNt40}V65fHd-zMD*b;~u=Gfzv$qmV+kdcbV-B++4#f$Fk6c$FN` zjW3N(&*Bj=UAyWKaXh+o_!ssrR-QL;3OwmATu!wkH`S3!WW70<@428(Z*DS|6!BTG z)vvV3?sG8n>>=!m1NJmUgsWqPS&rP?Q{;TI0NR9f+Yphi3`C}V>D-M*RuQ++?YMoH zAg!lPtmPO|Nu-xM^L_f8o67jCP-$8@%~+vy1lWG&OE?GCZH2dyl%>LfS@YbGfsH%< zq9!fuS`CEQw0pC2C3}SP`jh28`NDb{s(M$4WOKxe<4k@SA}Pg z!>3!_W5~wl`U1Udn|A2cuSr@Ss|{7Bv&hAksDN6!AVG98`PfZ( zu1%-GRm~w*4+>^?z0xoHJhm7)tGQOMW|}k{f^yD`PY_{#++;K2=Rz9qg%msjL3?4L zDDedPPXq+6Pj0V#^<5mu(o?zX+ogpDJ>`)0UV)R~eIIYzH{H^|h~9%g41{PsZ8i0^ 
zeRznfnEJ!ae-ir$ohzELu0HU1SU-K}-q{Mkh`cHSyzoCY$A|dM&H*+t7SAsE2$SI2Ajx^yhDzuaRkIu(m!^{bcl`fVCyBr zA6jA|VsMkv2h*hnDF@7d%xE!etTC`fH`T%Kq@WybUV^APAqKS1so|Do)%=)^p>~+P z66m3n$`19ihY@VHD{9nZaic+x0ETT>o~T#xaKI85`leLVymr#xL872 zjL58L3u42)OOc|Gl{|!n-fAlLz4C002nomTYZc5$e6c;*aptS4a(fsE6HHEAiC$ART zCIyY0^7Bn@a)Barg38xf+<_3yf7d*qLy9gdicR+O8SBcyRs(oGPnrxRl}^l*0pec` z=)ut6o;P?$S|^9Inm40{PM+v*6*SfblyiEmGY^k(L7s>~w81auZHK&_^8~AFGi#*$ zq%33@nTm){?^5o5gh$eXW(b{g(bk;?hx*SYoN#N!Ck!8CFbqX)|7n^ClcyBoef|-x z>IhpGMGQQ;wU+p2m(*HS)&^5aIze+=;YQnMPOY!aG3^pczTk>eLUa4i=@T%4p1HME z3Gf@%>werh$Dx*0T?zV*AF=t4Lp!aApzZDk6WvArpp{p{!re4RiDjx|O0vU{cA9Bw zC7^2Jyp(!~)5{HG!luNQkx_>Mp;+sL(kBLmOOtMQ1Vhd%9PxyfkuiHM@Kl7|-k{G% zINo-%A$pXCjW3=%e?&G}*R_jJrgJ~i zfhnno>$jA0qZ?GlM2aMD9~#iVv%d!&*o*LyODZJcI4=#5D|lr+7jiB8{9?TxV(t1sDsv*elnG1I=jMq~)IiMWcg^}en$(mWvEGP2RZ~2g z;>Qk09s|8rKP9rbz2qlzXv&_DqKh^0D4`apaXrc z{<-(2;4-ioUA!s$gbyyheI{=d?#Yp4iTBYM#=>XYb9(YHb7!3)zpa!?L^@_t|ZXja-Dd02p?q`-1?zy>y`_L}P9?_jD_5 z*d2|e-X~iI?2j-&5M%u3;rchTp;O0ZK|XPh(pOdkKaJGP1|BCjAm%*2Y6pR++hXPWA2+t<`$gt`ke0|a!pe6GKjv4|ch0QPOJ!X0vT)NhJ_39`Z1 zW?aBqkt#p!k$2B-RNe%UcIs)O1|Mr!IyD`}{Y0J4*e^Y`RTRc)YIWMU z#CNh85KY$Bm>e5pEH*YVG08F3mE;+CA>UL2Tnuk#sog-dp8k6bQGHk!^DO}=NC=s& z2W?uki#vMC328{kFC}i?TODt>%L+?L{E&fMeGM3jc=_?1NAK%Rk=60uXS;pJsjt5` z3OXB6s)MgN-8Z8lzO8>vI4nQZr!Qdz^%OOz#lCR6s{M(KwO1xmXAwG{ z>bs5!5%Co|4l6&VR%tx#^AxmV7M18(R^8uY>|2@YdAGk8rhKsXNFUgg&-FeoH=j>J zX@BoyP;>tK#KQdTL>MO-bx=t=;WMWeb4dY`_|DOl-_P{kAuGa<2fnHHTC!D zDIKMQy;GF04C0jBd?#jx!hCv?KYgBCRMHaL%oX`htBITS>nP7UYYVb=9sa}32{ip5LK74WK-c z6UR?ay?O5_*KzqZW2Cy<2OEwBOy;kvtQu>#1Lt?Fc>oG5$?`$ z-%Z@Lg61a)H`NO+nd=vbQl4Ob1urI!tBZzaMb>KO?!q*3O!3_KO7$q1%j^SkoNs<< zNS2c|Hbtu{0dYq82Y`6YhA^G1G<}XZ65|#b{k8%iRsayvg6(FLa+T7kS4ZTJF++8Uy{{m1gc_Zm?VkSRzVz%Ea z1SwbVZuF@#@S}aUM?Q;(sOAbGfjGfW4vp;n`0SJXQdd!iR$4(S;M8Z+{*R&?owQaP zgvE(J2{QSG$OJ|f!)vf(mj@^56o568f4|bL7-9$jAeI!?_4WZZ&NsJoT@~0x^${dW zzzO^8uo^TUfqzXg@$He}K5xrP;KbFu_TgWSFwTxS&va*CLt?W=WmMp)38|D)2M~A@C3(YS(6Se>D 
z;i>jfF@>J08_U6IJXv}}cPDmd160F3U^#oeDzfAE6(J|ldmJmI?65zYOfO?|HmF8I z5rBE~kutehNVI<%BVPuxSoPaoZxzdHMvtUK@J7&yE zVU@#cXQB_XL?9?sz&~zX>wK@0d&X$Bau5UVMaEVib9SG-??Yx-H{!MAVcz6ZCU3Kf zTFqf5<_*qbs>Y9dX3-Bo@C4sS_T<5_^d7~Pcl<_^!j>?t#Un~-V*v!?kL(h1eY6Sr zwu+)+@qhhz=?3092IuQQSEoQ9MemgjeLFX(0yfmU z%=kTP1M}|UYIGmw)Xp@dUB1!2Q*lcZgOzM@&EW!JMb6xr>fDCLghw|Hy+NQKLObU_ zJ<(+$NyQ1Mvwt?Uq|5skgCc*rEsMA8#=G`?H$VP=<;rs7-I|XMubp+zpFh5R4I`N! zew=*W|NqM7Cd;e#qawra{l1%vA|3Oz%#g3iQ2!6%$Goj`yVLGYgHsmms(U~4Z*vlZ zBmp-gcb~gW8nC12Vry&zy(fp=79n~>TCOWpM1bxueJVt#|6gbWWU6DU7Lf_BP3i|t zAA8vOJte{;kU3Oa3$Rqy_+~52tEnh&P&l#Ukxe17j8vC8PU>&CC?m~3VNDW;z?E9?`NgMn z@Z{A2f>M4H4C>PP=gzj_ z9-W$uOeBrXEm^^&*8;`k{Sk%|RH~JFop*9xk73fDr7@Ba>>#@QySSN{lE8jDh!KBo zu^|!Bg?u(lj;PIFrj8t*-{oU1h!Bp!LcdOFK8QY&x$-P+Bz2eG4>$jEv>+RQ#}Fh3 zC#M|CYr@xAy4N@!%Nsbt{a|G=US`#cFzvg&4~#PLVY|=h{?Xk}2uA z4kx%wsSkL66HbpB+!@2=7AkycKlR?1%e1utrEUG+T0WFEl|W}H*cb(Eje>Tx^E*SM z_9kHlReh*e_=Vxa1S!s64qiTyYI zNvWl+0zyCB-tX@TcFikv`!C*A$iIyh8bZ>MPiy@f z<4aCTt*ktjAfOc<S@FxtQ zJZbb3w85DM*fwZNt1vzzKzj#f zxcKyf^o*KO=^O1sWQB(VvnO9eU9s+bVr1JO^96w_L9hvq0CMLc3eZlpA`g&tejVhZ z!|-JGWLs6?#`bC`_l48rw1pT5#@i6+31kYD&f)qM_kz zc^F?R&Px~>jC?sGj6MC2QT`RS923U(X5nD|Nd^KUI|Rk26XTAO1@9q~E+770DXRV} zMyJ%M;hmBX3*1>Ja^NyY%I|hxXt$2jVZE<-(P^Be;WPVKj$SA zF~w@A!&|vU^Y25Sb`WHGh8DNnHK&XuBq`udN~0B9o0KeuIaeajm!K)V*0;B4}OKlzbY(G&#(&e61ONM>S;y1-R@>l)#WIs7f+s! zzHIY)`U-iw zqCF91*?(?~O6lrPz4@yE~Y88+AKIRLyRok)X_{pJ5`b_**znBv0@j9Q!&JWT4RjOEgoGn14xbP-e*!qao_Dh5za{kRk7&K@35{9tp_C<* z+LZBOB$@Gh)UeDqlDZU@T<5wUSs1KvjVWPl{X4}3EOz*s#ppQNXjc?n<9_{b?eDT? 
ziSoDd(3v&Sb_{7mDaWf|vj&L68swm%e+Vy46%-AX3l8gxuAVb{}9e#QF1az=%gt`8x>8%l$b{$mg-cbn-g@Wf5+Mh z8qS&n#H3Fssljc$D3s?$T|89gwc7%bsfn*erD0`?>ayPANqIycm1h+l$|zfH&)p@} zQ3ppj13kixu6gwSfpULWwIUK%p9{_5F|(Y{djC!v*{Dzi#@j&y^mPrJ&eMCqr^71}FUu!^h^MSU zWVfyebxHvN^^X{wJ=pd>OyZ;MwM-6={=@jPN1pf;#)yoh2h+wc=|^X#>b;@38(lGG z@k#J}@cwdw;lN*qoXU~?A<=V$+rTk8x8<{GEQkl1Mdvdnt7q1C(^CX&jkRJ90@1xO zkWdzeL+O4y7MX$P{ie9t%EE!9_12hHsBrt$^h2guyPQck1jfg+@iY#Nsi66Za8Ycy$%V6&D*i!rybM#7%v>r^O>l7VJ^ksnvr|dzlx3A6cyWf_! z5g0<--$MH+1TFBdu2Tlg@UtD#ItjD+qYfQFrKk5Pg1T4(U4&NF*!f@&Fk7I#Z+PayL1==PVf1EEL$VSDFKq;y&O$C{ zA!APcfAQwD;@28OKh(kJfJo!*o2oiYOWW$mi#irtfTjgeUC2mPQ!Ksl5Q-2n*D17O zb2dg)SeD@bVOvWUm~E`1CO;{nlP=o~QUsn1?HLzeyptFiPC*lw*C^ZkYo*y1WO%%B zKZ4?_M*o|uR&&SuE?>SYv(!0`^KD9+n-co>P9ls+T7u*^+Zu2NX-;+`csQ)o18inn zlNCatjG(COe`K;jPh@cV7^R_qr`i^zk?GlAh2o~WE8V8#92o9aLY!-s{{_NU?tMOm z;>}#@}D9DbL?wd{S zP4|OgLWrH6YFm#SYDfl((l*5x=3B4*A4ucPL|PkByOC!-{3v}nN-vaLm|u)4I1Hd> zbd|`6aSTJvmE;5!^8U#5R9O7n)b`ao39gY8G+*sZrJgNRdU|AiJwu&~H#bz<==sVl zXA{1DZ*G4uG5GV((<4Ox2`@mODs}m!zKPc-u-EOlU5i*DqUOZws3PTw?*0iV#n?_s z+E7d4lkmQ|roX0f67grIMvv05lZwAov&P0&W}zn3DhRl4L~O@aFYpGYgsOmkZ4vW@dH z@ADm0Vwt~9PpLP<*|bR|Uhw!1qT^&r!*M@xO9DGYeh1@N9EM9XA`7^SD%cogl-guU@i~p4bG)F0+SqY^W@9WaV`vI*mktGUrF}q`^UW}?e;xldHpSR~wTy8uKc3 z?j9hIm9(uTH%hFn9OdsRnE4g$p}oSBpaX1?v242!CujNylh(dgFa#(%zH$+@caJTY zf~eVF71r2ytF>A5(-!0-x~4!i)vl;Yfn~n!vZH^1XfRI#HJ_;vxDK54>(?5<$UFiA)3ta(hF)s2AdX0yMIZlqN>H z;r7%PyOEYlF_98wiJ2;IcZ~u&t zmPOX%$dT^;FcJ%GoV;#giaUqd2vH;~^zV-)H~oW235f+F4D}v^m3NE@zDry78u91e z@(Nj;#%!J#p{FnN)O`9#J@6}LAP7M(#2)88Xxo*j3V>d7=B#6al_o5EEAUd>BMI{0 zmx3T_`FYGRx5zND2o?MLP4hoL(WZpkXe$9jc_^KPnBqs#f-S;j#aogbD{|K4@W&alVLknEi1@7XzTVQt!ywU>yj+VPvzXcMu zz^E-Sy9?aK2W^3}EpX@-IH!-hc>kBNkGnWFW$fcFj!hZ+I8M8}UH6Mx9H-sgu6s@y`#4U!UPfbUl3Tq0YB1#N&ul9S{3%JH b|IY5~2>LpLzK)>xbp-zhO9m>j_r(DK2^6=Z diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 5d85b760a5e..f5aed68599d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -296,7 +296,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = for { extraSkeletonUpdates <- findExtraSkeletonUpdates(annotationId, annotation) - _ = logger.info(s"${extraSkeletonUpdates.length} extraSkeletonUpdates") existingVersion = annotation.version pendingAnnotationUpdates <- if (desiredVersion == existingVersion) Fox.successful(List.empty) else { From 282084f2cc32f2d28109bee3ccf69ebc1efe99ce Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 10 Dec 2024 12:47:10 +0100 Subject: [PATCH 294/361] unused import --- frontend/javascripts/oxalis/view/version_list.tsx | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index c135dee1bc5..301101b0804 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -9,7 +9,6 @@ import { downloadAnnotation, getNewestVersionForAnnotation, getAnnotationProto, - getTracingForAnnotationType, } from "admin/admin_rest_api"; import { handleGenericError } from "libs/error_handling"; import { From 6e3709a82483a3917d98bb06d652336c387cefbd Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 10 Dec 2024 13:45:28 +0100 Subject: [PATCH 295/361] fix that version restore view didnt open when loading a version that had no skeleton layer; also use ensureWkReady in most places instead of actively waiting on wk-ready action --- .../oxalis/model/sagas/annotation_saga.tsx | 22 ++++++++++++++++--- 
.../model/sagas/annotation_tool_saga.ts | 4 +++- .../oxalis/model/sagas/dataset_saga.ts | 4 +++- .../model/sagas/load_histogram_data_saga.ts | 3 ++- .../oxalis/model/sagas/mapping_saga.ts | 3 ++- .../oxalis/model/sagas/mesh_saga.ts | 2 +- .../oxalis/model/sagas/prefetch_saga.ts | 4 +++- .../oxalis/model/sagas/proofread_saga.ts | 4 +++- .../oxalis/model/sagas/save_saga.ts | 2 +- .../model/sagas/skeletontracing_saga.ts | 11 ++-------- .../oxalis/model/sagas/task_saga.tsx | 5 +++-- .../oxalis/model/sagas/undo_saga.ts | 3 ++- .../oxalis/model/sagas/volumetracing_saga.tsx | 3 ++- 13 files changed, 46 insertions(+), 24 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index fb3315f1150..255a47fbcfb 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -8,6 +8,7 @@ import { setBlockedByUserAction, type SetOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; +import * as Utils from "libs/utils"; import type { EditableAnnotation } from "admin/admin_rest_api"; import type { ActionPattern } from "redux-saga/effects"; import { editAnnotation, acquireAnnotationMutex } from "admin/admin_rest_api"; @@ -46,6 +47,8 @@ import { is3dViewportMaximized } from "oxalis/view/layouting/flex_layout_helper" import { needsLocalHdf5Mapping } from "../accessors/volumetracing_accessor"; import { pushSaveQueueTransaction } from "../actions/save_actions"; import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./update_actions"; +import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; +import { ensureWkReady } from "./ready_sagas"; /* Note that this must stay in sync with the back-end constant MaxMagForAgglomerateMapping compare https://github.com/scalableminds/webknossos/issues/5223. 
@@ -114,6 +117,14 @@ function* pushAnnotationLayerUpdateAsync(action: EditAnnotationLayerAction): Sag ); } +export function* checkVersionRestoreParam(): Saga { + const showVersionRestore = yield* call(Utils.hasUrlParam, "showVersionRestore"); + + if (showVersionRestore) { + yield* put(setVersionRestoreVisibilityAction(true)); + } +} + function shouldDisplaySegmentationData(): boolean { const state = Store.getState(); const currentViewMode = state.temporaryConfiguration.viewMode; @@ -181,7 +192,7 @@ export function* warnAboutSegmentationZoom(): Saga { } } - yield* take("WK_READY"); + yield* call(ensureWkReady); // Wait before showing the initial warning. Due to initialization lag it may only be visible very briefly, otherwise. yield* delay(5000); yield* warnMaybe(); @@ -227,7 +238,7 @@ export function* watchAnnotationAsync(): Saga { } export function* acquireAnnotationMutexMaybe(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); const allowUpdate = yield* select((state) => state.tracing.restrictions.allowUpdate); const annotationId = yield* select((storeState) => storeState.tracing.annotationId); if (!allowUpdate) { @@ -334,4 +345,9 @@ export function* acquireAnnotationMutexMaybe(): Saga { } yield* takeEvery("SET_OTHERS_MAY_EDIT_FOR_ANNOTATION", reactToOthersMayEditChanges); } -export default [warnAboutSegmentationZoom, watchAnnotationAsync, acquireAnnotationMutexMaybe]; +export default [ + warnAboutSegmentationZoom, + watchAnnotationAsync, + acquireAnnotationMutexMaybe, + checkVersionRestoreParam, +]; diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts b/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts index 84f4e720c6e..480df8f7071 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts @@ -11,8 +11,10 @@ import { getNextTool } from "oxalis/model/reducers/reducer_helpers"; import { getToolClassForAnnotationTool } from 
"oxalis/controller/combinations/tool_controls"; import getSceneController from "oxalis/controller/scene_controller_provider"; import { AnnotationToolEnum, MeasurementTools } from "oxalis/constants"; +import { ensureWkReady } from "./ready_sagas"; + export function* watchToolDeselection(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); let previousTool = yield* select((state) => state.uiInformation.activeTool); while (true) { diff --git a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts index b10c681ba5c..1980d5cb65f 100644 --- a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts @@ -23,6 +23,7 @@ import { type EnsureSegmentIndexIsLoadedAction, setLayerHasSegmentIndexAction, } from "../actions/dataset_actions"; +import { ensureWkReady } from "./ready_sagas"; export function* watchMaximumRenderableLayers(): Saga { function* warnMaybe(): Saga { @@ -148,7 +149,8 @@ export function* watchZ1Downsampling(): Saga { Toast.close("DOWNSAMPLING_CAUSES_BAD_QUALITY"); } } - yield* take("WK_READY"); + + yield* call(ensureWkReady); yield* call(maybeShowWarning); yield* takeLatest( ["ZOOM_IN", "ZOOM_OUT", "ZOOM_BY_DELTA", "SET_ZOOM_STEP", "SET_STORED_LAYOUTS"], diff --git a/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts b/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts index ad9b3249cf6..d8089209072 100644 --- a/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts @@ -10,9 +10,10 @@ import { getHistogramForLayer } from "admin/admin_rest_api"; import type DataLayer from "oxalis/model/data_layer"; import { Model } from "oxalis/singletons"; import type { Vector2 } from "oxalis/constants"; +import { ensureWkReady } from "./ready_sagas"; export default function* loadHistogramDataSaga(): Saga { - yield* 
take("WK_READY"); + yield* call(ensureWkReady); yield* takeEvery("RELOAD_HISTOGRAM", reloadHistogramForLayer); const dataLayers: Array = yield* call([Model, Model.getColorLayers]); diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index e67cd4aadf9..d26dabd183b 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -73,6 +73,7 @@ import { fastDiffSetAndMap, sleep } from "libs/utils"; import type { Action } from "../actions/actions"; import type { ActionPattern } from "redux-saga/effects"; import { listenToStoreProperty } from "../helpers/listener_helpers"; +import { ensureWkReady } from "./ready_sagas"; type APIMappings = Record; type Container = { value: T }; @@ -139,7 +140,7 @@ export default function* watchActivatedMappings(): Saga { }; // Buffer actions since they might be dispatched before WK_READY const setMappingActionChannel = yield* actionChannel("SET_MAPPING"); - yield* take("WK_READY"); + yield* call(ensureWkReady); yield* takeLatest(setMappingActionChannel, handleSetMapping, oldActiveMappingByLayer); yield* takeEvery( "ENSURE_LAYER_MAPPINGS_ARE_LOADED", diff --git a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts index 962438e9da0..bc56aad1fa3 100644 --- a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts @@ -1203,7 +1203,7 @@ export function* handleAdditionalCoordinateUpdate(): Saga { // We want to prevent iterating through all additional coordinates to adjust the mesh visibility, so we store the // previous additional coordinates in this method. Thus we have to catch SET_ADDITIONAL_COORDINATES actions in a // while-true loop and register this saga in the root saga instead of calling from the mesh saga. 
- yield* take("WK_READY"); + yield* call(ensureWkReady); let previousAdditionalCoordinates = yield* select((state) => state.flycam.additionalCoordinates); const { segmentMeshController } = yield* call(getSceneController); diff --git a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts index 49e160331f1..56ae60380aa 100644 --- a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts @@ -21,6 +21,7 @@ import { Model } from "oxalis/singletons"; import type { Vector3 } from "oxalis/constants"; import constants from "oxalis/constants"; import { WkDevFlags } from "oxalis/api/wk_dev"; +import { ensureWkReady } from "./ready_sagas"; const PREFETCH_THROTTLE_TIME = 50; const DIRECTION_VECTOR_SMOOTHER = 0.125; @@ -28,7 +29,8 @@ const prefetchStrategiesArbitrary = [new PrefetchStrategyArbitrary()]; const prefetchStrategiesPlane = [new PrefetchStrategySkeleton(), new PrefetchStrategyVolume()]; export function* watchDataRelevantChanges(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); + const previousProperties = {}; // Initiate the prefetching once and then only for data relevant changes yield* call(triggerDataPrefetching, previousProperties); diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 6f93c64d76d..8f38a2f68bc 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -79,6 +79,7 @@ import { takeEveryUnlessBusy } from "./saga_helpers"; import type { Action } from "../actions/actions"; import { isBigInt, isNumberMap, SoftError } from "libs/utils"; import { getCurrentMag } from "../accessors/flycam_accessor"; +import { ensureWkReady } from "./ready_sagas"; function runSagaAndCatchSoftError(saga: (...args: any[]) => Saga) { return function* (...args: any[]) { @@ 
-96,7 +97,8 @@ function runSagaAndCatchSoftError(saga: (...args: any[]) => Saga) { export default function* proofreadRootSaga(): Saga { yield* take("INITIALIZE_SKELETONTRACING"); - yield* take("WK_READY"); + yield* call(ensureWkReady); + yield* takeEveryUnlessBusy( ["DELETE_EDGE", "MERGE_TREES", "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS"], runSagaAndCatchSoftError(handleSkeletonProofreadingAction), diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 79137a1a781..299024f09f0 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -394,7 +394,7 @@ export function* setupSavingForTracingType( | SkeletonTracing; let prevFlycam = yield* select((state) => state.flycam); let prevTdCamera = yield* select((state) => state.viewModeData.plane.tdCamera); - yield* take("WK_READY"); + yield* call(ensureWkReady); while (true) { if (saveQueueType === "skeleton") { diff --git a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index ffb3b6b68d7..f532b4d0021 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -57,7 +57,6 @@ import { setPositionAction, setRotationAction, } from "oxalis/model/actions/flycam_actions"; -import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import DiffableMap, { diffDiffableMaps } from "libs/diffable_map"; import EdgeCollection, { diffEdgeCollections } from "oxalis/model/edge_collection"; import ErrorHandling from "libs/error_handling"; @@ -86,6 +85,7 @@ import { } from "oxalis/model/actions/connectome_actions"; import type { ServerSkeletonTracing } from "types/api_flow_types"; import memoizeOne from "memoize-one"; +import { ensureWkReady } from "./ready_sagas"; function* centerActiveNode(action: Action): Saga { 
if ("suppressCentering" in action && action.suppressCentering) { @@ -215,18 +215,12 @@ export function* watchTreeNames(): Saga { } } } -export function* checkVersionRestoreParam(): Saga { - const showVersionRestore = yield* call(Utils.hasUrlParam, "showVersionRestore"); - if (showVersionRestore) { - yield* put(setVersionRestoreVisibilityAction(true)); - } -} export function* watchAgglomerateLoading(): Saga { // Buffer actions since they might be dispatched before WK_READY const channel = yield* actionChannel("LOAD_AGGLOMERATE_SKELETON"); yield* take("INITIALIZE_SKELETONTRACING"); - yield* take("WK_READY"); + yield* call(ensureWkReady); yield* takeEvery(channel, loadAgglomerateSkeletonWithId); } export function* watchConnectomeAgglomerateLoading(): Saga { @@ -475,7 +469,6 @@ export function* watchSkeletonTracingAsync(): Saga { yield* throttle(5000, "PUSH_SAVE_QUEUE_TRANSACTION", watchTracingConsistency); yield* fork(watchFailedNodeCreations); yield* fork(watchBranchPointDeletion); - yield* fork(checkVersionRestoreParam); } function* diffNodes( diff --git a/frontend/javascripts/oxalis/model/sagas/task_saga.tsx b/frontend/javascripts/oxalis/model/sagas/task_saga.tsx index 05c86faa522..57d39051852 100644 --- a/frontend/javascripts/oxalis/model/sagas/task_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/task_saga.tsx @@ -26,6 +26,7 @@ import Store, { type RecommendedConfiguration } from "oxalis/store"; import Toast from "libs/toast"; import messages from "messages"; import renderIndependently from "libs/render_independently"; +import { ensureWkReady } from "./ready_sagas"; function* maybeShowNewTaskTypeModal(taskType: APITaskType): Saga { // Users can acquire new tasks directly in the tracing view. 
Occasionally, @@ -130,7 +131,7 @@ function* maybeActivateMergerMode(taskType: APITaskType): Saga { } export default function* watchTasksAsync(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); const task = yield* select((state) => state.task); const activeUser = yield* select((state) => state.activeUser); const allowUpdate = yield* select((state) => state.tracing.restrictions.allowUpdate); @@ -201,7 +202,7 @@ export function* warnAboutMagRestriction(): Saga { } } - yield* take("WK_READY"); + yield* call(ensureWkReady); // Wait before showing the initial warning. Due to initialization lag it may only be visible very briefly, otherwise. yield* delay(5000); yield* warnMaybe(); diff --git a/frontend/javascripts/oxalis/model/sagas/undo_saga.ts b/frontend/javascripts/oxalis/model/sagas/undo_saga.ts index fc88d2a3e1b..89fdeaef6a7 100644 --- a/frontend/javascripts/oxalis/model/sagas/undo_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/undo_saga.ts @@ -48,6 +48,7 @@ import { Model } from "oxalis/singletons"; import type { SegmentGroup, SegmentMap, SkeletonTracing, UserBoundingBox } from "oxalis/store"; import type { Task } from "redux-saga"; import { actionChannel, all, call, delay, fork, join, put, take } from "typed-redux-saga"; +import { ensureWkReady } from "./ready_sagas"; const UndoRedoRelevantBoundingBoxActions = AllUserBoundingBoxActions.filter( (action) => action !== "SET_USER_BOUNDING_BOXES", @@ -175,7 +176,7 @@ export function* manageUndoStates(): Saga { } > = {}; - yield* take("WK_READY"); + yield* call(ensureWkReady); // Initialization of the local state variables from above. 
prevSkeletonTracingOrNull = yield* select((state) => state.tracing.skeleton); diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index c2afb5c4f2b..3a20d7ec931 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -113,11 +113,12 @@ import maybeInterpolateSegmentationLayer from "./volume/volume_interpolation_sag import messages from "messages"; import { pushSaveQueueTransaction } from "../actions/save_actions"; import type { ActionPattern } from "redux-saga/effects"; +import { ensureWkReady } from "./ready_sagas"; const OVERWRITE_EMPTY_WARNING_KEY = "OVERWRITE-EMPTY-WARNING"; export function* watchVolumeTracingAsync(): Saga { - yield* take("WK_READY"); + yield* call(ensureWkReady); yield* takeEveryUnlessBusy( "INTERPOLATE_SEGMENTATION_LAYER", maybeInterpolateSegmentationLayer, From f8b830b6b832a8f857edb54a26a0dc635e7307c8 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 10 Dec 2024 13:56:37 +0100 Subject: [PATCH 296/361] remove unused imports --- frontend/javascripts/oxalis/model/sagas/dataset_saga.ts | 2 +- .../javascripts/oxalis/model/sagas/load_histogram_data_saga.ts | 2 +- frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts index 1980d5cb65f..9ba779b6de3 100644 --- a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts @@ -1,4 +1,4 @@ -import { call, put, take, takeEvery, takeLatest } from "typed-redux-saga"; +import { call, put, takeEvery, takeLatest } from "typed-redux-saga"; import { sum } from "lodash"; import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from 
"oxalis/model/sagas/effect-generators"; diff --git a/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts b/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts index d8089209072..9cee9a4ba36 100644 --- a/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts @@ -1,6 +1,6 @@ import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from "oxalis/model/sagas/effect-generators"; -import { call, take, takeEvery, put } from "typed-redux-saga"; +import { call, takeEvery, put } from "typed-redux-saga"; import { setHistogramDataForLayerAction, updateLayerSettingAction, diff --git a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts index 56ae60380aa..d06315bedd6 100644 --- a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts @@ -8,7 +8,7 @@ import { } from "oxalis/model/bucket_data_handling/prefetch_strategy_plane"; import { getGlobalDataConnectionInfo } from "oxalis/model/data_connection_info"; import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { throttle, call, take } from "typed-redux-saga"; +import { throttle, call } from "typed-redux-saga"; import { select } from "oxalis/model/sagas/effect-generators"; import { getPosition, From 05a59565c01dac52cef4a04cae76004e8d653e28 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 10 Dec 2024 15:48:47 +0100 Subject: [PATCH 297/361] use toasts related to proofreading undo --- frontend/javascripts/messages.tsx | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/messages.tsx b/frontend/javascripts/messages.tsx index bdfbd7dd051..98ef35199a2 100644 --- a/frontend/javascripts/messages.tsx +++ b/frontend/javascripts/messages.tsx @@ -130,8 +130,9 @@ A reload is necessary to return to a valid 
state.`, "There is no action that could be undone. However, if you want to restore an earlier version of this annotation, use the 'Restore Older Version' functionality in the dropdown next to the 'Save' button.", "undo.no_redo": "There is no action that could be redone.", "undo.no_undo_during_proofread": - "Undo is not supported during proofreading yet. Please manually revert the last action you took.", - "undo.no_redo_during_proofread": "Redo is not supported during proofreading yet.", + "Undo is not supported during proofreading yet. Please use the 'Restore Older Version' functionality in the dropdown next to the 'Save' button.", + "undo.no_redo_during_proofread": + "Redo is not supported during proofreading yet. Please use the 'Restore Older Version' functionality in the dropdown next to the 'Save' button.", "undo.import_volume_tracing": "Importing a volume annotation cannot be undone. However, if you want to restore an earlier version of this annotation, use the 'Restore Older Version' functionality in the dropdown next to the 'Save' button.", "download.wait": "Please wait...", From 47eda7852d1f64fc72abc2221e59e414deed4e80 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 11 Dec 2024 09:47:30 +0100 Subject: [PATCH 298/361] add custom start time argument to migration --- tools/migration-unified-annotation-versioning/main.py | 3 ++- tools/migration-unified-annotation-versioning/migration.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 221e1646142..8f8e3029114 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -18,7 +18,8 @@ def main(): parser.add_argument("--dry", help="Only read and process data, do not write out results", action="store_true") parser.add_argument("--num_threads", help="Number of threads to migrate the annotations in parallel", 
type=int, default=1) parser.add_argument("--postgres", help="Postgres connection specifier.", type=str, default="postgres@localhost:5432/webknossos") - parser.add_argument("--previous_start", help="Previous run start time. Example: 2024-11-27 10:37:30.171083", type=str) + parser.add_argument("--previous_start", help="Previous run start time. Only annotations last modified after that time will be migrated. Use for second run in incremental migration. Example: 2024-11-27 10:37:30.171083", type=str) + parser.add_argument("--start", help="Run “start time”. Only annotations last modified before that time will be migrated. Defaults to now. Change if FossilDB content is not up to date with postgres. Example: 2024-11-27 10:37:30.171083", type=str) parser.add_argument("--count_versions", help="Instead of migrating, only count materialized versions of the annotation", action="store_true") parser.add_argument("--previous_checkpoints", help="Supply checkpoints file of a previous run to resume", type=str) parser.add_argument("--verbose", "-v", help="Print for every annotation", action="store_true") diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 7947a72c9cf..348c45aaf19 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -433,7 +433,9 @@ def skeleton_may_have_pending_updates(self, annotation) -> bool: def read_annotation_list(self): checkpoint_set = self.read_checkpoints() before = time.time() - start_time = datetime.datetime.now() + start_time = str(datetime.datetime.now()) + if self.args.start is not None: + start_time = self.args.start previous_start_label = "" previous_start_query = "" if self.args.previous_start is not None: From bfc5cf658364a1298201c1a4600d5ecf5bc45f75 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 11 Dec 2024 10:58:43 +0100 Subject: [PATCH 299/361] set 
editableMappingsMayHavePendingUpdates in migration --- .../Annotation_pb2.py | 12 ++++++------ .../migration.py | 4 +++- webknossos-datastore/proto/Annotation.proto | 1 + 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/Annotation_pb2.py b/tools/migration-unified-annotation-versioning/Annotation_pb2.py index d8297d55911..9b1dc62ed74 100644 --- a/tools/migration-unified-annotation-versioning/Annotation_pb2.py +++ b/tools/migration-unified-annotation-versioning/Annotation_pb2.py @@ -13,17 +13,17 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\xd9\x01\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\x12%\n\x1dskeletonMayHavePendingUpdates\x18\x05 \x01(\x08\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x04 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\x88\x02\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\x12%\n\x1dskeletonMayHavePendingUpdates\x18\x05 \x01(\x08\x12-\n%editableMappingsMayHavePendingUpdates\x18\x06 \x01(\x08\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 
\x02(\t\x12N\n\x04type\x18\x04 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'Annotation_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - _ANNOTATIONLAYERTYPEPROTO._serialized_start=418 - _ANNOTATIONLAYERTYPEPROTO._serialized_end=470 + _ANNOTATIONLAYERTYPEPROTO._serialized_start=465 + _ANNOTATIONLAYERTYPEPROTO._serialized_end=517 _ANNOTATIONPROTO._serialized_start=61 - _ANNOTATIONPROTO._serialized_end=278 - _ANNOTATIONLAYERPROTO._serialized_start=281 - _ANNOTATIONLAYERPROTO._serialized_end=416 + _ANNOTATIONPROTO._serialized_end=325 + _ANNOTATIONLAYERPROTO._serialized_start=328 + _ANNOTATIONLAYERPROTO._serialized_end=463 # @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 348c45aaf19..5df336d671a 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -393,6 +393,7 @@ def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mappi def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int], mapping_id_map: MappingIdMap): skeleton_may_have_pending_updates = self.skeleton_may_have_pending_updates(annotation) + editable_mapping_may_have_pending_updates = bool(mapping_id_map) # same problem as with skeletons, see comment there earliest_accessible_version = 0 if len(mapping_id_map) > 0: # An editable mapping exists in this annotation. 
@@ -408,6 +409,8 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Se annotationProto.earliestAccessibleVersion = earliest_accessible_version if skeleton_may_have_pending_updates: annotationProto.skeletonMayHavePendingUpdates = True + if editable_mapping_may_have_pending_updates: + annotationProto.editableMappingsMayHavePendingUpdates = True for tracing_id, tracing_type in annotation["layers"].items(): layer_proto = AnnotationProto.AnnotationLayerProto() layer_proto.tracingId = tracing_id @@ -524,4 +527,3 @@ def setup_checkpoint_logging(self): logger.info(f"Writing checkpoint file at {checkpoint_file}") checkpoints_file_handler = logging.FileHandler(checkpoint_file) checkpoint_logger.addHandler(checkpoints_file_handler) - diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 9e276a67a42..09878eaba80 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -13,6 +13,7 @@ message AnnotationProto { repeated AnnotationLayerProto annotationLayers = 3; required int64 earliestAccessibleVersion = 4; optional bool skeletonMayHavePendingUpdates = 5; // relevant only for annotations migrated by https://github.com/scalableminds/webknossos/pull/7917 + optional bool editableMappingsMayHavePendingUpdates = 6; // relevant only for annotations migrated by https://github.com/scalableminds/webknossos/pull/7917 } message AnnotationLayerProto { From aa327d07b046a497e9e749a7c3cf75812051c120 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 11 Dec 2024 11:17:51 +0100 Subject: [PATCH 300/361] fetch extra editable mapping updates if needed --- .../annotation/AnnotationWithTracings.scala | 2 + .../annotation/TSAnnotationService.scala | 41 ++++++++++++++++++- 2 files changed, 41 insertions(+), 2 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index ea7c1cc0837..78cb9c4aedb 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -49,6 +49,8 @@ case class AnnotationWithTracings( def getSkeletonId: Option[String] = getSkeletons.headOption.map(_._1) + def getEditableMappingTracingIds: List[String] = editableMappingsByTracingId.keys.toList + def getEditableMappingsInfo: List[(String, EditableMappingInfo)] = editableMappingsByTracingId.view.flatMap { case (id, (info: EditableMappingInfo, _)) => Some(id, info) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index f5aed68599d..479921e342b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -120,9 +120,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def currentMaterializedVersion(annotationId: String): Fox[Long] = tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - def currentMaterializedSkeletonVersion(tracingId: String): Fox[Long] = + private def currentMaterializedSkeletonVersion(tracingId: String): Fox[Long] = tracingDataStore.skeletons.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + private def currentMaterializedEditableMappingVersion(tracingId: String): Fox[Long] = + tracingDataStore.editableMappingsInfo.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + private 
def getNewestMaterialized(annotationId: String): Fox[AnnotationProto] = for { keyValuePair <- tracingDataStore.annotations.get[AnnotationProto](annotationId, mayBeEmpty = Some(true))( @@ -296,6 +299,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = for { extraSkeletonUpdates <- findExtraSkeletonUpdates(annotationId, annotation) + extraEditableMappingUpdates <- findExtraEditableMappingUpdates(annotationId, annotation) existingVersion = annotation.version pendingAnnotationUpdates <- if (desiredVersion == existingVersion) Fox.successful(List.empty) else { @@ -304,7 +308,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss Some(desiredVersion), Some(existingVersion + 1))(fromJsonBytes[List[UpdateAction]]) } - } yield extraSkeletonUpdates ++ pendingAnnotationUpdates + } yield extraSkeletonUpdates ++ extraEditableMappingUpdates ++ pendingAnnotationUpdates /* * The migration of https://github.com/scalableminds/webknossos/pull/7917 does not guarantee that the skeleton layer @@ -340,6 +344,39 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss (version, updateGroupFiltered) } + // Same problem as with skeletons, see comment above + // Note that the EditableMappingUpdaters are passed only the “oldVersion” that is the materialized annotation version + // not the actual materialized editableMapping version, but that should yield the same data when loading from fossil. 
+ private def findExtraEditableMappingUpdates(annotationId: String, annotation: AnnotationWithTracings)( + implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = + if (annotation.annotation.skeletonMayHavePendingUpdates.getOrElse(false)) { + for { + updatesByEditableMapping <- Fox.serialCombined(annotation.getEditableMappingTracingIds) { tracingId => + for { + materializedEditableMappingVersion <- currentMaterializedEditableMappingVersion(tracingId) + extraUpdates <- if (materializedEditableMappingVersion < annotation.version) { + tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( + annotationId, + Some(annotation.version), + Some(materializedEditableMappingVersion + 1))(fromJsonBytes[List[UpdateAction]]) + } else Fox.successful(List.empty) + extraUpdatesForThisMapping = filterEditableMappingUpdates(extraUpdates, tracingId) + } yield extraUpdatesForThisMapping + } + } yield updatesByEditableMapping.flatten + } else Fox.successful(List.empty) + + private def filterEditableMappingUpdates(updateGroups: List[(Long, List[UpdateAction])], + tracingId: String): List[(Long, List[EditableMappingUpdateAction])] = + updateGroups.map { + case (version, updateGroup) => + val updateGroupFiltered = updateGroup.flatMap { + case a: EditableMappingUpdateAction if a.actionTracingId == tracingId => Some(a) + case _ => None + } + (version, updateGroupFiltered) + } + private def findTracingsForAnnotation(annotation: AnnotationProto)( implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { val skeletonTracingIds = From 2c746685e38c98d5df291879fa645d7bb5e8a6b3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 11 Dec 2024 11:35:04 +0100 Subject: [PATCH 301/361] fix downloading older volumes. changelog + migration guide. 
--- .codespellrc | 2 +- CHANGELOG.unreleased.md | 4 +++- MIGRATIONS.unreleased.md | 3 +++ app/models/annotation/WKRemoteTracingStoreClient.scala | 2 +- .../controllers/VolumeTracingController.scala | 9 +++++++-- .../com.scalableminds.webknossos.tracingstore.routes | 2 +- 6 files changed, 16 insertions(+), 6 deletions(-) diff --git a/.codespellrc b/.codespellrc index 1a7bf949281..5f3a5847be4 100644 --- a/.codespellrc +++ b/.codespellrc @@ -1,6 +1,6 @@ [codespell] # Ref: https://github.com/codespell-project/codespell#using-a-config-file -skip = *.svg,*.sublime-workspace,*.lock,.codespellrc,./util/target/,./binaryData,./node_modules,./pg,./project/target,./target,./webknossos-datastore/target,./webknossos-jni/target,./webknossos-tracingstore/target,./util/target,./coverage,./public-test,./tools/proxy/node_modules,./docs/publications.md,./public/bundle +skip = *.svg,*.sublime-workspace,*.lock,.codespellrc,./util/target/,./binaryData,./node_modules,./pg,./project/target,./target,./webknossos-datastore/target,./webknossos-jni/target,./webknossos-tracingstore/target,./util/target,./coverage,./public-test,./tools/proxy/node_modules,./docs/publications.md,./public/bundle,./tools/migration-unified-annotation-versioning/venv # some names and camelCased variables etc ignore-regex = \b([a-z]+[A-Z][a-zA-Z]*|H Mattern|Manuel|Nat Commun)\b ignore-words-list = lod,nd,ue diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 42b34c92b86..e50c8975afd 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -12,6 +12,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Added - Added the total volume of a dataset to a tooltip in the dataset info tab. [#8229](https://github.com/scalableminds/webknossos/pull/8229) +- When using the “Restore older Version” feature, there are no longer separate tabs for the different annotation layers. 
Only one linear annotation history is now used, and if you revert to an older version, all layers are reverted. If layers were added/deleted since then, that is also reverted. This also means that proofreading annotations can now be reverted to older versions as well. The description text of annotations is now versioned as well. [#7917](https://github.com/scalableminds/webknossos/pull/7917) ### Changed - Renamed "resolution" to "magnification" in more places within the codebase, including local variables. [#8168](https://github.com/scalableminds/webknossos/pull/8168) @@ -23,7 +24,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Fixed - Fixed that listing datasets with the `api/datasets` route without compression failed due to missing permissions regarding public datasets. [#8249](https://github.com/scalableminds/webknossos/pull/8249) - Fixed a bug that uploading a zarr dataset with an already existing `datasource-properties.json` file failed. [#8268](https://github.com/scalableminds/webknossos/pull/8268) -- Fixed that the frontend did not ensure a minium length for annotation layer names. Moreover, names starting with a `.` are also disallowed now. [#8244](https://github.com/scalableminds/webknossos/pull/8244) +- Fixed that the frontend did not ensure a minimum length for annotation layer names. Moreover, names starting with a `.` are also disallowed now. [#8244](https://github.com/scalableminds/webknossos/pull/8244) - Fixed a bug where in the add remote dataset view the dataset name setting was not in sync with the datasource setting of the advanced tab making the form not submittable. [#8245](https://github.com/scalableminds/webknossos/pull/8245) - Fix read and update dataset route for versions 8 and lower. [#8263](https://github.com/scalableminds/webknossos/pull/8263) - Added missing legacy support for `isValidNewName` route. 
[#8252](https://github.com/scalableminds/webknossos/pull/8252) @@ -32,5 +33,6 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Removed - Removed support for HTTP API versions 3 and 4. [#8075](https://github.com/scalableminds/webknossos/pull/8075) +- Removed the feature to downsample existing volume annotations. All new volume annotations had a whole mag stack since [#4755](https://github.com/scalableminds/webknossos/pull/4755) (four years ago). [#7917](https://github.com/scalableminds/webknossos/pull/7917) ### Breaking Changes diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 681f5a88093..824491a6cf4 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -8,6 +8,9 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). ## Unreleased [Commits](https://github.com/scalableminds/webknossos/compare/24.12.0...HEAD) - Removed support for HTTP API versions 3 and 4. [#8075](https://github.com/scalableminds/webknossos/pull/8075) +- The migration route `addSegmentIndex` was removed. If you haven’t done this yet, but need segment indices for your volume annotations, upgrade to an earlier version first, call addSegmentIndex, and then upgrade again. [#7917](https://github.com/scalableminds/webknossos/pull/7917) +- FossilDB must now be opened with new column family set `skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates`. [#7917](https://github.com/scalableminds/webknossos/pull/7917) +- The FossilDB content needs to be migrated. For that, use the python program at `tools/migration-unified-annotation-versioning` (see python main.py --help for instructions). Note that it writes to a completely new FossilDB, that must first be opened with the new column families, see above. The migration code needs to connect to postgres, to the old FossilDB and to the new. 
After the migration, replace the old FossilDB by the new one. The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. [#7917](https://github.com/scalableminds/webknossos/pull/7917) ### Postgres Evolutions: - [124-decouple-dataset-directory-from-name](conf/evolutions/124-decouple-dataset-directory-from-name) diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 21aafa0d73b..30c85a586e7 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -201,7 +201,7 @@ class WKRemoteTracingStoreClient( def getVolumeTracing(annotationId: ObjectId, annotationLayer: AnnotationLayer, - version: Option[Long] = None, + version: Option[Long], skipVolumeData: Boolean, volumeDataZipFormat: VolumeDataZipFormat, voxelSize: Option[VoxelSize]): Fox[FetchedAnnotationLayer] = { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 20af5081f6d..342491032bf 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -185,6 +185,8 @@ class VolumeTracingController @Inject()( } def allDataZip(tracingId: String, + annotationId: Option[String], + version: Option[Long], volumeDataZipFormat: String, voxelSizeFactor: Option[String], voxelSizeUnit: Option[String]): Action[AnyContent] = @@ -192,8 +194,11 @@ class VolumeTracingController @Inject()( log() { 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) - tracing <- annotationService.findVolume(annotationId, tracingId) ?~> Messages("tracing.notFound") + _ <- bool2Fox(version.isDefined == annotationId.isDefined) ?~> "Volume data request with version needs passed annotationId" + annotationIdFilled <- Fox.fillOption(annotationId)( + remoteWebknossosClient.getAnnotationIdForTracing(tracingId)) + tracing <- annotationService.findVolume(annotationIdFilled, tracingId, version) ?~> Messages( + "tracing.notFound") volumeDataZipFormatParsed <- VolumeDataZipFormat.fromString(volumeDataZipFormat).toFox voxelSizeFactorParsedOpt <- Fox.runOptional(voxelSizeFactor)(Vec3Double.fromUriLiteral) voxelSizeUnitParsedOpt <- Fox.runOptional(voxelSizeUnit)(LengthUnit.fromString) diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 160d0f9b779..19b7ce3a685 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -20,7 +20,7 @@ POST /volume/save POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minMag: Option[Int], maxMag: Option[Int]) POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, annotationId: String, version: Option[Long]) -GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, volumeDataZipFormat: String, 
voxelSize: Option[String], voxelSizeUnit: Option[String]) +GET /volume/:tracingId/allDataZip @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataZip(tracingId: String, annotationId: Option[String], version: Option[Long], volumeDataZipFormat: String, voxelSize: Option[String], voxelSizeUnit: Option[String]) POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String, annotationId: String) POST /volume/:tracingId/adHocMesh @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(tracingId: String) POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(tracingId: String) From 7695e46f29385240c4284226e36902676bcd3cfc Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 11 Dec 2024 11:42:17 +0100 Subject: [PATCH 302/361] fix assertion in volume download --- .../tracingstore/controllers/VolumeTracingController.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 342491032bf..fda7eb50fd2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -194,7 +194,7 @@ class VolumeTracingController @Inject()( log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { for { - _ <- bool2Fox(version.isDefined == annotationId.isDefined) ?~> "Volume data request with version needs passed annotationId" + _ <- bool2Fox(if (version.isDefined) annotationId.isDefined else true) ?~> "Volume data 
request with version needs passed annotationId" annotationIdFilled <- Fox.fillOption(annotationId)( remoteWebknossosClient.getAnnotationIdForTracing(tracingId)) tracing <- annotationService.findVolume(annotationIdFilled, tracingId, version) ?~> Messages( From 7332d786f6027355851350eb3354663d9534105f Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 11 Dec 2024 11:45:02 +0100 Subject: [PATCH 303/361] remove debug logging --- .../tracingstore/annotation/UpdateGroupHandling.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala index d363dc0b20a..831688fd1d9 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/UpdateGroupHandling.scala @@ -50,13 +50,11 @@ trait UpdateGroupHandling extends LazyLogging { (collectedAndNextVersion: (Seq[Seq[UpdateAction]], Long), updateGroupWithVersion) => val collected = collectedAndNextVersion._1 val nextVersion = collectedAndNextVersion._2 - logger.info(s"nextVersion: $nextVersion") if (updateGroupWithVersion._1 > nextVersion) { // We have not yet reached nextVersion. Skip to next element, Do not collect, do not change nextVersion (collected, nextVersion) } else { val revertSourceVersionOpt = revertSourceVersionFromUpdates(updateGroupWithVersion._2) - logger.info(f"revertSourceVersionOpt: $revertSourceVersionOpt") revertSourceVersionOpt match { // This group is a revert action. 
Set nextVersion to revertSourceVersion, do not collect this group case Some(revertSourceVersion) => (collected, revertSourceVersion) From 78eb6ff67055d3440c6e0a75f0f38ff31fd0cb15 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 11 Dec 2024 12:02:14 +0100 Subject: [PATCH 304/361] fix typing and CI --- frontend/javascripts/admin/admin_rest_api.ts | 15 +++++------ .../explorative_annotations_view.tsx | 1 + .../accessors/skeletontracing_accessor.ts | 4 +-- .../oxalis/model_initialization.ts | 25 +++++++++++++------ .../oxalis/view/jobs/train_ai_model.tsx | 4 +-- .../left-border-tabs/layer_settings_tab.tsx | 7 +++--- .../dataset_info_tab_view.tsx | 12 ++------- .../backend-snapshot-tests/annotations.e2e.ts | 4 +-- .../skeletontracing_server_objects.ts | 11 ++++---- .../fixtures/tasktracing_server_objects.ts | 7 +++--- .../fixtures/volumetracing_server_objects.ts | 7 +++--- frontend/javascripts/types/api_flow_types.ts | 6 +++-- 12 files changed, 56 insertions(+), 47 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index f80da9b70e5..e0f0aa3b93a 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -61,7 +61,7 @@ import { type LayerLink, type VoxelSize, type APITimeTrackingPerUser, - AnnotationLayerType, + AnnotationLayerEnum, type APITracingStoreAnnotation, } from "types/api_flow_types"; import type { AnnotationTypeFilterEnum, LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; @@ -532,7 +532,7 @@ export function setOthersMayEditForAnnotation( } type AnnotationLayerCreateDescriptor = { - typ: AnnotationLayerType; + typ: AnnotationLayerEnum; name: string | null | undefined; autoFallbackLayer?: boolean; fallbackLayerName?: string | null | undefined; @@ -596,6 +596,7 @@ export function duplicateAnnotation( }); } +// todop: rename to getUnversionedAnnotationInformation export async function getMaybeOutdatedAnnotationInformation( 
annotationId: string, options: RequestOptions = {}, @@ -649,14 +650,14 @@ export function createExplorational( if (typ === "skeleton") { layers = [ { - typ: AnnotationLayerType.Skeleton, + typ: AnnotationLayerEnum.Skeleton, name: "Skeleton", }, ]; } else if (typ === "volume") { layers = [ { - typ: AnnotationLayerType.Volume, + typ: AnnotationLayerEnum.Volume, name: fallbackLayerName, fallbackLayerName, autoFallbackLayer, @@ -667,11 +668,11 @@ export function createExplorational( } else { layers = [ { - typ: AnnotationLayerType.Skeleton, + typ: AnnotationLayerEnum.Skeleton, name: "Skeleton", }, { - typ: AnnotationLayerType.Volume, + typ: AnnotationLayerEnum.Volume, name: fallbackLayerName, fallbackLayerName, autoFallbackLayer, @@ -689,7 +690,7 @@ export async function getTracingsForAnnotation( version?: number | null | undefined, ): Promise> { const skeletonLayers = annotation.annotationLayers.filter( - (layer) => layer.typ === AnnotationLayerType.Skeleton, + (layer) => layer.typ === AnnotationLayerEnum.Skeleton, ); const fullAnnotationLayers = await Promise.all( annotation.annotationLayers.map((layer) => diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 9576a2324a3..2f34f59a8e7 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -707,6 +707,7 @@ class ExplorativeAnnotationsView extends React.PureComponent { title: "Stats", width: 150, render: (__: any, annotation: APIAnnotationInfo) => ( + // todop: don't use annotation.stats ), }, diff --git a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts index 4384b1af562..44eb0c995ef 100644 --- a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts +++ 
b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts @@ -5,7 +5,7 @@ import { type ServerSkeletonTracing, type APIAnnotation, type AnnotationLayerDescriptor, - AnnotationLayerType, + AnnotationLayerEnum, } from "types/api_flow_types"; import type { Tracing, @@ -42,7 +42,7 @@ export function getSkeletonDescriptor( annotation: APIAnnotation, ): AnnotationLayerDescriptor | null | undefined { const skeletonLayers = annotation.annotationLayers.filter( - (descriptor) => descriptor.typ === AnnotationLayerType.Skeleton, + (descriptor) => descriptor.typ === AnnotationLayerEnum.Skeleton, ); if (skeletonLayers.length > 0) { diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index f864ed035a2..3bc2037fff1 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -138,21 +138,30 @@ export async function initialize( annotation = await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType); } else { // todop: can we improve this? - let maybeOutdatedAnnotation = await getMaybeOutdatedAnnotationInformation(annotationId); + let unversionedAnnotation = await getMaybeOutdatedAnnotationInformation(annotationId); annotationProto = await getAnnotationProto( - maybeOutdatedAnnotation.tracingStore.url, - maybeOutdatedAnnotation.id, + unversionedAnnotation.tracingStore.url, + unversionedAnnotation.id, version, ); - const layersWithStats = annotationProto.annotationLayers.map((layer) => { + const layersWithStats = annotationProto.annotationLayers.map((protoLayer) => { return { - tracingId: layer.tracingId, - name: layer.name, - typ: layer.type, + tracingId: protoLayer.tracingId, + name: protoLayer.name, + typ: protoLayer.type, + stats: + // Only when the newest version is requested (version==null), + // the stats are available in unversionedAnnotation. + version == null + ? 
_.find( + unversionedAnnotation.annotationLayers, + (layer) => layer.tracingId === protoLayer.tracingId, + )?.stats ?? {} + : {}, }; }); const completeAnnotation = { - ...maybeOutdatedAnnotation, + ...unversionedAnnotation, description: annotationProto.description, annotationProto: annotationProto.earliestAccessibleVersion, annotationLayers: layersWithStats, diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index 1390031e0c2..d11d9e8b522 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -35,7 +35,7 @@ import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import { formatVoxels } from "libs/format_utils"; import * as Utils from "libs/utils"; import { - AnnotationLayerType, + AnnotationLayerEnum, type APIAnnotation, type APIDataset, type ServerVolumeTracing, @@ -574,7 +574,7 @@ function AnnotationsCsvInput({ let userBoundingBoxes = volumeTracings[0]?.userBoundingBoxes; if (!userBoundingBoxes) { const skeletonLayer = annotation.annotationLayers.find( - (layer) => layer.typ === AnnotationLayerType.Skeleton, + (layer) => layer.typ === AnnotationLayerEnum.Skeleton, ); if (skeletonLayer) { const skeletonTracing = await getTracingForAnnotationType(annotation, skeletonLayer); diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 8645408a2a4..b83cc8664c1 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -21,8 +21,9 @@ import _ from "lodash"; import classnames from "classnames"; import update from "immutability-helper"; import { - AnnotationLayerType, + type AnnotationLayerType, APIAnnotationTypeEnum, + AnnotationLayerEnum, type APIDataLayer, type APIDataset, type 
APISkeletonLayer, @@ -629,7 +630,7 @@ class DatasetSettings extends React.PureComponent { const layer = getLayerByName(dataset, layerName); const isSegmentation = layer.category === "segmentation"; const layerType = - layer.category === "segmentation" ? AnnotationLayerType.Volume : AnnotationLayerType.Skeleton; + layer.category === "segmentation" ? AnnotationLayerEnum.Volume : AnnotationLayerEnum.Skeleton; const canBeMadeEditable = isSegmentation && layer.tracingId == null && this.props.controlMode === "TRACE"; const isVolumeTracing = isSegmentation ? layer.tracingId != null : false; @@ -1228,7 +1229,7 @@ class DatasetSettings extends React.PureComponent { {!isOnlyAnnotationLayer ? this.getDeleteAnnotationLayerButton( readableName, - AnnotationLayerType.Skeleton, + AnnotationLayerEnum.Skeleton, tracingId, ) : null} diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index 789543c974f..70c03b8adf2 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -1,7 +1,7 @@ import type { Dispatch } from "redux"; import { Typography, Tag } from "antd"; import { SettingOutlined, InfoCircleOutlined, EditOutlined } from "@ant-design/icons"; -import { connect, useSelector } from "react-redux"; +import { connect } from "react-redux"; import Markdown from "libs/markdown_adapter"; import React, { type CSSProperties } from "react"; import { Link } from "react-router-dom"; @@ -210,8 +210,6 @@ export function AnnotationStats({ asInfoBlock: boolean; withMargin?: boolean | null | undefined; }) { - const dataset = useSelector((state: OxalisState) => state.dataset); - const annotation = useSelector((state: OxalisState) => state.tracing); if (!stats || Object.keys(stats).length === 0) return null; const formatLabel = (str: string) => (asInfoBlock ? 
str : ""); const useStyleWithMargin = withMargin != null ? withMargin : true; @@ -220,12 +218,6 @@ export function AnnotationStats({ const skeletonStats = getSkeletonStats(stats); const volumeStats = getVolumeStats(stats); const totalSegmentCount = volumeStats.reduce((sum, [_, volume]) => sum + volume.segmentCount, 0); - const segmentCountDetails = volumeStats - .map( - ([layerName, volume]) => - `

${getReadableNameForLayerName(dataset, annotation, layerName)}: ${volume.segmentCount} ${pluralize("Segment", volume.segmentCount)}

`, - ) - .join(""); return (
0 ? ( { t.is(editedAnnotation.name, newName); t.is(editedAnnotation.visibility, newVisibility); t.is(editedAnnotation.id, annotationId); - t.is(editedAnnotation.annotationLayers[0].typ, AnnotationLayerType.Skeleton); + t.is(editedAnnotation.annotationLayers[0].typ, AnnotationLayerEnum.Skeleton); t.is(editedAnnotation.annotationLayers[0].tracingId, "ae417175-f7bb-4a34-8187-d9c3b50143af"); t.snapshot(replaceVolatileValues(editedAnnotation)); await api.editAnnotation(annotationId, APIAnnotationTypeEnum.Explorational, { diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 0d2a638f470..3aea3fc9992 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -1,13 +1,13 @@ import { type ServerSkeletonTracing, type APIAnnotation, - AnnotationLayerType, + AnnotationLayerEnum, type APITracingStoreAnnotation, } from "types/api_flow_types"; const TRACING_ID = "47e37793-d0be-4240-a371-87ce68561a13"; export const tracing: ServerSkeletonTracing = { - typ: AnnotationLayerType.Skeleton, + typ: AnnotationLayerEnum.Skeleton, id: "47e37793-d0be-4240-a371-87ce68561a13", trees: [ { @@ -182,9 +182,10 @@ export const annotation: APIAnnotation = { }, annotationLayers: [ { - name: AnnotationLayerType.Skeleton, + name: AnnotationLayerEnum.Skeleton, tracingId: TRACING_ID, - typ: AnnotationLayerType.Skeleton, + typ: AnnotationLayerEnum.Skeleton, + stats: {}, }, ], dataSetName: "ROI2017_wkw", @@ -224,7 +225,7 @@ export const annotationProto: APITracingStoreAnnotation = { { tracingId: TRACING_ID, name: "skeleton layer name", - type: AnnotationLayerType.Skeleton, + type: AnnotationLayerEnum.Skeleton, }, ], }; diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 2ae24f42f5a..48dad8b83b6 100644 --- 
a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -1,7 +1,7 @@ import { type ServerSkeletonTracing, type APIAnnotation, - AnnotationLayerType, + AnnotationLayerEnum, type APITracingStoreAnnotation, } from "types/api_flow_types"; @@ -128,7 +128,8 @@ export const annotation: APIAnnotation = { { name: "Skeleton", tracingId: TRACING_ID, - typ: AnnotationLayerType.Skeleton, + typ: AnnotationLayerEnum.Skeleton, + stats: {}, }, ], dataSetName: "ROI2017_wkw", @@ -191,7 +192,7 @@ export const annotationProto: APITracingStoreAnnotation = { { tracingId: TRACING_ID, name: "Skeleton", - type: AnnotationLayerType.Skeleton, + type: AnnotationLayerEnum.Skeleton, }, ], }; diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index b207600ab60..47ae6232d22 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -1,7 +1,7 @@ import { type ServerVolumeTracing, type APIAnnotation, - AnnotationLayerType, + AnnotationLayerEnum, type APITracingStoreAnnotation, } from "types/api_flow_types"; @@ -93,7 +93,8 @@ export const annotation: APIAnnotation = { { name: "volume", tracingId: TRACING_ID, - typ: AnnotationLayerType.Volume, + typ: AnnotationLayerEnum.Volume, + stats: {}, }, ], dataSetName: "ROI2017_wkw", @@ -132,7 +133,7 @@ export const annotationProto: APITracingStoreAnnotation = { { tracingId: TRACING_ID, name: "volume", - type: AnnotationLayerType.Volume, + type: AnnotationLayerEnum.Volume, }, ], }; diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index e9d06197cb3..01f61ab5974 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -384,11 +384,12 @@ export enum TracingTypeEnum { volume = 
"volume", hybrid = "hybrid", } -export enum AnnotationLayerType { +export enum AnnotationLayerEnum { Skeleton = "Skeleton", Volume = "Volume", } -export type TracingType = keyof typeof TracingTypeEnum; +export type TracingType = "skeleton" | "volume" | "hybrid"; +export type AnnotationLayerType = "Skeleton" | "Volume"; export type APITaskType = { readonly id: string; readonly summary: string; @@ -472,6 +473,7 @@ export type AnnotationLayerDescriptor = { name: string; tracingId: string; typ: AnnotationLayerType; + stats: TracingStats | EmptyObject; }; export type EditableLayerProperties = { name: string; From 192e4b3056593af7a27c5896329d1c1a94a45bb9 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 12 Dec 2024 11:19:17 +0100 Subject: [PATCH 305/361] fix requesting volume data from older version --- .../tracingstore/controllers/VolumeTracingController.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index fda7eb50fd2..76f32ed76dc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.tracingstore.controllers +import collections.SequenceUtils import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.tools.ExtendedTypes.ExtendedString @@ -220,7 +221,11 @@ class VolumeTracingController @Inject()( log() { accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readAnnotation(annotationId)) { for { - tracing <- annotationService.findVolume(annotationId, tracingId) ?~> 
Messages("tracing.notFound") + requestedVersion <- SequenceUtils + .findUniqueElement(request.body.map(_.version)) + .toFox ?~> "All data requests must request the same volume version" + tracing <- annotationService.findVolume(annotationId, tracingId, requestedVersion) ?~> Messages( + "tracing.notFound") (data, indices) <- if (tracing.getHasEditableMapping) { val mappingLayer = annotationService.editableMappingLayer(annotationId, tracingId, tracing) editableMappingService.volumeData(mappingLayer, request.body) From 49406a56934ceb9d563062e7018a1bda575e4de2 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 12 Dec 2024 11:20:07 +0100 Subject: [PATCH 306/361] if allDataZip is requested with annotationId, use that one also for the access check --- .../tracingstore/controllers/VolumeTracingController.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 76f32ed76dc..51d41f24383 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -193,7 +193,8 @@ class VolumeTracingController @Inject()( voxelSizeUnit: Option[String]): Action[AnyContent] = Action.async { implicit request => log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { + accessTokenService.validateAccessFromTokenContext( + annotationId.map(UserAccessRequest.readAnnotation).getOrElse(UserAccessRequest.readTracing(tracingId))) { for { _ <- bool2Fox(if (version.isDefined) annotationId.isDefined else true) ?~> "Volume data request with version needs passed annotationId" annotationIdFilled <- Fox.fillOption(annotationId)( From 
9582dffd3222595bcade0a386bc444695a50d36c Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 11:42:37 +0100 Subject: [PATCH 307/361] fix test --- frontend/javascripts/test/sagas/annotation_saga.spec.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/test/sagas/annotation_saga.spec.ts b/frontend/javascripts/test/sagas/annotation_saga.spec.ts index 1e5cc81772d..d5019cdd121 100644 --- a/frontend/javascripts/test/sagas/annotation_saga.spec.ts +++ b/frontend/javascripts/test/sagas/annotation_saga.spec.ts @@ -3,7 +3,7 @@ import _ from "lodash"; import mockRequire from "mock-require"; import type { OxalisState } from "oxalis/store"; import { createMockTask } from "@redux-saga/testing-utils"; -import { take, put } from "redux-saga/effects"; +import { take, put, call } from "redux-saga/effects"; import dummyUser from "test/fixtures/dummy_user"; import defaultState from "oxalis/default_state"; import { expectValueDeepEqual } from "test/helpers/sagaHelpers"; @@ -12,6 +12,7 @@ import { setBlockedByUserAction, setOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; +import { ensureWkReady } from "oxalis/model/sagas/ready_sagas"; const createInitialState = (othersMayEdit: boolean, allowUpdate: boolean = true): OxalisState => ({ ...defaultState, @@ -62,7 +63,7 @@ function prepareTryAcquireMutexSaga(t: ExecutionContext, othersMayEdit: boolean) const listenForOthersMayEditMocked = createMockTask(); const storeState = createInitialState(othersMayEdit); const saga = acquireAnnotationMutexMaybe(); - expectValueDeepEqual(t, saga.next(), take("WK_READY")); + expectValueDeepEqual(t, saga.next(), call(ensureWkReady)); t.deepEqual( saga.next(wkReadyAction()).value.type, "SELECT", From aea43c6a0d1e7f9044f31e77587c1432986bb2c7 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 11:43:28 +0100 Subject: [PATCH 308/361] clean up editAnnotation --- frontend/javascripts/admin/admin_rest_api.ts 
| 2 -- 1 file changed, 2 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index e0f0aa3b93a..3cd941302e1 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -487,13 +487,11 @@ export function reOpenAnnotation( export type EditableAnnotation = { name: string; - description: string; visibility: APIAnnotationVisibility; tags: Array; viewConfiguration?: AnnotationViewConfiguration; }; -// todop: does this still work? discussion at https://github.com/scalableminds/webknossos/pull/7917#discussion_r1860594474 export function editAnnotation( annotationId: string, annotationType: APIAnnotationType, From e0be98cb03a5abec927762fb4822304a94e97805 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 11:44:49 +0100 Subject: [PATCH 309/361] rename to getUnversionedAnnotationInformation --- frontend/javascripts/admin/admin_rest_api.ts | 3 +-- .../admin/task/task_create_form_view.tsx | 4 ++-- .../javascripts/oxalis/model_initialization.ts | 5 ++--- .../oxalis/view/action-bar/merge_modal_view.tsx | 4 ++-- .../oxalis/view/jobs/train_ai_model.tsx | 4 ++-- frontend/javascripts/router.tsx | 4 ++-- .../test/backend-snapshot-tests/annotations.e2e.ts | 14 ++++++-------- 7 files changed, 17 insertions(+), 21 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 3cd941302e1..5fe11e13db5 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -594,8 +594,7 @@ export function duplicateAnnotation( }); } -// todop: rename to getUnversionedAnnotationInformation -export async function getMaybeOutdatedAnnotationInformation( +export async function getUnversionedAnnotationInformation( annotationId: string, options: RequestOptions = {}, ): Promise { diff --git a/frontend/javascripts/admin/task/task_create_form_view.tsx 
b/frontend/javascripts/admin/task/task_create_form_view.tsx index 9f243221bf6..d956d15abc7 100644 --- a/frontend/javascripts/admin/task/task_create_form_view.tsx +++ b/frontend/javascripts/admin/task/task_create_form_view.tsx @@ -34,7 +34,7 @@ import { Vector3Input, Vector6Input } from "libs/vector_input"; import type { Vector3, Vector6 } from "oxalis/constants"; import { getActiveDatasetsOfMyOrganization, - getMaybeOutdatedAnnotationInformation, + getUnversionedAnnotationInformation, getProjects, getScripts, getTaskTypes, @@ -486,7 +486,7 @@ function TaskCreateFormView({ taskId, history }: Props) { } const annotationResponse = await tryToAwaitPromise( - getMaybeOutdatedAnnotationInformation(value, { + getUnversionedAnnotationInformation(value, { showErrorToast: false, }), ); diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 3bc2037fff1..2298f7d1598 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -35,7 +35,7 @@ import { getServerVolumeTracings } from "oxalis/model/accessors/volumetracing_ac import { getSomeServerTracing } from "oxalis/model/accessors/tracing_accessor"; import { getTracingsForAnnotation, - getMaybeOutdatedAnnotationInformation, + getUnversionedAnnotationInformation, getEmptySandboxAnnotationInformation, getDataset, getSharingTokenFromUrlParameters, @@ -137,8 +137,7 @@ export async function initialize( if (initialMaybeCompoundType != null) { annotation = await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType); } else { - // todop: can we improve this? 
- let unversionedAnnotation = await getMaybeOutdatedAnnotationInformation(annotationId); + let unversionedAnnotation = await getUnversionedAnnotationInformation(annotationId); annotationProto = await getAnnotationProto( unversionedAnnotation.tracingStore.url, unversionedAnnotation.id, diff --git a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx index cd09d7c3f84..ad9e746d336 100644 --- a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx @@ -8,7 +8,7 @@ import { addTreesAndGroupsAction } from "oxalis/model/actions/skeletontracing_ac import { getSkeletonDescriptor } from "oxalis/model/accessors/skeletontracing_accessor"; import { createMutableTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { - getMaybeOutdatedAnnotationInformation, + getUnversionedAnnotationInformation, getAnnotationCompoundInformation, getTracingForAnnotationType, } from "admin/admin_rest_api"; @@ -145,7 +145,7 @@ class _MergeModalView extends PureComponent { const { selectedExplorativeAnnotation } = this.state; if (selectedExplorativeAnnotation != null) { - const annotation = await getMaybeOutdatedAnnotationInformation(selectedExplorativeAnnotation); + const annotation = await getUnversionedAnnotationInformation(selectedExplorativeAnnotation); this.mergeAnnotationIntoActiveTracing(annotation); } }; diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index d11d9e8b522..432c960374d 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -21,7 +21,7 @@ import { getSegmentationLayers, } from "oxalis/model/accessors/dataset_accessor"; import { - getMaybeOutdatedAnnotationInformation, + getUnversionedAnnotationInformation, getDataset, 
getTracingForAnnotationType, runTraining, @@ -556,7 +556,7 @@ function AnnotationsCsvInput({ const newAnnotationsWithDatasets = await Promise.all( newItems.map(async (item) => { - const annotation = await getMaybeOutdatedAnnotationInformation(item.annotationId); + const annotation = await getUnversionedAnnotationInformation(item.annotationId); const dataset = await getDataset(annotation.datasetId); const volumeServerTracings: ServerVolumeTracing[] = await Promise.all( diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index bf3048a4b61..a016e394a8f 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -1,6 +1,6 @@ import { createExplorational, - getMaybeOutdatedAnnotationInformation, + getUnversionedAnnotationInformation, getShortLink, } from "admin/admin_rest_api"; import AcceptInviteView from "admin/auth/accept_invite_view"; @@ -246,7 +246,7 @@ class ReactRouter extends React.Component { serverAuthenticationCallback = async ({ match }: ContextRouter) => { try { - const annotationInformation = await getMaybeOutdatedAnnotationInformation( + const annotationInformation = await getUnversionedAnnotationInformation( match.params.id || "", ); return annotationInformation.visibility === "Public"; diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 4de4cd9d4b5..4e4dcd9b020 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -30,7 +30,7 @@ test.before("Reset database", async () => { }); test("getAnnotationInformation()", async (t) => { const annotationId = "570ba0092a7c0e980056fe9b"; - const annotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); + const annotation = await api.getUnversionedAnnotationInformation(annotationId); t.is(annotation.id, annotationId); 
writeTypeCheckingFile(annotation, "annotation", "APIAnnotation"); t.snapshot(annotation); @@ -38,7 +38,7 @@ test("getAnnotationInformation()", async (t) => { test("getAnnotationInformation() for public annotation while logged out", async (t) => { setCurrToken("invalidToken"); const annotationId = "88135c192faeb34c0081c05d"; - const annotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); + const annotation = await api.getUnversionedAnnotationInformation(annotationId); t.is(annotation.id, annotationId); t.snapshot(annotation); setCurrToken(tokenUserA); @@ -73,7 +73,7 @@ test.serial("finishAnnotation() and reOpenAnnotation() for explorational", async }); test.serial("editAnnotation()", async (t) => { const annotationId = "68135c192faeb34c0081c05d"; - const originalAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); + const originalAnnotation = await api.getUnversionedAnnotationInformation(annotationId); const { visibility } = originalAnnotation; const newName = "new name"; const newVisibility = "Public"; @@ -81,7 +81,7 @@ test.serial("editAnnotation()", async (t) => { visibility: newVisibility, name: newName, }); - const editedAnnotation = await api.getMaybeOutdatedAnnotationInformation(annotationId); + const editedAnnotation = await api.getUnversionedAnnotationInformation(annotationId); t.is(editedAnnotation.name, newName); t.is(editedAnnotation.visibility, newVisibility); t.is(editedAnnotation.id, annotationId); @@ -96,7 +96,7 @@ test.serial("finishAllAnnotations()", async (t) => { const annotationIds = ["78135c192faeb34c0081c05d", "78135c192faeb34c0081c05e"]; await api.finishAllAnnotations(annotationIds); const finishedAnnotations = await Promise.all( - annotationIds.map((id) => api.getMaybeOutdatedAnnotationInformation(id)), + annotationIds.map((id) => api.getUnversionedAnnotationInformation(id)), ); t.is(finishedAnnotations.length, 2); finishedAnnotations.forEach((annotation) => { @@ -110,9 +110,7 @@ 
test.serial("createExplorational() and finishAnnotation()", async (t) => { const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); t.snapshot(replaceVolatileValues(createdExplorational)); await api.finishAnnotation(createdExplorational.id, APIAnnotationTypeEnum.Explorational); - const finishedAnnotation = await api.getMaybeOutdatedAnnotationInformation( - createdExplorational.id, - ); + const finishedAnnotation = await api.getUnversionedAnnotationInformation(createdExplorational.id); t.is(finishedAnnotation.state, "Finished"); }); test.serial("getTracingsForAnnotation()", async (t) => { From a6f476cc8034e5cf2f8e043899eb93e8bdc1be41 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 11:57:48 +0100 Subject: [PATCH 310/361] fix annotation stats and linting --- .../dashboard/explorative_annotations_view.tsx | 9 ++++++++- .../oxalis/model/accessors/annotation_accessor.ts | 7 +++++-- .../view/right-border-tabs/dataset_info_tab_view.tsx | 2 +- frontend/javascripts/test/sagas/annotation_saga.spec.ts | 2 +- frontend/javascripts/types/api_flow_types.ts | 8 ++++++-- 5 files changed, 21 insertions(+), 7 deletions(-) diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 2f34f59a8e7..92ce1c7ce93 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -708,7 +708,14 @@ class ExplorativeAnnotationsView extends React.PureComponent { width: 150, render: (__: any, annotation: APIAnnotationInfo) => ( // todop: don't use annotation.stats - + layer.tracingId), + (layer) => layer.stats, + )} + asInfoBlock={false} + withMargin={false} + /> ), }, { diff --git a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts index 1ca128dcd71..a15229e3e1c 100644 --- 
a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts @@ -1,5 +1,6 @@ import _ from "lodash"; import type { OxalisState, Tracing } from "oxalis/store"; +import type { EmptyObject } from "types/globals"; export function mayEditAnnotationProperties(state: OxalisState) { const { owner, restrictions } = state.tracing; @@ -32,7 +33,7 @@ export type VolumeTracingStats = { segmentCount: number; }; -export type TracingStats = Record; +export type TracingStats = Record; export function getStats(tracing: Tracing): TracingStats { const stats: TracingStats = {}; @@ -64,7 +65,9 @@ export function getCreationTimestamp(tracing: Tracing) { export function getSkeletonStats(stats: TracingStats): SkeletonTracingStats | undefined { for (const tracingId in stats) { if ("treeCount" in stats[tracingId]) { - return stats[tracingId]; + // TS thinks the return value could be EmptyObject even though + // we just checked that treeCount is a property. 
+ return stats[tracingId] as SkeletonTracingStats; } } return undefined; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index 70c03b8adf2..e86d6c07f6b 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -206,7 +206,7 @@ export function AnnotationStats({ asInfoBlock, withMargin, }: { - stats: TracingStats | EmptyObject | null | undefined; + stats: TracingStats | EmptyObject; asInfoBlock: boolean; withMargin?: boolean | null | undefined; }) { diff --git a/frontend/javascripts/test/sagas/annotation_saga.spec.ts b/frontend/javascripts/test/sagas/annotation_saga.spec.ts index d5019cdd121..499b69bf819 100644 --- a/frontend/javascripts/test/sagas/annotation_saga.spec.ts +++ b/frontend/javascripts/test/sagas/annotation_saga.spec.ts @@ -3,7 +3,7 @@ import _ from "lodash"; import mockRequire from "mock-require"; import type { OxalisState } from "oxalis/store"; import { createMockTask } from "@redux-saga/testing-utils"; -import { take, put, call } from "redux-saga/effects"; +import { put, call } from "redux-saga/effects"; import dummyUser from "test/fixtures/dummy_user"; import defaultState from "oxalis/default_state"; import { expectValueDeepEqual } from "test/helpers/sagaHelpers"; diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 01f61ab5974..cdade8c3da7 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -9,7 +9,11 @@ import type { MeshInformation, } from "oxalis/store"; import type { ServerUpdateAction } from "oxalis/model/sagas/update_actions"; -import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; +import type { + SkeletonTracingStats, + TracingStats, + VolumeTracingStats, +} from 
"oxalis/model/accessors/annotation_accessor"; import type { Vector3, Vector6, @@ -473,7 +477,7 @@ export type AnnotationLayerDescriptor = { name: string; tracingId: string; typ: AnnotationLayerType; - stats: TracingStats | EmptyObject; + stats: SkeletonTracingStats | VolumeTracingStats | EmptyObject; }; export type EditableLayerProperties = { name: string; From 63ffe8acc45aaba977bc5df33024c68b005b8afa Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 12:17:24 +0100 Subject: [PATCH 311/361] fix superfluous request with empty annotation id --- frontend/javascripts/oxalis/view/version_list.tsx | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index 301101b0804..c6dba3508cc 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -181,11 +181,15 @@ function VersionList(props: Props) { const annotationId = useSelector((state: OxalisState) => state.tracing.annotationId); const newestVersion = useFetch( - () => getNewestVersionForAnnotation(tracingStoreUrl, annotationId), + async () => { + if (annotationId === "") { + return null; + } + return getNewestVersionForAnnotation(tracingStoreUrl, annotationId); + }, null, [annotationId], ); - console.log("newestVersion", newestVersion); if (newestVersion == null) { return ( From e7776bf88dcccef2ba1eb0880cc23599199ec5fd Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 13:34:31 +0100 Subject: [PATCH 312/361] fix type check test --- frontend/javascripts/oxalis/model/helpers/proto_helpers.ts | 4 +++- frontend/javascripts/types/api_flow_types.ts | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts index 0a92dc310f2..64cd2456e71 100644 --- 
a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts @@ -27,12 +27,14 @@ export function parseProtoTracing( const protoRoot = Root.fromJSON(PROTO_FILES[annotationType]); const messageType = protoRoot.lookupType(PROTO_TYPES[annotationType]); const message = messageType.decode(new Uint8Array(tracingArrayBuffer)); - return messageType.toObject(message, { + const tracing = messageType.toObject(message, { arrays: true, objects: true, enums: String, longs: Number, }) as ServerTracing; + delete tracing.version; + return tracing; } export function serializeProtoListOfLong( diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index cdade8c3da7..d986c1770ca 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -861,6 +861,10 @@ export type ServerTracingBase = { error?: string; zoomLevel: number; additionalAxes: ServerAdditionalAxis[]; + // The backend sends the version property, but the front-end should + // not care about it. To ensure this, parseProtoTracing will remove + // the property. 
+ version?: number; }; export type ServerSkeletonTracing = ServerTracingBase & { // The following property is added when fetching the From 5f0729222c2e8184f44dc4c7c32a6486836be322 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 12 Dec 2024 13:54:41 +0100 Subject: [PATCH 313/361] resnap freshshots --- .../annotations.e2e.js.md | 7 ------- .../annotations.e2e.js.snap | Bin 16857 -> 16749 bytes 2 files changed, 7 deletions(-) diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md index cb8f480c3f6..f94da6482c7 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md @@ -1250,7 +1250,6 @@ Generated by [AVA](https://avajs.dev). trees: [], typ: 'Skeleton', userBoundingBoxes: [], - version: 0, zoomLevel: 2, } @@ -1317,7 +1316,6 @@ Generated by [AVA](https://avajs.dev). segments: [], typ: 'Volume', userBoundingBoxes: [], - version: 0, volumeBucketDataHasChanged: false, zoomLevel: 1, } @@ -1346,7 +1344,6 @@ Generated by [AVA](https://avajs.dev). trees: [], typ: 'Skeleton', userBoundingBoxes: [], - version: 0, zoomLevel: 2, }, volume: { @@ -1408,7 +1405,6 @@ Generated by [AVA](https://avajs.dev). segments: [], typ: 'Volume', userBoundingBoxes: [], - version: 0, volumeBucketDataHasChanged: false, zoomLevel: 1, }, @@ -1437,7 +1433,6 @@ Generated by [AVA](https://avajs.dev). trees: [], typ: 'Skeleton', userBoundingBoxes: [], - version: 2, zoomLevel: 2, } @@ -2287,7 +2282,6 @@ Generated by [AVA](https://avajs.dev). ], typ: 'Skeleton', userBoundingBoxes: [], - version: 2, zoomLevel: 2, } @@ -3138,6 +3132,5 @@ Generated by [AVA](https://avajs.dev). 
], typ: 'Skeleton', userBoundingBoxes: [], - version: 2, zoomLevel: 2, } diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap index 42e2f533c0cd7d17c90440cc6fe13242b6535bef..4ce14317c58f27dc49891798adca4f92bb4a66cf 100644 GIT binary patch literal 16749 zcmZ^KWl$Vlv@OA%;7)M2;6a1CyGwxJ9z4k4P6+PqGPuhyf#B{skl+x80AcVq->p}# z-k;k)y1IL>z0W$kd%9|^(<5ylO{?o>?cwdVky99KE|Jjf(3j97&Dn7hOabV~p6Ko51`B5SXL6SW$h1}SZ z89s(#D;W`3iAi`8RDI}YN&dyCak;IW@0HAx<$YSglI44Hb>Nj%=sux|NcW?@Z#Xmd zy%fxOe&BI{|MGllM*LFj+RV^G=YqZAZ+`FepD+G(7iT?&YyD9gRg=!wqGmJeGhSJ} zl~-QSw=q8*>u&XW`U<}{ad5N-xW=$ZR4K%~YA&csa3gc0`69R@+GC)qYSDun3B)-; zK?G0C(!#`3y3!4pGg_b|c7GX^R~--!JFwb1o1(IIk4!Ls|M68wP`o0VXDOA_u_4XZ zdYmGt#xM<#MUsFrp14HKCY`1ukV;?kZNENkgrt5+d^50jbCFg1Ej_;FsaN23a++w< zWT@~rTjk~8+22iW&D*_02A|wtrgIAfMT6v4*6`Qw>k`b|n!cKsP1w<~<0Pd7n7j2B zH(f>l{8M1Q@}xSN-B+Im_-hvArmP&|7Gi7=@^@LbQEdFtOGPlDozY%QYQ|t!s zlv^-tEQ^@-G3+Gxb#u6A{6sZX>aK{4qLXU0U=z3T4dA$Nn+@K4II+WZ2g+fj`GmwK z{4#s_EA;Y|GB@h8DPhrBw$#*JQ`7Fr<#$WdyL#qf5IaqEbhXh3LR}%r45YsOH(G@y zTj-YVq|+n%`*YGNvu^d(hPZyxYxQ&zo(tykul7x++?%bpf4%!wT{*spa18egR}S@8 zB!^W!JDDn1_n$dO(Buy^c5EED`7|~3%+Dp1pfp~?Bvwy7o%_K%TED$A#n?}`xRRLo zKh^vBQ!ynw#rS=lt7$E;=vZiecN$PpQ>*u!C@ep# zivpQs*F~{&<<~_On81uUZbDqocAB!VYebJrB1S}wnj$v8K=X0`ozfcP{kj2Q_E*!p zHPaXzTldp6&Vvc}8SJm{w_^ffYIvTgWOiZls7B6Vb=bpY7{g@^mnQNfrxK|CBM7_X zCcXU1pn`FL*fNHK()(eNGOb*qoX6Tk;0n4h`JOyOwOXnppoZBa8xs7zn7+LolOZkA zHmy8QEvX2VKh1y&*JXLR5p}j@!zru5Q9zq1UbY;pYj)ZZpvnQpQ>YYdB$)N7YA6@w zr9nZB=~U-(hgpZ@?DuS#yh6={zI?UrpVqzzwTYTEx64ZUTY{8rTkV^%QOjXY)3gRY z-CCYrt;w)(zv$8~1GN0YY-wqssH-RaNfMMajUUVl-n}y*)woLEpup1m<&hbnklEuT z$CEC$RcKH|?Yj6KfAk6PcdMoHy;LVB2gl%)(251Oc&WEP=A;wV4D^ri2k|J4Ebp&L zbu)wmZ#Z#h+bWyZg7X#)=AY)@5}(w$%Ah<+!8lTBm8G(OgG<(59q}<2VLe2dnHv&? 
zP=l!?f<-f&nXiM|>L&Ig`M8=DX+K&vS`M4SEK5&r)l3G}QHYx{$u(rLRfde@;PBC3DMNWTjK)Dq3U@YTrKd5)xHi zUarISYVsPL49c*G?`5ZHOo-XOg3kIMe_}x#}>vIj@ujfG;P-sTf{O=XzxFY&6B(5Dy?tAp>M5_X24T6AOhQAR$5@O zzb9(CH6_drZdcoQ+&u3tyH+?#BG)c7)ZwFF&HOG#ALF&>HjIU%Xoa6t}nCA3zLp10* z$%?J*U^zKn#XWYLGQ!u=wYg5irKK)wzlD%a3g6%{MDzoGqbAV~O;UY@FVnDh9MqKl zrWt)-AqVDzHPt?ZL9)gipJ%(@%LV+27;02pr2jT0D{i)JIG7{fHi}a2$|ncbYTyqV zOjHKEQ3m)&s|EWKaH;`x)2w<~choy1i7i!C$5_h7^p9?$Ev?DV?TG)SKGwc*mdB{7 ztIzmBn-jU}ikATuZtA{biWRuY5WxBRfhoMCN$&|&OkDM%gJQ(9PXBfB{%7SfhoU0) zN9m9dUu6vo1LKeoY53dIz&EqMk5iE<2u%Eu$`VXWdxU_++rfq6kbvgZn+quxmgYw= z=yFHkY~4DTsc+z@@lChy7*fu`$w+nzvTv^|9k$3kslAQ{jTc_bi5Tn1<#Y1<6EXIT z86N+8t$38kMs1#I?$(<1`*)_j-9ZZXG=;8j?7po!R?_sIXG<2bC!e~W$og-n$a?Mb z$hZbx%6HetR*#mWO+1!GZYL^f3T?gwl7&*~alUq5Ev(jd$lUO{iN6}wJ9&liv@dh& zXjot7iX%+FKF)Fi*hmrF(iEDW{P~}vDa&NXX@tR+Q_w#0okTj7e@*|S$6X%d(kf8qYfrcwGCH5N zTh8DQnPW3Yd$Fx?9Ip_*0n>0?oQJt>`uT~RzYzWl4?b=}Vj8irRR>CP>(I&=>pC}< z_~-Cl&N&u76`^fR?*^~mf5!&+)cB6q znZo<~%y$R@22Gk(6LK6oStEdnUBmz-s|NCcIJf~IB8aUGCKBW{lue>&GywC{8&ndY zHI+9lLklPt1b7XRG;s18!h-~$9jGC5L#ZO!@6hZ#A)2?{Lbx=s5fYxzP|rC_v&_lS zyYeU#qsk~$uC2x>(srOBma-^WQv|W7h$@=jDTJIlc<-)(X5ofc22b&1Pr#G9kde}~ zA<6Q(_cnfP_lbvY zRD-!nSv3nGnSxg9hUabrM_{*30QZ^qRM4|q2iWRa1F$J;V$IW{DfMX>g0-@_b_-~q z#4@lIhmcOWVm|^3+_CQf5)1DwdHc%)r_nFTOf|KZ_5sg?48aGaeA1cdu=v!VvtChh zK>sjuotj5!O}lrttaLW!LtOhMGaG5^%K2F665R~AaAG6T=39UQOwJPKyt%>G9xj&v2&`d)dDUrKEQ8qT_ zF>g}-@uXVBFPhNqAbub*vJXeaXHERxPnKnBm4H&%JoK4_H}<%QGm}zy3DelX$U5rQ zpi3o~QEwbJMUCx3mML4}#9)A=WSoZtoC;a*~rR0lI6z_ z6umz`)ul!SitYS8T!-dpSI*tg8{03HKhGpC2uIC7->RjS-|l&raeqHjU{YH6-Vl1L zC+I3YAFZB{OZE3Uq(Z*^>-!4h4YO!Ly3vg|#Ng%}9bt^%;zeas_-3y!!oZX!N`P`|?7Hj`BCx{UeMG z1NLMt(I}<4U|VGqmfSr7esL$%(BHYp#{~$w1jY1kgc1l3r@k^dGEpTmHbmm4mJ|Dc z{G>^#+HMH$t7;e+({7Z~@g}H+B%BSDtjm;VYq92A2TzP`3T(0q?c%fpGpA~p8bv?n zfQ{cTw{t%osTrPjZ1!<^=O4a~?3G>}4b1?cxurSBf)eY|Y)A4Xm*M?Wd*;9YiaZ@S z<$jIz%R)W`;Xl{|7@x&{YbPwa@w8^CTN*n1iJi~rtfzOZT>MRGrxTpMX4%h5&k|(X zPvIV;*v^hE;#m=Z+B&GCakU5a)sYHrkCB`G@oI9*8ER^*!TM1hgI?RF?)}Tklu=11 
zx}pNtxJIozUFc*?u~Qeamz+13;IPrOYEfwAkU zPQKPcUeRu}vgdV84{Mu00E)vm4Cw76N0v8h&(2Fkf1m?aR><~T2)+ea?4IupJxl$_ ztZ5gXp+W3Svi`Zp%bS#Lpy$ZbFmYqhBmQqnC+JK2&p|M7_9d~I+%2f4u5$DbXKPlJ zWhRT#!jGk~wdfgLL)}l_XohEO|7w?wKV5qnYJPzo+5Djoy^`y^M_WpreOv?HJ9gWI z25-&KMx9p{84WOxl0rj=p>PJqnR+!`@x3wyUdw6gwfF;ff2 z*khAn734q-qDv0KK~ba*yCFhyD35kuk(${m&eXP*gRs$@`rAvM2vgz~vV%*HUOShL zX+1SB{!fN=TdBACy{0ZJ-mAW6)o0y}eoPa7BZHLXlus3ueXBSj=PQAQi@J`LYpT&P zBR)2BRHQ17DP41S?6HQjmV&?QtgV#>E%i|2T333b2+L$?Lod1$Jh;cJJ?vdr= zZW9YO8!ILw&3(I^d>;+fE>Rs;i1Lk2!o0NV|E;!J;t(8 zGX==Lp(Ze(#hflc)&#o5acB~#s2*r>B-N3=h+@Z6QRU|0CM<$7a1G1B$v*xp)cf>k zF{AcFk=P>QNXaQwR6X2CZWCeV#J>m}y zC_dtU2dlxwNK1eCJW^=jw0G=aqxr&`ql-m~3@~~{|4|&RkSsx6gJfF1rkjDgatdLl z?jy4j=RdXXqKUr+(s`YqAWE23#wgxEH6@ps@Cnp;w~)Qnv|IrRYCu_W z4hHjoC@v-veZjmlJ}W=`%7?V8{HN zX?fmY5?U1&vom1E2FlAQhE*@h&^pDiEFN+<_$+nGRL^1-f}fNoKTH*MzTVHC#7+A? z(_M#c_heodxhqAQ67C(y!G!R2Mj{Mmp`i?@ytG2<4opPYDPXl` zSj#3Nnpl30dv;vah-hbWVNFj*f9jKwEkF1`7C=d&{*oW%D_6l<5}YgGo-ychKwy^~ z9^T0wwZsz%`GDvY0ur{CZ5RcTi_nlcsy~MbY{CIK0l%dzyy=kKIu3tDKDz1Y^WQBy z@>QIMX5fL3W*{y`j~`nY9DfX{`!wz0WeYpC=06tzx7x!L8DXaq{J}2KQd6n; z)WI1U2%nISfYV8I!R>vrLx!$nKym(9ONlLj<=2%Ln7+x|!Q35yWq>KP?u>9RQ_i0a z|K(_QJtfgHsd>mBs~$TwO@659Dc^3B3z{|b0gMmj_)c7JK;{_5^zEuHkb{a;MC|Bl z47MC&0Gqg06fGGCia5ZxW645 zbif~+{6FG^8iG5&%_1J}z%owA@psr0CHmS=-$r$oqBS%2FYctCyp~AJ*H41Wj4vJ1 z=jtP?=jzw@=Ii617H1=mDDtIAdr!-gi>uO4O zcoKaTG4F06vt7APVLC|p9K(7O$dU%+YVG_UsW(;8D}U5{_!_7FnnOsF=@c9D>zB1D1oo05ztR2-s1OL`XN2uB@ZS(ZnZHeF{F>dL z5CqHc>tPPZsrY{2mzDO2EDaq@cnx0D|jY)URW7v1g~+oh&zWGms`q1+ln{3P4upmOB9CmWKn1hs(B zaugR55fI`gJ(AAF!0H-lEBma!|ekMGQi;VcyyJ^2SO_p`CN zJx)!ZzGN&OTy^E%&;%40>xB0gxAm3R{}2*l*)+&?vi`*WVGL;#rjS3-<*&Zerdxo(nC)hX(uzL^-+N3~1JlR8>w zkqjgzGT+Wg7Y&i8huF8&eF4g%U7miVRtkv^W4@33X@3eTiSfSmFtimov6JX4iMhOm zXolt5AmqK7NhVPbic4MSl~x*GLcBM550f)d33$cC+=g`Zlyz~crLcCYM{YJ<1y3^MX{#1$gDxjq7Yl)CP+9&uH7pw;reZJk+S;FOlQ5=57P?|Uw78uc6sZ- z==zPTVA2jNtdA1KDTjYPSnY1NUWIj5qD1snou=TN)F^8VVVPlXAq8U`f9?0`Lyl1e 
zY6DD}V0Flrk1wm$#YLFRwN_uOBF?|o#!9Shv=Qr6%ta6~p5&!Imc)!IuJ%9HWc1t- z+=lqMS5z7~d$gKBsEdZZPv&i{5m3M8eZ;|D4J!(rC<)j7VZO1?%H-JL zWYvpWxHPm`3VTVEOFI<9c`6Mr7a^lysa}Z7Y}fr|WkAIl7oqQ|MUGY|LZr~J@{f?{ zPn7u(${%C;hOzyL^Y)#IuSo$LD!ri?i@~PLFy{QX0FAT0#*Kh~Uic!h&K~LQ4mj_$ zF0?**{G#V-^px;bG`pt1*zSVRPnWxzl;5w$y-W@T&V<%D2EXH89VX!tuZ|*>gPIPw zJPM?Gl2=9S>~q`Phg67Dt1W^44k_%R*Y&B3u2G5LUMg#w4NL^7qFtUrSg4KU(ai@Y zu92>d!^LOP&|`yKHkG9FT)cH!MembvxP;S*;!};aO3p{4GbvRtM;b}>SbLjG4x-a! z@cppZ)3?{=ykwP2MA*@YC)@-$+SGL|s4ve^Nr z#IkJ;<1Yi9U)yc?Wwu59Y`Rr3GC!k;Ho%$DpN*H4uV!daG`k!?K%CD!6QO7GS!tsY zu!-b-z@w*r6sx8kp@>6@j86I-19>SyeI<1UpgzqXiQ<@5hEr3(RGx^)@0zVJaWY1I zKmsBfjtcGcZiAooXtb?2N!2#@|MgpKkoyf`Rd)U=HRDBL>6c<{mjN&`Ic|Ph5Hqh^ zlC!u75uU#<9Zr@y`6@%{w(wYB;JM&DPYo;g>-q!H2wdK8^UlO2IWE>gtBSSs^~^0H z>sf{Uy4XZ~e`iQfZR-`Ldbx6kjd}OOeAV-XUu)oM`V+x_)+#@}rrN{}Pyvtf7AQ}I z_ih$XDk@U|@65W2#$pw`TFP#$C08dX@Xer-JojHUw(!ToRP?vLb^iW^v{7I88ZN(x zm4_u8aeaIe3lqIUv~4H}c+;}{)-0^g0iF9}h%hoKEkhy*WlURv6@%(7xFlPWe%CUj z4*w4>Ff)WPKF0dTUrIh{Kf>R!IZQ2tVFK4`z#qu^%xkRilU~LGtJ#o1LUDV*D*f*U zScDNXYzFzzpNv&Od$EUv_SKU6qXOw>fnCm*q;C40j>_|-QMq*;L-mGf*ID=y?Mo!a zXL);Cmo4J!Rw6^@u>56*{q8D**&VyT?e%Hqy@!^t&}joj`dx>Gz8|#x-`C1;|-DBUSo;4Hf^X3 z2np(978&d}C(Pe{DZ!X=f8I+Qdh!jTf@%7$QKJ>fVRvQUnkdZ;dvFmsT4#`OFAr0{ z`IqC~ZnXjur%p$m;n$MlFP7(z<@``9{}#_85$fW~bZOd2?Z&_9y? 
z*Cj{gRmDE@%><4|-y3fJAyrh^6oNei?C6d^$)g9q@%?bwzS`GrwS&j-2;SxJ^}t39 zC&WdlsX8;t%ioAiqgZ%MkA@Vl>IZnm-=CGnA{niW{2|-AlAIkercuH% zT6hZD?}3o#gtfOFZ=k&%4L*J(~w;zRthS4EDV_SMD2uFnB$wL3pY-BJ8X z-Ff6K)>TQ|P?BE?u7q&IlKzF$2GC2o&PMQWoPS!@H9s@_ zYSEQwqi-_?2A=CRNVXegxV{7O{D+!RSJheJ-3 zFV;j-nBec_zKk}@B!)b^NO!9Kat)1WxA8fi?u!6n8!K8jTfX1S;3r!k3;1drAAqFt z9zi)sna?Z(TbLQVJq~r?J13c_YKzc9$5rad`o5QB3*#mK#|%yxFIIWut>Rp8?6u?3prix)~hCu~DlK7!qMw*>dgP6Tvr)D80A;Y&Ez(KrHH9V>( z!b9As=4w8`Ks4c(XmmoUyVs%TPfJ{=7ByHkd{zT3DPF29zAsiU9=N+~T{xzdUq;MooR9?n zJ9h(pw!9<>?$Ere{H&g)cQKgwc z0CRaQ1bsG_h?xCzOkVG3))b+;DBqu}8sySs<=A;SoPdY5;xlQDR#j<9wPLoBK4s-tWs9n6T0qWuagZaU?q???<$OXI{I6OP z^x6Fn{GeA7l&f(PqN`Gc(Bso3WBW@E?;9}uxe2!^!h(g?NH)+GFT0viv%K5i6J&p zzY$Xys$-yaW26$&(Gt|Fbo#55ElE(TIQ(JY?6bnKB7?LT=0hUjs;oGWC|^=VOHgNJ zZBr3+Q3m;-EuhK#aL)=0B>M3gqx7C{zQi~+0|iTN9x=z^3rSZiBv{)gU+rdwvr7ug z0$={;r1*_G$x|vyzXJHjyp|LNHw6w}QnaB0cxQf&gYs_+ivD-oS_4~ft;J< zs3qsqM;aZ1FBI8gcYR&$ObY+}b7<#%F(ua5(c!daEeQiHLB-0`{AQ0mBY#`YzJp$k zi3sfgt$ef@@wIfc6xhqFVqvjUxY}}O0Mwht_7I#NY*d7obqG!l_UC%{@*OV+g}3pe zRy+KIFGZrE_b46!*1&SLn^VL!0|d?ZQpIt(U5>C6tdD{kZ_Swqr>W48eu4zFZya(Q z#zm^B(5ezj>+EQHtf{s{aV==jUT@9usPXuJ$}anhd=+=0#^ZT>d3EY5u9d5t5O`Bh zNE?b+e`9XR`{qwRLX%ops3yX;+*lF1Dl=y7gk9|xq4l<0moL5eUhj(|eYpkzpbYdO z5Mcqe54^t1VXQV`(7jz@^baL)U;z(}0|WV}gvmQJq7_-d&flfQQ246QYxDW^bt{Jw z*|hjkW^Trkg>)#Jr znn*yK^3d8l=F`VBTrp8)` zL9=^%!N2FuzqiXTErs z9(c7Z;5zQnYCP**hfca2^P^!N4Ml(OSf}#$!R?bo8^}TWdN>U01~jT3^d#qFb#wzA zxwp=HUp%;2V$h!n<*3)7465!4ZA=b(JA&c&h?pg87*%iZXN9dEoW6Rd23c zwvoe=yqr4%V-mu;d&@Kl*|CE@@WC$mc)4OQCav$iq*rIY%I}|LUHUOvkhEgTKMLyB zxn|Hk@N9)X$H$;KK0olcf@$0;Zo5vmD*lD#?zBL|Lt~V$tq1!zic7LeqYGW<=5)1X zQ-mCjOR7tPC6yZaS`BJ%m~b!KG6h2J=fxiJJDna4=vob}?{~F(x{oGi z&JLe4-g|qGOdM|oJHkW#&ED5B+CA%D{zqS~q2N!4hYWa9TEZN&`g}x3>?f{ssg2ci z4LB4FM1tiV(z3Os(dj}CO4>`QRDXvHm*XtgE}Da^EyW z1$mmsKimfReBEA5)7@Tb2zx&qjAeIsACDbxolCY{_XK#KZ$0^^yX|boZP!b&+q=z( z)qXv6QI7W}cBT|CA{I2-D3;!0-Z7E2p%jSanm3V^p%h?bb>fZb10E=T*&+RxzSa-C 
zqk>{>{yxMLZO#i#D#(jzh@Y8OtUv8@YqTWh7v3eS$g{lo;r7Dp>n3@Ae7w~MIfl1D zQ1R!~w4vO|;b*btJgMY@yzhpcv-w;Wo{ct65vbowQ9n@%kP!=#Z4`rw;r2W#&VGP= zgW$1@#ad0B*d<`x)la#jLyVt37Q*jB)TXwA?bF>t z$_ab#Z`ZnwofBrJKe@th%ScCd_x%#0W$nHQ*wm>?Q({HuSuW%X-^x zqc-t2ik4by4Z-xd+RO>gA}t)sy1i2;e=Q=*O51L|wkH|EJ`)^E!-xR|Eq&(Jtxi5gDRR4&%nRT)oa#<~U|XNSz6_>~@)ct3!npurAaP(h+Kv%sRPAM~&l?$Yu?r z-$Z))lblVSG7W@6AFT6OEy`@ynM`)}B3TNc9fSjf`pXlXEM2rCC}4A5FjH)ke#<#N z*SiY4^;`89{s~WfLgXm9N zKu=m-t!9JoKKMys|FISmihLGO` zeujGS=9@w_!9RJ~5;d8U-1)&@S??P)fAaj|zLJZ;H5%3Zag8ZzMUFLM9!(8QeEhST zC#FVzx7{SB@xShSAHEn4{!y|!KV7}pZkoN|wcGL={3G(e?%_D|=N^WP<~qyH_QYwk zk>@PIdBRz|RCBw`-mRQzqRt%EJ7v{idr~e>G-l#gRk1~a&|-k5ei%iMqM~YkU9_i@ zQE9Z`d}_SvhVBwj8HcoXk3w*(KKc<^$Ofh3N;WO2D{j-aswb!D$HuUv*~+lwlNQz^ z@co#ii=-&-#6Bclq>CQ+PeMe$XYtDLTdt`UnHrJ~$31+mz7pbGMIQRQ!`DSXc?8&p z*8nZS$D z8b!FIfS4i~$%|SW%R~nx;*V19c`z(d>~S(Yji9?zz9F>DwaUj>$%6zFZaKyip$QM# z0EE?{5g1cfysR|IaWT>UUCcBzDKWzPbko=jQ@n=dgd_~f();pR#&P~v=c&DPKN7@b zLz3~Y_BTs1ZPfd+*DP!Y!5dlUooe6W?#rQSQ+FeUarYztY;9KBNY~x%AJd*`8pnQ^ zk;1PYuJOOWbkbx#EZVH|)Y;5B_Y0>dyv~Iut^R{-$>vs!g`Aj~xyc)|5W6XfsDJLtF%;2|kALY$F@3I*{P-HkfuvuqyF{U!>8=HXD!y+p$>%0+rB_b}TS* zY<=C+BP+yQ)Y)VGZ!jI5DZ%AHUF(RCCc$M|179!?#H8zQwrikP@WJ{43xvcgMTTQ! 
z!J%qVyE=4SI0}^@?C@?QjoGq%ziJ6loC)Ne{cGgRy*j&lQ^QKo%3HU6Eyr({h70#O z34T=|RY#}>MQ|?at9pcG6pQ;euFh9INiFl#4h;|TFn^H4 zR3xEuPC zG+Fzag|hy2dG$6X?*YvJ9F>x0{3CAZ^%T$Y2CZQ1G>?{$ninA&Cv3gcp;WES24M+1 ztW{n+LG32QE@!$I*{t@6esmh7ZF%E=E4c6`P=XYi;}g;TK#=1 z;)I?rfZRxdh=(1n`Sq1wVBt_l_fU&Wgy;}2iemTnUkNm;1iG%Jg%eQ-wtC^Mep?f4 z*DJ6789Tx|2J{Su=UVi*!b z7PK>tMz5P(OmpKo8a)1R3RJYKvP#z}e4G`MaI<yp7Vci3=`)&KK3AVCQ~8Ubm75)Z$5Q+)Bg<`;frWQCtlCYR z6i)LWqOQvEiHwyBDdUsX_sfTx_s|_Ot)wW6Pjv2_ddX3`a_pAtJL-!a%^ufA{+^sq zKm8;`@G6hO8SsP-L7C$I^LMYL|CQmd+(^ZXyXU2s8m^R6mJohNaulITWX??}vC875 zExE-I-QOW3E`LVE z&PBjt4qNp5x`@0c_(rk_;WTc}nVvHnJ>x%$rd5uMa%@~$SJ@VoV}ZFL?tsYWy!3wZMTUfuYdH)bL+97D;YK3O%S ztqWOV^XR{P*k*%X6CLZL-$wz<-)eA0I{())4VBwpQP3`@OULnL8wNN0|poj}`(p83!?%LOn^ zfJD*5O3{}VY5gcT-v$I9m<=-@2|zU>>MB8H180|&;a@}(b8r(X!MP_Ma90XVj%GK> z%e+ZF#;-EM3H7Gnjp9rg+_yL-ht6U92cTBl%V>W4X*j^PC}7R3R)CS5<579}aFPRk zQPY|Q?6lH`l8D%YYUop%f>SC^9*jLEIsF|;f`9eW@KM@NMGLm>wXJ(xxcHCXy=MIf znk*j|LF4qMb;qrTniol-{mDk+nGNsv{wPNJ_i)(ezoC$H`hB?trlM5kjMr&CDu3NI z^S9;hqnmL1HW?yC`r^**L-^rqQ^w>WibzBXkzMD6-Rf)v;Sor%^!&?TbIX*Oh^j+S z%=JemuiJ_4D8aHLS-77e`A(=UoF7(u@39^1UyCC?^lmz4_DFUgk*pnoopi_%3zCTf zbbiy00u8-j`YHeao3(wML&kfPP8}x_5=V83@7`??Qj+ zMi03*Pcg?HNhq;?Z`MWTW{LfJ^ZfeDM%s{!26DqR=Zv9difA`*=A1c{%mm#bfUVCm zm|-u*?;XPjDbm8GOxWwNf%RURv=TjVMxChcWmf>L*|rWJUvuDq&=#Tr6HMhv+8}a0 zt_bx>$Ui&isvUHXPss!qyg7ulHBw4xO0peLkS*WI=gZlXB|l!;+Y6VUWozB*6ioeR z=^9g|L3-y#KC{US(_(P7Y?)G!_p7wrZ~Ec~XHSC7&(a{9bhm+4cuF_iEjU8*^(IYl zb20Wo^Rpq@c5MDMe=7$s+3osaH+(F=}Zl^ zOHpw+3q^(T(jWu4)cpr9+5Z1Z3KnGV2=e>hG-8$6&P$ZQhn#@N-L-q}t2E9J)7R#E zN#OL){=kXleL6b)S>|)Sqj7Hk_59fUy3}@_>k=NQ`Y|(oM+MC5HS+m1S^6l=5@H>i zu#F}59~XUi7yM-5b9GNzn0EKG5q>2T6UK{AllPNnv7f&kO_sllX5D^rf)fjc*O%08 zIp+Rvu$i^yW48}h!hX%iqw(^FZKaJZP3{9L>7spSl%}UqshKJ_{@b{pN5o@(_HG%N ze*e7nqvGz~>rEAt!zvZxDN!9RUr=6cKK_8-cq0<*wtUPg>McmK3nwp829jB#hY z*7R^U<@EdQGT(8;R88((c)>J?q}0Z}-pzXjqcG|ZK{2!G;0YPVl9h-P9FR!NE;FLb zD%*fChxF+Atn@eI&a~!3?YV$bk%A>GD6+<+cH_4gv7LHfUq)sH}0;Qn0rM 
zdoyhrWAt+DA!!F^ryU^9|JBTZ1~J)+(3-*%SX#wr->6anN8eD&UtOtWvczXL`3?!8 zi64fo%EKM_%t6l~JX|FsqzL)bopF^+xb)Ut8A=rNn4(ytt<8m(zoO{!!rAIY{TI70 zuaa4nkSS$>WZC#eo%n{NCg}4R=aC_(oZZL*rM(oU#x&U~Kbco+w2+nl!HzKmEwhhQ|xUX>X z@2423K`k2_32K}2dZi#ULDF@GE5+dUG`l>9byFltC&)fUm#&^Zee078PM;Q1(Fy|n zu3Jr-U9ZCjFh+kt>SZKTX{}ezffGwtfk!5x+uIS2bWSn3$&?R!jx2p=k?-X6oozF@ zxRty?N~BW+H9>XhoU5^!DW1~ZSQ=iSj8gWhslU(MYkL4gGoT@nADgiShsnwkpqb=h zb(rxR${JT|UkgccW7LC*h$Yb-tC2wgrx@|Yp-ERONnt}&irfN3_xEE)XamZ!$?*V* zm;BC-E-{{$n7V!Nj)_qVN$u+JNomXC!Dwe=)P=M4k%_-QnNHQAt%}@i)8q}6jGYM!;==UTAUw0Jil1g zMudFfeQ#p3Z}-K+mU?L!6yQqBJWzrAr#5161r#$+K=^8g7P=(e893&M^#1B8khL+U zHc*5Mp5c7D_YmmcN3-Jq4}_lW{R~uc38SMgxO^cXBo!K(LInyAHUC$w{7s`%*dqnx z-XShd$}y~og4T_iikSc>hK`xr2<`hSDE77?;Lr)N%9C+|g9+w8UU z%jfgO&+hmiHU0mq=Kn54eltg|gzIg%F#iv1q44QbnrYvw8}IM)S(pDaZJs|Go(uQ; cy_MqF#n!a0alvO^mjC>g)7s8{JHp5S09G-)3IG5A literal 16857 zcmZ^JWmH>Dv^MTi+@-ifaiuWgN7zIOk}y z|LE;Faa*%Ubt;m_Za-Z-2EzYb$Y%w9R#XcAUSw)5xYRuTKxrwotnRUA#Dt@tLGwf( z2tTB_ts0owsx=qPNGM_U(F(N=Afi)`X64R^9e<{NKBPIv>nx+Crj{AYvg4p86!M@M9S(EDdK4;suNc1_2Eb<56~*s#73LnCF=g zZp@HWdR^2#4#$sRc8*R>RQzhl7m}x9q&!L%kaYtkp6<})%XQuYEQL~9U#_{lMXh^} z#56xcS_O+{%~hBQdyc+bUPX2GeO$TX+!}Kpqh5{YsBqQjI*$%D%Qx*K_rC#VB;3+| z5KxA#%3{DIb@jP(l}An)c=e%3ikDS{SHi)>*J5`6W5?*h##)1?TehmrqnmWcDEpyH zKg8!BslLxk+PgNss?yHx0b^|)9X)Nm3|&3J#UrkX$&8GjNA-U@bCF0eUt;J>J$;ss ze~qzEc@Z1zi^j)$dRA)I`jQ%AbQqa$dHUHEv^bx)@3|#vYCXPkJQ!7Nc|$7U(;3!dd6;)yE9mj(le$4zEy zzYaAYwoU5Wj*){D_Jw=ut?pZ*wu3buhCY#fNSjbY zaTT(2k4p8I_1q7%ThO^n=)MJ_Um(hn$ z6J5<)yDH*Le5F1D-}}SKvX93z+E_Ly1{`Qq6XO=xX^gLR1T6|w79Q}9C?2E-%`^xO zh$FiS4D6x#zwaGp*eVTOMxdj5?-I$bXk;B}&uHdM?5;uVeipLG!(kv}6WLD*b+PgD zV`1+UFa}hK&!VuiWV;iprZIR~7GUOTG|i(-=e)Oz<`S3?iWtXn$U`Nm+0@TYVPa}w zIuq;&rQ}i;|1JSoSngTxb;OiJ34wC2mmUbaZm|+9f8%{j&-s zKBa!KKMx#=LAN#N5EMe~J^*#CtD4X>{2QP-*(emRs>{N^7+|vL>zUKBJh}b%lXPjP z=0Z}l{^lXKQY(irwZI}jEp6VyE@Gual#04xCXoWz-qB^7)G;?}QH*A2li}xC(Ifl! 
zL)-Z7+3o7A&jv-sG;ae>_3_~~HptU64n#elzc|PpnAK4Z0xy!UKHI}vvzwju;LV-H zCj?iXi{7Nhm5)kgWtG=uZ#noEBqFg;UNH3Nby9x_jrYi@LFO5VE1!MXYwElM`@Yp1 ztugO+tyA7TDf&3(TsV6NxMVOb3G4fWpdt~{G=v=&Nn&YhDVeHo!zF zIb94p%@d_PRAS7OO-q^TG3IUD01I`;w>8IGlft4>LYi6ws()fqPJKi~RZq+_d>Te5 zXAMNgWb4oWWuiF*v2Olp@VHnXrczhtD32U)D*+;3G={G7s5TcaFWz5X50uld(z`=*_6!PcJI_seTR6jia~>CH=oQ~`{h^w; z`#8{0qn>x-+~;i%ukWh9sl+KIlUpH4TwT-Z>+)xV&Jm7YDazHfzy&NVV<;KU=)w;o}ne@BMb`17zb6m5qnv zckM6Ui5d=UTnGS}U$`|a6r&j7L2`s#m=E=KpvX!>Qgj0 z78mDvU1hLOdY~_nPx648Att8Kl_92E8&V-^JSx9C@k?~|I!-i)g(ctB*EZjEerv~; zRcU0b;iUr5>=1;{I$!U2A-kr-pNBX1Np`H+n@s&zSfj@arLFACD6I0wG?Gyo2&{QLx=Bfl3 z8DlU7wrZwTI1>>uT$LFMAP*u=B{BrUf zpE|b1PN8a#EjOn46O-9gI4l*n6}085fAdYJS&b0#8^vD`(LyVX;CqR!9jVbLU>m6@ z7p1C!1JE8j-x_%GNp8&77(#$8=>jy|{hHdhEy0#!!>iNW`o=_mDazm>kdC>B#rCez z+_!=!oof_pzOHEuYt+bE$vgejnp4s`>ea;B-#fi-0WIDYP>$OWmy{~y98u*8OT$%^ zR~q|$+Vm4oX&rie$hNxc>py}gvu>^Do$e}2$fXf(lLlEsOm{^nrFcIUVh3MV=JD#! z{|v-v^nsUhWG4HG=}h+hGB{6j#0c}ifC10$(5!Oz?F#Lp9^jPp-P;de+!6Rr4!zQ* zQXxo~=c$k)LL+@X71UZiKqMB=N3br(y&0;BP=etQwGefAG@6iEW%Q_gojS9xg-TH^ z)J3S=$to6&QV7tE=Ds|zEeNYe8*#C&AqBO*QNHQ=i0$VLrQm)t#76%*?`iy6*lLFE z;xkhjk>>-;$9t)XV3L}c84{PuuZ-C~fxeRpri9fa&f%oEg~{OnQbqB#Qg7;SZyO)n z&=mf76gBmcTZv|H(uSGd(>-Db2943jYz1PJI3*CgZT+T55t+%RqLy$aTgOkx^3*}r zWRwi7pE=RDq4w|Fc?$Fy5OE;Cd${T5y*jxu`MIoScP!CpXcS_7h)QKo=?@OvL**1Y zyT;ysrw|OD5>^NVpC9G%X3Ku;vkhO?*hFV@{yg|ng84%5hF>lT<327W=&m-9BD`-3 zCvO6f(AA?KffUaD0RyBq0jMY*;39og6xeW%Hk^R98M)d)HaR$NAqdK;2o;!{^=rg& zZe;O&?1&tws0`wbPRa-#NGOCk=bCTg3M!3UR~Pi3eUXB*5dsmZ$F$jeD@EU*k;kLa zGce#y-XVTSyL%g?`2CX-gPf1iJpM7@cVh=0no3Y3DsAg~di*mb>;#PjG#aNYrorFx z809f;T9G}&rnU&lw~6xpyaay@$nB#P7~W?Pl0@sgN2jU_O2qSho)^FXTwrF7r`{s;_C;$b_yrzaNt zMTv%r+x4asX9c$xafLaXNwy88N)^zDGJQWSgY`+ANFPv*m1GEJ=IB&IovwvQkT8Lu z2tw+HL#jfZi-0If-&&}G&=a{7nKK~hk(x?~3?%u|rQP<0oy?h+F``~?A>)OLlb#BSw6sth(2eBak))8+3d z3S{rNoqbofgEB}LugB8uH5@3~BPmW6nIk(6AxTE5Wxhuc0-E4*r1GF|YHSQ$tL<5rO_@_%g>(1mtvbs}{s0fdO1Jf6Yj1JIBUbM&ze?gjmI20)v$A 
za8-r3=Q=2%%8XA-7|E1SImScv6vln7SXt<~VVa!)Lky;~Isn9XqCs%%)<1$Gk_&Mn z3U&(D6W|gVinZ@t!@>W@+UxwG=im1QR?3%80;Kt!}biP*`ShTBn~->c{uHRWUPCi^eAS4ZYEC#{1NcL@t}+TJ{}gu&N|Jxt*va3x zm49ORUI~^MauN4UoIhFbE#3X_(&^mc;I)hp;w7iXrzmBiU2#HA6;r5C%4E0FeO!#4 zuwe2B^F&ig?oh`MW%T+suob@I*sjpsUN?=YkbIgIk)Dfr*Q@h&dZ&wKFb`F)L&Sv} zAPmF(RG+rkxr8P&mLs7Vr{?|EVyv&|JYLpH*}y%}E}_3=$i~-S?M=H9CELYYngM#J zA8kX)zI?1;NclNvPQHgIja~$Ep9zd5D{4+%Pm_tVr~=D7JhBF+H*GV6iBw=J>3{hm zY!_G~m?=n^nP!gX>C@y!N2fZws~xF>0wkEz_0qD}dN9+iDUo6NotX_@`iH?q z?r}3g$Dj>dJVOjYZ3&K_34Cs~Wj}@~8srk_52?=xrtk=I4sZ)-@ho1AgZVY4ZfE6S z&Fr6Ks1Yl_&8fN+WcV2WFzHy%(LbtdyhAe}Pg=>779QHI^A-^lrK-)>|5mjZn<~gD zB;HNNapIr&Dd2i6L!Gxp z1b8BY!;1bZ7#CEsHsNF66{Cdn2AslmOBJGgQ92TW=TKg;_T_nmhA!$M#YkFarKN?9Nh0mT9SWCGGDPOo;`I+ znt|0bA?H6riNrX(t8mX-z!JjuQ)rAf@mP}`ashvNxO{FZDJ0w+>1inQsl{BFON#MWVrj)ZnC(i*Sew}>9W3A$6`1kgYV8mF zOw4it{GF-L9ou_3{@`>RIo)EeK)q@))U}V0PGV8UB%PA@fAV^4eN3(=A7VttcfCc# zsCrOi&T&vuuc37{H_KEm-Oq*;E`eg6b?mzDmh@3z?B$8BzZJe+=%Vog zdY;y-x9Jf$8`*~ov51-_M8--eDv#u;19~u}g@KDWX%kH=I5+m9(>Ql=L3ZWiKf-8Q zVh-*`3#W1;e-M-ZRwc*4QIIDklV9;`pXMRE8g>#nph4~t!>7N#bdtfB-|BcM!i3o& zm77{Mqb`lMdbEH9l<2Ye>6G7Myh)XE0+Mm-q&##)^z@(@l-RS3L{1#Ae6VInXZp}p z%)yBEHPHN%Wmxt6AiZ;z$T+m7&Zo^+u38KGY$#Z3S$d-A@5{^d(UN8lFePBdU2XYg zdf5c(XW+kuWoi6Yf5Tq^k6yC{tWH(e0oEz)q<+6)9Z?{R;d=TM%f^HLhUhfKHiq5T zh+8q&7NN9q5>IIr+Tpvx9XB4%oqKl*i=}B*;j>7X{DsU3>?<;4mDMH8=Z(Wov9(;s zutRW(qi~@FSgGTkqBNZ!pV=N6I~$duB~$uVL)nFT%h2Ss5bK_*5thK0=uN@>76>Ox zmds%{pKK90%?SofsCAqFs(gdiy4;ff2xQ?8z8MPvHl~KM-yhS{P!nPY>(@sF(&Se} znEJqa@$|rnKBWVOz%PEnN)e4_fq)aJxRfMSIfDSXRXCynrIm~K{V^A^nN?U14y+q7 zBo5CKS7gwUF`cpmjNw2DHDK%|=5}V==M|ub+k14irjugKsuo!W#9yVEv#JMzB;U49PmPym!91%9Z*Y-q=7L)`$kFy>Bft zhx^~pI?zu@zlfB^)x;xwe~f`mO-A#hdmX8V8y1Ob@fFRo&kl&s1#3a&{}j>aNoZ{X zplfIT9NCC>Oa#-%=jYPGg&O+#K@I8dq(d@D2OEEP!#FL;LVOZQ+*=1N>tJ^D4l?2) zLW^hut^hzvIu}+5352U>3oRMtjv^`CGUv>yW5`ybE-uNkZ4LShvL)72x@&l`2DOoJ zOtmueega8i8ZZObc>}i)LT+Bkp7@t-~%Kwj#25 z2?djU;SG~Lv&?y`1FcWJA2Q7gsel=r(r4N_dbiu5z8q=@p6vW`Tsom-7UX=#BIu%H 
zD_CpqFqNsyX41D&q_&k&-h*G#D}pSW<@dt$=%^)g+Pi zs{U!@Ro?_hO(3?|`EEcpRDGv+5RGNiKpGD$S&>F#@e(RIGF!cs#}pkO3{ZFR_<33E z+h2#Z^7qmN&2@asSo*W<7h27OM+Ti>tm&^ip1&VMjiKvd*|=C`x?3Ktz#%lHb;$8y z*ut)`{|H*q+5lm}mGm-Rl&_IZOnqr*@XbFv-_A8W{K&?UZDe{O1d1DhpEd))cj8T6 zn2_=$H?QtV;fPv8ibB)dwuE2cA(s781Jp(H1`gOI7FSRIvzgt|7IQxDLA zwWSR<AsLxD1$gaqck%CF4KKO>pB zza}t?-I1z^3-D+D?RgGo?pp{l`gk`{jYESIkE%gg6)`4mR1;y}UwZ8gtHyhwrO}|Q ziy;6@u^~^`W*tE-qOv2uy&X-%yVvJl&;AWMgibJuA8!wB2t^eG5JqZoLM z`Lvw5-k|{G-&!`ZX$vfTxmK#)f=7HrdU+6jB1z|PKMhz5rXosO(T0>ln&uXrxO3s5e+p3sa1 zj7-?et}q-%xYqOhs$#wwQd$Q3z4#87XO)9$WHsDW8NG7eqNuFIDVa=sfH`9>b1HtY zAqk`3gplJy&H7o5r;vE?6n!~8$LRyl%t6eiGuB^xY{~P?O1!p?nn5LO+N!#(p!;l8 z1Fxt}XZ(bWeP6e6Zrw5upTOP%Wnk~J_{Nz#n_s)QM6_FO6E{8M5K`(s5lL_Zk{sa1 zc6yYbQfE^bHC{h<+q6EnWEjy-P7F`-aPHeK%t$!Z0au zn z-x85DDCDD{HLjLU9=GaGu~?oJ1}d1>3-I9{_trUwYBgZVb>2w(>2|f4pwP9tHGp{P zzeO~E0cE)t9Xd&nraDWo2ENWmvjnboC&d|0Ixh$_2x(EYmX{QK=_XLKwkD$U*k~J}R@}D2nFZ=%ujH04Z z&|5}A;g~0K9o9aukQ}uw`9k3142vmbp+++V3Twbt#Zp+|F4y+`-}Z1N*+;e>fd4Wo za}C4j;djC~0#dzI{!f*16 zGM{Zj>-R((Bh0>0&$P-xsZs_MpfsW}9*l8hmxe!ZtdxeIadfIsv)BAY`$kPZ{;H2^ z2^s)JM>$s;3^6-*a&~@mx1vi$ptI?Ez1D?>W+l%LR8X!|0R04EGPvpl{fw1T>b(Yz zK~6jRjrx>+=Ag}!&&%eegN0hX(sSnlI~8+h*FRmd8w^=3N`Tki1y=(P^yVHo`C5zY zQUg6cLC*-5#3Z-Rz!noaCx}_OA+>1Z?8W0im&$e)`jqqM!@y|Tsl}m5I15Eh^Hq*y zm>6kNBJf@r|8-t2SCsh8JlgH&@a^YZ9cXUnKU0ZS1XS>>Y73%fymV(57@=QefC$ZB zi#R#o@J#?Yk5h^F5a1 zPx@f?%iN#&uBXds*%FCwg_&%~s8ZPlV{=@vG(P2Ol&DPTvvh8u^k~4Evqu;#4_>-3 zf8d?33n;1^fAc8jljt-!))r`u-DxiPD(H|0{qP@!$}0caSTk|oTp*ZQIYuQ+{hcJ| zT(fF!FAAG#SO#X30@#9QH#ROyOsF5 z%~Ltpo-Ro+0nU$vuiW?i=D1$rP8(mi?wb!9lzf%&gW{xolY&mi7YcBDI{U2AO+NQ| zxg_8QpR@zfUep9(L3e$?(C?vC{#IIR@6B9ODfdU02Jo>zs3_G!2OV+EK0aIEhU%3q z%rS4e3rT{aWgaDxmKR{M_ua`5jZ7&&L{Q8_#w7WNqdA%C^*lGnbV-YOF;(f0M3l@j zRhWsXO3rd+e@Nl|PvxXYvf)~7R&Po|j1Ncob`dT-r;6!nVVIMI*=t4xZeoVR?{ z+fLCAldv3$znFY7h{DVR1Ur0BfSli)5n|cNOat45kQ0<&?E{k7uHOfoNIRF1E=(aY zFz)T}Kt01ZPL4%vDgr~k)&hD<-N&_(n%|MlKsFotLWL<{Qk6`ycxGR703}$aoha*s@7TYelWj!dhNv_b~HfS3RU%) 
zR+J!-r_ok)($=i5mt#R|LfzqioApF4!5bX zaSuaD2KDdl?>$|Mh3b|BtDd}#dJ|FpDozoijHZEQq-&*(m7odCPd`@` zAW7ek&nWzc#LV9G7FCiRU=pW&gjAubMZ)>T2Wuk*DO4F~&JL5u>ZMQj$o=qOAD0or zi7KrOoYsXr63>)}3z48z_!^3<$sUP+pDWX~{=_k%7A5x)oW-H{&#FSTk&N?;B1e86 zR#wH}l-|7@$0_0O4%w36nAnUE29lS2tj5a0#~N4wM{p5VZtJ-!4VG&18%{Tkhy}sy ze6;I%i1s)C7v&07I%b1zA>1VOh_rb~zF;R8t}oV}go*WB5W}bX*Az6rD)5jK%$2e? z7ww`7e6ZOD()=KMmQiYQcIO21qO>iS(-h}YC#&Q$EYbeFh^JF>-sD}a8B^&k%0Q#3 z8RIqAlXdA_!M{ac`_JwO;3K|~kLOo0G;{=W74PM!?)&~&P&pNF^LSKneO{iB6yO(G zT=-{2uAf8y)Oy6SCFWHMs{DR*qMxa(1?7gKBYv8hl z_uh>*1d-ujZU&vF%=CHSVJ>=a%slJrUd&9*S-ca}TVsuhR(-sg(RPZ#CvxugH*y@R zwk`?UrXcP1+kwsW)ep%DeQ~^hsd4&X;NXPa@}Z)B0xRRTu1?$OVw}+V-{-@vCxVio zWD0`ryCZmGUMbqyM9lMb;qTJ7f{HI+Y^z)U>6FavCu3#!UbB@{RwW0bWwJ}V0kV^L40yQ~sTr6o|Xjt+Ng7I1!8t_z>~#=(gV>zVIXdC`4F__C<*mZZA#1kT62{s>vX!7u{0o}VEi6OzFS}YS5x!UnMM&3`10}i;uGfb|T@?K^ zJN(BncGw2-y*e>>dTg%OKq0cOC(0~C&fy%jQ33K~spEI`zjf4|^yuer6spGMi>85w zo)9V>@8}7+v96RHY%fnJ-|HoF7c8_3)?s$fLRJ^R$BHlf2NQ&sUL!7vpEMa!5CE%s zKjkTnGZ~W_EXqV@a&2Ap`cx7uyJ(gJ^1Fua1}k%gjiGAmBCwJU;G)8m5qpghVs))I z^kf%4@`g^?Op(n>B& zf(%wN^k^LPi_YISIn{zRf9I|LddqsJskeHIbLHX*l^&Mx z&!$xl9nC`(b-Y_9OgdPlO3L3?vFbzEQy1UL`+Q7K1)iI$x=? 
zWn)q~3=+KT%1lKbI1GvifQ7MHCc+b2itRFsK_->Q;l_X?E`NX0-UAuhV0vO2j2LPR zH}pX<^ueK*YY{wRJ-2T)|1RHaPexz1SkJnOh6tO9QPJk$dQ3*YqBKkb=fuK6iaDU) zVvt;9hiTyY(`eBYqB*8I8?c#h#SF>eaXQqHq5A5Vo|HaPC_C^%mHrD)T{bqZ7c{>M zaBb!v%oBVd(~*S*GO@##=pe>Yv#w_gOi*7i#adT|GRW%r)QQ%oE%;>)eyM=vvIXUv z-S&LEuOUX8CbCV=8Yi8OSiQDNV+#sbUkd&yDe6wU!G~W6qf@ivtHasP1XuDqaz;=+ z9pGnSc57yy1UBMGvm>7bXYwg{KZ1>3 zjIF;#p&iOIJ`HAu|2~m-lkam!bG%;e#Y4A-VN0LxL*6m*0t3__GtA))*n8bc7D%i- zz`V?X`4<}vu>`i5W>3*NYCPVUyr+ZY4D9X@&d`0j%psZI;7+}iK1Ci0Uv^vGDCW}B zHc#W_j{x4Nv(i%^&(*xl!80|en9glr*QtXVw6?;^lI7QV`lHit{_JiXJYzJaf5{hS zXrbn4+dRmRp4pw$c*c13nCsbE+#Dj(~rKbar zhBw~N77Zs=B*hk(Ly8 zEMl!E@!sTI&gdYGTN>PN10=lOpSQv0OzuOt%c)V)@WUt3!R-+!<~?`BvHgFF`;vZ+ ze=>kN_S?Jnl?kiA76dg~e&%E{e;;aK@w2|$+rlrtYK}rrK%GJ`35p>1c?yNF)&&-j*@>}D0{7F@!EQtW061%xjv6WE^1t)(dAuOFk?yI zS5tPDMxP$X&rugBWMaM*?tlevg-b%_qZZg*6MK5Z@}>Z-QU+(|c8(%1^WOfx#uP6P z56=`YN1=hXUW(;{?0GAXdIqcKZNb~73rDNG5uR3-Kl{ul{~GgLmWC%M$wm(MNB@1# z+fFRXb3-59-(N+)vlQ`wzsl~?+=?RG^U0e}AD=erjOp_7u9x8uuO*#`2wmT_y8&&j zEnAZQTu|{_a~(Moe8F;dXXP_sw4MSkyVOidY4uyN)2O}*y8b4uk&CEjt%T>n72N0L z$qK5Sef#O?m5tsqx}E^Tr;Vp+@=Zy9n^zw%=tyT*SD=ln`;TI}o`>7}u7?8hO;@S% z*Y1Fwx1YVPqvZk}CNj%K4Zh#Kk1*GXs7I&#IlR9qWE4~^8%($<#B}>!uCx?QZ3R2X zSti7~F@78SaYwqpiN@W)^ z$@i|Vhnbg?{j*E2>*sgf4|LBflm8~+mRl$L1nkwcscW-?iotc&;00t{76sf9T1P$( zXTHDhBRWrX{BZv@ku7y~z5Zw#9fb!x^go6}8sU)~+;;{1w*KA~aBU4wfD$dKh{!f| zS>E>HsQZg_h&%1Y*U8Bf8aPy+)Y(KrFzcwPF zq%tD-^$C-cYMhUac^!Hqlau|4vy)mkaEMw)q{?u3e;-vJZe3p=9_a%-?AegheZSoU z`yQ{Njg2p4#A$=koS0b4Xz}YQnWEM}B5F<1L3S=rDh5-`P#_VH&mfJazX35FsMTW} zFnblFc^^a6y2#gSqMgCqVrGWhQfqB4BpAQLn9h|<$X#TD>!3fBxCr}NeThNyra0iyC0) za#)vvwvdW_fH${#UDTY(3R9c|9A_|buZd!GhK8GT>g#1NwmjK#OB!O+Er$e-vvgsAj>}wKaq!jQ{xq-d*(d;KT$opArd-^k~wydq{`b! 
zy6pp{+~NK?se7oUcB7=#t4O18?pi?~+b;8B1L@8UNY*~!P&qN3fnizUVAso93S@v5LfM-IB}H}xMGZtzcA!ddT+u!o>96Nx;*4}W91PRiyIE&$k}T~ zGnzxlzy+kUo$EE5F`E1+M&xk|lc0i#>FC!_Tvg%16p#<8tmVt-&p$Ssv6}F>N3JF| z{UGQq1qJ5porQe6ri4~1`l}OrXg8mda`&%dT!;*6C8nH{>Isc;9dwt>;Er!X2#51B zxPlbMN4ucJBBy|%FQpts6IW{P{+qa#UEn#UuN`d0?nK3VrMdW_wlsOX+V=$DaJxx=5h<3>C(D0p3f%-j%HppCp;$5^7}9Fk%C zfH^m;mo2oX^islhM8*xsYcjBcRc3-}7n)90w1`zv-}IftME*g7BypQr;Kyboq}QOc zg5-sXqsP?!Y181olO)7FYC>4{g;rtrZY<%TUDlea&NV_@XZ6{0m$eOiyh_Ch!h_nYFAeVu&su`SznO?Z59oiN`JI*ELYm{;aRzD){kQPYN@? zr%m05wb9^=s72Ts8ZWwQ8L%R|FXc+{lWj{5m~D*KjLEF^PYgQ?kuUP8;eQ7bv-M~6=dqq+O{}zVp70&rZj89s zN=%(>+Zc>E?olTwF}GP&)MFd0nY2!vyez_#X5j>4OCwnsA>s{-Q2yll2K;55Pl~!Y zrYq2>5xtSteQAg>@LmsJ5jlbXeH3Cl1%yGa_0QY!FKGV15a35?O_5g;ntox#P(W=N z%{(H}6Jad`bL!gk{CMM@!d&Qi5N#srBQmLJl#Xl&Bhiy>xQ;!W&=8A&nSHdx1guxg zqbTAdIIH_O4u+a-~-Z=Fe0sbm>(e|28FGC01I2tq!fi&Zu3tUDYN^?c8odh zu6?k`>4oTLffr(T_N|SC0O-^3q0$gt;<4PuSoNhR&!@56{!@+S{qKFl z=w{u1wv$2Q=)vU-#@EB&#&V02WQ?yz`&5=B%RBDF;%}4c0z4kV@<_&VJCs;lLik>6 zCq4eM7`ODn(U!aaMQh=x)>rtSU_-cSOSo!#U5`sdvKu@)K*#4goj=GoOyDIZQC3VO z!Qc4Im^P$ob4F`iNfnlA;Rx1ORVpwa!l!%`7T~_2lT+AxM6M&<8L0N^h-F8)vK_B1E%DnJPoi`|aMt9l)_ZM5%m zMU?|&1pbLi*c)IflJxek8B;1zPBl%EB+eh9bq_-yHO+e7Usvz%5&aogU zBaa>IEpGxzw>Bqk9{M2kyxH`uJMZy9{-^PfT!lHmq~6*R55o@4P*kq8K=1Wd0$e8u zey?{s0z)UT7QLZg;p4BByfbgX@NuEO1OyHVr=_#B6ddoe7O#d zckOQ`DYXw0E_WCLt^%rG?@L|pZG?MsaV_m4?D(lgDey>FL+sR~7fo4*X49bjI(laR z*lI2caPT#wJLG32a}JZ!WQ9inMqXCdc%ojA1?o;=bl-ykvKKF#hGJDk5m(wGY+X-iv6<41KDa^Be?Bxn6aV3%iHA<|0iY;-dR5fV@L#0Kx&7lYMOV-; zq-;Y3C6WOTY%4#+UIPS6k^w|r==*PHhV7IK!H0U)xmZllXsQdgq^31pNakgT-)Dv8+|zE|3mVL^ZX%q2-9=eLlccm|b!Kv69GI5Y)X< z81{)z?f#AttN>_W3rtb}+s<&`&)}~^Y|cvb%&*Lu&qDZ0%SFV^7JW1fXkzQO*e_Z7 zB-%{gz#gsiG%71xwzXz-Aa0z>yZ{iZNe8o|w42`u^F#_f9W2M|^WOfr z2_=|eu`&2t`M)P-*g7+ygN@Uag{{WY0?C65NGHF_`KOs;kFSo}K8_e{_}fkd#VIP8 z)UGjg+`<6QFl#8H^c3%5u&LuWG{SR+D{6N3C`QO7b1=H1$&KA?3)uv^sRo^`fC1U4 zfE-3WI1}Ua9u##~bVWaK+>OwtxgwIYfUVS5nQ{U6)!`>u5e{Nz&D7_YbfI0;b)58} 
zs9dIqZJZFo*9RJ@VyqBr)ZTU^BTQpX>t|I*bIg8oOuvT60|TU5D{c7ns>X1!+G2Z> z{2iGP07-8bR{0{It&qAYu23zslU5t`!U$KwaHz;|=-_4dyNlHpWwkjnYaMWWd7$X| zps0?(TcH?BxGw5v8=nt%-3?t)7pc1m_H=zze^qFPk7e-_-^+6 z=#p`*PsD6d+BM|HNyfKDI0UB&^&R1v!L3c{rp?a5{`TnMzR2VEy#C~(ywPFh)}97B zqO-tVrPd&y1ox01@EW%TUgQ2km~9A;woG;OQ;}g=tB<9dJ&%HGkzRt62<}fCuJPXx zeSP7`Cb>-M=KLq`Ybkhr%{x3jnM+P#b^V_sSN8;Gv@~UT9!I0@A^aikFVoUJ0b!0J zKjeFC;JCZf(^p?9xXN>Q>AXcEBl0SsD1v`JiqjMYkDxy3o8w(LymnpRJ=pjA zByUQx!IKbg!?Y&V3y;-;7;cPRQ6zLZc{86-25lt(?!~`R??aSzt9@mXmpLANc_^--XSK=mf-2IJpR|n4zp|VMV@0uyqkW08X8HjfXy7Q9{e_}=B_Ci(WY(-G|j?kJ_h z^B(*&6z=j11DrshWz;#p3Q6fT0G`;Qk?F~tcZ7R9$D{IHiDZkkRjOgzxJ((} zK?ltl!y$}sn|>)1R|(E+HPJVV5RzeZPGedQ6X`z6sha3K5cHkSB45rP`~cLD^L-8F zoPzH*R5erSYi8BLzyhU_1u~D$qH@k2kuzk0azzLEE_SC3P6xniPHp7I#Q{ShD}vQ8 zqrbS4>!VIh*lMFDOYe=C$9EOR-P!%9q}NRUtBWg*wf2{Rc*zj>;bGC#yq6l z_`YBavIzj#L}$@E$F2k0#H?u(RYKvCLN(JNHj@qjw&C$#N8OHxbMwZUTSz&k^nTb} za9eP)fQNwF&>eA^RMZ;Vj0cp<*%G`=y3&Sfv4&*%b*Dl#b6kG>pLo*(ctxBIKH*@T z%cBliiN^IhzK}JJSG&1axiwgqX2SwqYZ)9<3L54gmstk3(?&1JmHayIksrJIID2&! 
zXk?0i9w@4>*QRd&kQ>M*fW-8vNv00Sat}>g1^bkn9Mix=M)wR9bWN691i2fu}=Pg&jW>IC^CEFSt zPNnHHB;>xr402944jIPv=!Q~>7%BK#cBJmv&t9#t{BjRR)quRw53tQlb`0Omg`~1{ z$`Rjj7#Yy8u9KV{vH1VMe3?Zt`3Ac4e+b-zPV#kA*b5Db>%5>0CvIq_Quv9;sR`sS z;)ulk*36^|EvG1SjrPlf_Ln}f$Bk%ZP-xx0uSCC+YSJsemC|tU9YMyW6&*u1jJvAh zq}=8a2+2bC4Se8hv9}qkd)$%b|S`_q!*V%nDX=$Dl+# zOb)*@UU$t(o9h4V$yUcoplPAG8w7=| From 8a8e88c0ca74f02441fef06463a68ca3a6a99dad Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 15:02:29 +0100 Subject: [PATCH 314/361] add e2e test for updating metadata --- .../explorative_annotations_view.tsx | 1 - .../backend-snapshot-tests/annotations.e2e.ts | 20 +++++++++++++++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 92ce1c7ce93..c0f1936425a 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -707,7 +707,6 @@ class ExplorativeAnnotationsView extends React.PureComponent { title: "Stats", width: 150, render: (__: any, annotation: APIAnnotationInfo) => ( - // todop: don't use annotation.stats layer.tracingId), diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index 4e4dcd9b020..c79912ffa93 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -149,8 +149,6 @@ async function sendUpdateActions(explorational: APIAnnotation, queue: SaveQueueE ); } -// TODOp: Add tests for new update actions added in this pr (including updateAnnotationMetadata as this part of testing was removed editAnnotation() test case) - test.serial("Send update actions and compare resulting tracing", async (t) => { const createdExplorational = await 
api.createExplorational(datasetId, "skeleton", false, null); const tracingId = createdExplorational.annotationLayers[0].tracingId; @@ -244,3 +242,21 @@ test("Update Metadata for Skeleton Tracing", async (t) => { const tracings = await api.getTracingsForAnnotation(createdExplorational); t.snapshot(replaceVolatileValues(tracings[0])); }); + +test.serial("Send update actions for updating metadata", async (t) => { + const createdExplorational = await api.createExplorational(datasetId, "skeleton", false, null); + const newDescription = "new description"; + const [saveQueue] = addVersionNumbers( + createSaveQueueFromUpdateActions( + [[UpdateActions.updateMetadataOfAnnotation(newDescription)]], + 123456789, + ), + 0, + ); + await sendUpdateActions(createdExplorational, saveQueue); + const annotation = await api.getAnnotationProto( + createdExplorational.tracingStore.url, + createdExplorational.id, + ); + t.is(annotation.description, newDescription); +}); From 1e919797efc636f2f372f153a42396822eaad999 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 15:14:29 +0100 Subject: [PATCH 315/361] clean up typing of update actions --- .../oxalis/model/sagas/save_saga.ts | 6 +----- .../oxalis/model/sagas/update_actions.ts | 14 +++++--------- frontend/javascripts/oxalis/store.ts | 4 ++-- .../javascripts/oxalis/view/version_entry.tsx | 19 +++++++++++-------- 4 files changed, 19 insertions(+), 24 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 299024f09f0..984f815934d 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -41,7 +41,6 @@ import { diffSkeletonTracing } from "oxalis/model/sagas/skeletontracing_saga"; import { updateTdCamera, type UpdateActionWithoutIsolationRequirement, - type UpdateActionWithTracingId, } from "oxalis/model/sagas/update_actions"; import { diffVolumeTracing } from 
"oxalis/model/sagas/volumetracing_saga"; import { ensureWkReady } from "oxalis/model/sagas/ready_sagas"; @@ -284,10 +283,7 @@ function* markBucketsAsNotDirty(saveQueue: Array) { for (const saveEntry of saveQueue) { for (const updateAction of saveEntry.actions) { if (updateAction.name === "updateBucket") { - // The ID must belong to a segmentation layer because we are handling an updateBucket - // action. Moreover, updateBucket is layer dependent and thus has an actionTracingId. - const { actionTracingId: tracingId } = - updateAction.value as UpdateActionWithTracingId["value"]; + const { actionTracingId: tracingId } = updateAction.value; const segmentationLayer = Model.getSegmentationTracingLayer(tracingId); const segmentationMagInfo = yield* call(getMagInfo, segmentationLayer.mags); diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 36ac80c6071..5297b2f7a66 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -45,7 +45,7 @@ type UpdateUserBoundingBoxesInVolumeTracingUpdateAction = ReturnType< typeof updateUserBoundingBoxesInVolumeTracing >; export type UpdateBucketUpdateAction = ReturnType; -type UpdateSegmentGroupsUpdateAction = ReturnType; +export type UpdateSegmentGroupsUpdateAction = ReturnType; type UpdateTreeGroupsUpdateAction = ReturnType; @@ -104,12 +104,6 @@ export type UpdateActionWithoutIsolationRequirement = | SplitAgglomerateUpdateAction | MergeAgglomerateUpdateAction; -export type UpdateActionWithTracingId = UpdateAction & { - value: UpdateAction["value"] & { - actionTracingId: string; - }; -}; - // This update action is only created in the frontend for display purposes type CreateTracingUpdateAction = { name: "createTracing"; @@ -123,11 +117,13 @@ type ImportVolumeTracingUpdateAction = { value: { largestSegmentId: number; }; -}; // This update action is only created by the backend 
-type AddSegmentIndexUpdateAction = { +}; +// This update action is only created by the backend +export type AddSegmentIndexUpdateAction = { name: "addSegmentIndex"; value: { actionTimestamp: number; + actionTracingId: string; }; }; type AddServerValuesFn = (arg0: T) => T & { diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index d49560bea64..bce0672be12 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -50,7 +50,7 @@ import type { } from "oxalis/constants"; import type { BLEND_MODES, ControlModeEnum } from "oxalis/constants"; import type { Matrix4x4 } from "libs/mjs"; -import type { UpdateAction, UpdateActionWithTracingId } from "oxalis/model/sagas/update_actions"; +import type { UpdateAction } from "oxalis/model/sagas/update_actions"; import AnnotationReducer from "oxalis/model/reducers/annotation_reducer"; import DatasetReducer from "oxalis/model/reducers/dataset_reducer"; import type DiffableMap from "libs/diffable_map"; @@ -453,7 +453,7 @@ export type SaveQueueEntry = { version: number; timestamp: number; authorId: string; - actions: Array; + actions: Array; transactionId: string; transactionGroupCount: number; transactionGroupIndex: number; diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index 0cb02fa0861..cd77de50f78 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -39,10 +39,13 @@ import type { UpdateAnnotationLayerNameUpdateAction, UpdateMappingNameUpdateAction, DeleteSegmentDataUpdateAction, - UpdateActionWithTracingId, + UpdateAction, AddLayerToAnnotationUpdateAction, DeleteAnnotationLayerUpdateAction, UpdateMetadataOfAnnotationUpdateAction, + UpdateBucketUpdateAction, + UpdateSegmentGroupsUpdateAction, + AddSegmentIndexUpdateAction, } from "oxalis/model/sagas/update_actions"; import FormattedDate from 
"components/formatted_date"; import { MISSING_GROUP_ID } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; @@ -150,7 +153,7 @@ const descriptionFns: Record< icon: , }), updateBucket: ( - firstAction: UpdateActionWithTracingId, + firstAction: UpdateBucketUpdateAction, _actionCount: number, tracing: HybridTracing, ): Description => { @@ -161,7 +164,7 @@ const descriptionFns: Record< }; }, updateSegmentGroups: ( - firstAction: UpdateActionWithTracingId, + firstAction: UpdateSegmentGroupsUpdateAction, _actionCount: number, tracing: HybridTracing, ): Description => { @@ -202,7 +205,7 @@ const descriptionFns: Record< icon: , }), createSegment: ( - firstAction: UpdateActionWithTracingId & CreateSegmentUpdateAction, + firstAction: CreateSegmentUpdateAction, _actionCount: number, tracing: HybridTracing, ): Description => { @@ -213,7 +216,7 @@ const descriptionFns: Record< }; }, updateSegment: ( - firstAction: UpdateActionWithTracingId & UpdateSegmentUpdateAction, + firstAction: UpdateSegmentUpdateAction, _actionCount: number, tracing: HybridTracing, ): Description => { @@ -224,7 +227,7 @@ const descriptionFns: Record< }; }, deleteSegment: ( - firstAction: UpdateActionWithTracingId & DeleteSegmentUpdateAction, + firstAction: DeleteSegmentUpdateAction, _actionCount: number, tracing: HybridTracing, ): Description => { @@ -235,7 +238,7 @@ const descriptionFns: Record< }; }, deleteSegmentData: ( - firstAction: UpdateActionWithTracingId & DeleteSegmentDataUpdateAction, + firstAction: DeleteSegmentDataUpdateAction, _actionCount: number, tracing: HybridTracing, ): Description => { @@ -246,7 +249,7 @@ const descriptionFns: Record< }; }, addSegmentIndex: ( - firstAction: UpdateActionWithTracingId, + firstAction: AddSegmentIndexUpdateAction, _actionCount: number, tracing: HybridTracing, ): Description => { From 20ea661ca8968c2ce73cf78a0af4ea862a068f86 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 15:18:47 +0100 Subject: [PATCH 316/361] update todo 
comment --- frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts | 3 ++- .../oxalis/view/right-border-tabs/sidebar_context_menu.tsx | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts index c9aa2351d4c..dbc7dba6729 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga_constants.ts @@ -13,7 +13,8 @@ export const SETTINGS_MAX_RETRY_COUNT = 20; // 20 * 15s == 5m export const MAXIMUM_ACTION_COUNT_PER_BATCH = 1000; -// todop: should this be smarter? +// See #8274. +// This constant used to be the following: // export const MAXIMUM_ACTION_COUNT_PER_SAVE = { // skeleton: 15000, // volume: 3000, diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/sidebar_context_menu.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/sidebar_context_menu.tsx index 0bcfd371d6d..b163279734a 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/sidebar_context_menu.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/sidebar_context_menu.tsx @@ -40,7 +40,7 @@ type ContextMenuProps = { contextMenuPosition: [number, number] | null | undefined; hideContextMenu: () => void; menu: MenuProps | null | undefined; - className: string; // todop: should be unique? 
+ className: string; }; export function ContextMenuContainer(props: ContextMenuProps) { From d7cb2e2f87a45a00ef0b56dc18719ac3dd4ed512 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 12 Dec 2024 15:36:02 +0100 Subject: [PATCH 317/361] unused import --- frontend/javascripts/oxalis/view/version_entry.tsx | 1 - webknossos-datastore/proto/Annotation.proto | 2 +- .../tracingstore/annotation/TSAnnotationService.scala | 8 ++++---- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index cd77de50f78..6b414d7c234 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -39,7 +39,6 @@ import type { UpdateAnnotationLayerNameUpdateAction, UpdateMappingNameUpdateAction, DeleteSegmentDataUpdateAction, - UpdateAction, AddLayerToAnnotationUpdateAction, DeleteAnnotationLayerUpdateAction, UpdateMetadataOfAnnotationUpdateAction, diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 09878eaba80..4ef34a60e1e 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -19,5 +19,5 @@ message AnnotationProto { message AnnotationLayerProto { required string tracingId = 1; required string name = 2; - required AnnotationLayerTypeProto type = 4; + required AnnotationLayerTypeProto type = 3; } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 479921e342b..d3fbe006996 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -67,8 +67,8 @@ class 
TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def get(annotationId: String, version: Option[Long])(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationProto] = for { - isTemporaryTracing <- temporaryTracingService.isTemporaryAnnotation(annotationId) - annotation <- if (isTemporaryTracing) temporaryTracingService.getAnnotation(annotationId) + isTemporaryAnnotation <- temporaryTracingService.isTemporaryAnnotation(annotationId) + annotation <- if (isTemporaryAnnotation) temporaryTracingService.getAnnotation(annotationId) else for { withTracings <- getWithTracings(annotationId, version) ?~> "annotation.notFound" @@ -101,8 +101,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - annotationWithVersion <- tracingDataStore.annotations.get(annotationId, Some(version))( - fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + annotationWithVersion <- tracingDataStore.annotations.get(annotationId, Some(version))(fromProtoBytes[ + AnnotationProto]) ?~> "getAnnotation.failed" // TODO pass this in here from caller? isn’t it newestMaterialized? 
annotation = annotationWithVersion.value annotationWithTracings <- findTracingsForAnnotation(annotation) ?~> "findTracingsForAnnotation.failed" annotationWithTracingsAndMappings <- findEditableMappingsForAnnotation( From 95853192190dd437224608ebb6f88a4c59856ced Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 12 Dec 2024 15:47:53 +0100 Subject: [PATCH 318/361] save one lookup of annotationProto --- .../annotation/TSAnnotationService.scala | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index d3fbe006996..d9c9bba9b48 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -93,22 +93,25 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ => newInnerCache) updatedAnnotation <- materializedAnnotationInnerCache.getOrLoad( targetVersion, - _ => getWithTracingsVersioned(annotationId, targetVersion, reportChangesToWk = reportChangesToWk) + _ => + getWithTracingsVersioned(annotationId, + newestMaterialized, + targetVersion, + reportChangesToWk = reportChangesToWk) ) } yield updatedAnnotation - private def getWithTracingsVersioned(annotationId: String, version: Long, reportChangesToWk: Boolean)( - implicit ec: ExecutionContext, - tc: TokenContext): Fox[AnnotationWithTracings] = + private def getWithTracingsVersioned( + annotationId: String, + newestMaterializedAnnotation: AnnotationProto, + version: Long, + reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - annotationWithVersion <- tracingDataStore.annotations.get(annotationId, 
Some(version))(fromProtoBytes[ - AnnotationProto]) ?~> "getAnnotation.failed" // TODO pass this in here from caller? isn’t it newestMaterialized? - annotation = annotationWithVersion.value - annotationWithTracings <- findTracingsForAnnotation(annotation) ?~> "findTracingsForAnnotation.failed" + annotationWithTracings <- findTracingsForAnnotation(newestMaterializedAnnotation) ?~> "findTracingsForAnnotation.failed" annotationWithTracingsAndMappings <- findEditableMappingsForAnnotation( annotationId, annotationWithTracings, - annotation.version, + newestMaterializedAnnotation.version, version // Note: this targetVersion is used for the updater buffers, and is overwritten for each update group, see annotation.withNewUpdaters ) ?~> "findEditableMappingsForAnnotation.failed" updated <- applyPendingUpdates(annotationWithTracingsAndMappings, annotationId, version, reportChangesToWk) ?~> "applyUpdates.failed" From e7b071f70940d982bfbc8d11f6c07d74acb1dea5 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 12 Dec 2024 18:51:02 +0100 Subject: [PATCH 319/361] fix sandbox view --- frontend/javascripts/oxalis/model_initialization.ts | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 2298f7d1598..6ddc1915f7b 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -239,14 +239,10 @@ export async function initialize( serverVolumeTracings, annotation.id, ); - if (annotationProto == null) { - // Satisfy TS. annotationProto should always exist if annotation exists. - throw new Error("Annotation protobuf should not be null."); - } initializeAnnotation( annotation, - annotationProto.version, - annotationProto.earliestAccessibleVersion, + annotationProto?.version ?? 1, + annotationProto?.earliestAccessibleVersion ?? 
0, serverTracings, editableMappings, ); From 5ed14b07d5e41a954ff084192a58d974f5c6bbdd Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 12 Dec 2024 22:48:03 +0100 Subject: [PATCH 320/361] more verbose bucket loading errors, include Failure message --- .../datastore/services/DatasetErrorLoggingService.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala index b99dcf804e7..50e9c7bf3da 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetErrorLoggingService.scala @@ -61,18 +61,19 @@ class DatasetErrorLoggingService @Inject()( Fox.successful(data) } case Failure(msg, Full(e: InternalError), _) => - logger.error(s"Caught internal error while $label for $dataSourceId:", e) + logger.error(s"Caught internal error ($msg) while $label for $dataSourceId:", e) applicationHealthService.pushError(e) Fox.failure(msg, Full(e)) case Failure(msg, Full(exception), _) => if (shouldLog(dataSourceId.organizationId, dataSourceId.directoryName)) { - logger.error(s"Error while $label for $dataSourceId Stack trace: ${TextUtils.stackTraceAsString(exception)} ") + logger.error( + s"Error while $label for $dataSourceId: $msg – Stack trace: ${TextUtils.stackTraceAsString(exception)} ") registerLogged(dataSourceId.organizationId, dataSourceId.directoryName) } Fox.failure(msg, Full(exception)) case Failure(msg, Empty, _) => if (shouldLog(dataSourceId.organizationId, dataSourceId.directoryName)) { - logger.error(s"Error while $label for $dataSourceId, Empty failure") + logger.error(s"Error while $label for $dataSourceId, Failure without exception: $msg") registerLogged(dataSourceId.organizationId, 
dataSourceId.directoryName) } Fox.failure(msg) From b27b78b69dadf3162f095cb64efa6bfe163dd2af Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 08:35:15 +0100 Subject: [PATCH 321/361] set editableMappingsMayHavePendingUpdates to None after all updates are applied --- .../tracingstore/annotation/AnnotationWithTracings.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 78cb9c4aedb..0176dec5255 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -117,8 +117,12 @@ case class AnnotationWithTracings( case Left(t: SkeletonTracing) => Left(t.withVersion(newVersion)) case Right(t: VolumeTracing) => Right(t.withVersion(newVersion)) } - this.copy(annotation = annotation.copy(version = newVersion, skeletonMayHavePendingUpdates = None), - tracingsById = tracingsUpdated.toMap) + this.copy( + annotation = annotation.copy(version = newVersion, + skeletonMayHavePendingUpdates = None, + editableMappingsMayHavePendingUpdates = None), + tracingsById = tracingsUpdated.toMap + ) } def withNewUpdaters(materializedVersion: Long, targetVersion: Long): AnnotationWithTracings = { From fe31b442a45364f01f119a2c70221a0f64e458de Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 10:21:55 +0100 Subject: [PATCH 322/361] implement migration feedback part1 --- .../connections.py | 6 +++--- tools/migration-unified-annotation-versioning/main.py | 2 +- .../migration-unified-annotation-versioning/migration.py | 8 ++++++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git
a/tools/migration-unified-annotation-versioning/connections.py b/tools/migration-unified-annotation-versioning/connections.py index 367a70c7cec..5aa4b884317 100644 --- a/tools/migration-unified-annotation-versioning/connections.py +++ b/tools/migration-unified-annotation-versioning/connections.py @@ -13,7 +13,7 @@ def connect_to_fossildb(host: str, label: str): - max_message_length = 2147483647 + max_message_length = 2147483647 # 2G channel = grpc.insecure_channel(host, options=[("grpc.max_send_message_length", max_message_length), ("grpc.max_receive_message_length", max_message_length)]) stub = proto_rpc.FossilDBStub(channel) test_fossildb_health(stub, f"{label} FossilDB at {host}") @@ -40,7 +40,7 @@ def connect_to_postgres(postgres_config: str): def parse_connection_string(connection_string: str) -> Dict[str, Any]: pattern = r"^(?P\w+)@(?!.*@)(?P[^:/]+)(?::(?P\d+))?(?P/[^ ]*)?$" - match = re.match(pattern, connection_string) + match = re.match(pattern, connection_string.removeprefix("postgresql://")) if match: return { "user": match.group("user"), @@ -49,4 +49,4 @@ def parse_connection_string(connection_string: str) -> Dict[str, Any]: "database": match.group("database").lstrip("/") } else: - raise ValueError("Invalid postgres connection string, needs to be user@host:port/database.") + raise ValueError("Invalid postgres connection string, needs to be postgresql://user@host:port/database.") diff --git a/tools/migration-unified-annotation-versioning/main.py b/tools/migration-unified-annotation-versioning/main.py index 8f8e3029114..5d899ce6b23 100755 --- a/tools/migration-unified-annotation-versioning/main.py +++ b/tools/migration-unified-annotation-versioning/main.py @@ -17,7 +17,7 @@ def main(): parser.add_argument("--dst", type=str, help="Destination fossildb host and port", required=False) parser.add_argument("--dry", help="Only read and process data, do not write out results", action="store_true") parser.add_argument("--num_threads", help="Number of threads to 
migrate the annotations in parallel", type=int, default=1) - parser.add_argument("--postgres", help="Postgres connection specifier.", type=str, default="postgres@localhost:5432/webknossos") + parser.add_argument("--postgres", help="Postgres connection specifier, default is postgresql://postgres@localhost:5432/webknossos", type=str, default="postgresql://postgres@localhost:5432/webknossos") parser.add_argument("--previous_start", help="Previous run start time. Only annotations last modified after that time will be migrated. Use for second run in incremental migration. Example: 2024-11-27 10:37:30.171083", type=str) parser.add_argument("--start", help="Run “start time”. Only annotations last modified before that time will be migrated. Defaults to now. Change if FossilDB content is not up to date with postgres. Example: 2024-11-27 10:37:30.171083", type=str) parser.add_argument("--count_versions", help="Instead of migrating, only count materialized versions of the annotation", action="store_true") diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 5df336d671a..79a496631fe 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -13,6 +13,7 @@ import threading from functools import partial import heapq +import sys import fossildbapi_pb2 as proto import VolumeTracing_pb2 as Volume @@ -55,9 +56,10 @@ def run(self): with concurrent.futures.ThreadPoolExecutor(max_workers=self.args.num_threads) as executor: executor.map(self.migrate_annotation, annotations) + log_since(self.before, f"Migrating all the {self.total_count} things") if self.failure_count > 0: logger.info(f"There were failures for {self.failure_count} annotations. 
See logs for details.") - log_since(self.before, f"Migrating all the {self.total_count} things") + sys.exit(1) def migrate_annotation(self, annotation): before = time.time() @@ -138,10 +140,12 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers for tracing_id, _ in layers: version_mapping[tracing_id] = {0: 0} # We always want to keep the initial version 0 of all layers, even if there are no updates at all. + # We use a priority queue to efficiently select which tracing each next update should come from. + # This effectively implements a merge sort queue = [] for i, update_groups_for_layer in enumerate(all_update_groups): if update_groups_for_layer: - # The priority queue sorts tupley lexicographically, so timestamp is the main sorting key here + # The priority queue sorts tuples lexicographically, so timestamp is the main sorting key here heapq.heappush(queue, (update_groups_for_layer[0], i, 0)) while queue: value, layer_index, element_index = heapq.heappop(queue) From 25bb7c21345a9b70bf440e2b23c03debc52e5bd5 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 10:22:40 +0100 Subject: [PATCH 323/361] Update MIGRATIONS.unreleased.md Co-authored-by: Norman Rzepka --- MIGRATIONS.unreleased.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 824491a6cf4..d9510d25571 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -9,8 +9,9 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). [Commits](https://github.com/scalableminds/webknossos/compare/24.12.0...HEAD) - Removed support for HTTP API versions 3 and 4. [#8075](https://github.com/scalableminds/webknossos/pull/8075) - The migration route `addSegmentIndex` was removed. If you haven’t done this yet, but need segment indices for your volume annotations, upgrade to an earlier version first, call addSegmentIndex, and then upgrade again. 
[#7917](https://github.com/scalableminds/webknossos/pull/7917) -- FossilDB must now be opened with new column family set `skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates`. [#7917](https://github.com/scalableminds/webknossos/pull/7917) -- The FossilDB content needs to be migrated. For that, use the python program at `tools/migration-unified-annotation-versioning` (see python main.py --help for instructions). Note that it writes to a completely new FossilDB, that must first be opened with the new column families, see above. The migration code needs to connect to postgres, to the old FossilDB and to the new. After the migration, replace the old FossilDB by the new one. The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. [#7917](https://github.com/scalableminds/webknossos/pull/7917) +- The versioning scheme of annotations has been changed. That requires a larger migration including the FossilDB content. [#7917](https://github.com/scalableminds/webknossos/pull/7917) + - FossilDB must now be opened with new column family set `skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates`. [#7917](https://github.com/scalableminds/webknossos/pull/7917) + - The FossilDB content needs to be migrated. For that, use the python program at `tools/migration-unified-annotation-versioning` (see python main.py --help for instructions). Note that it writes to a completely new FossilDB, that must first be opened with the new column families, see above. 
The migration code needs to connect to postgres, to the old FossilDB and to the new. After the migration, replace the old FossilDB by the new one. The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. [#7917](https://github.com/scalableminds/webknossos/pull/7917) ### Postgres Evolutions: - [124-decouple-dataset-directory-from-name](conf/evolutions/124-decouple-dataset-directory-from-name) From b2dfcaaca3603c51166e13f5a69ddce50151754c Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 13:56:53 +0100 Subject: [PATCH 324/361] wip: clean-up older migration run in case of reverts. use new fossilDB putMultipleVersions --- .../fossildbapi_pb2.py | 94 ++++++++++--------- .../fossildbapi_pb2_grpc.py | 66 +++++++++++++ .../migration.py | 59 ++++++++++-- 3 files changed, 168 insertions(+), 51 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py index 6eafe73fb96..6267f9144b6 100644 --- a/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py +++ b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py @@ -13,7 +13,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x66ossildbapi.proto\x12 com.scalableminds.fossildb.proto\"\x0f\n\rHealthRequest\"4\n\x0bHealthReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"R\n\nGetRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\x12\n\nmayBeEmpty\x18\x04 \x01(\x08\"W\n\x08GetReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x02(\x0c\x12\x15\n\ractualVersion\x18\x04 
\x02(\x04\"M\n\nPutRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\r\n\x05value\x18\x04 \x02(\x0c\"1\n\x08PutReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"A\n\rDeleteRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x02(\x04\"4\n\x0b\x44\x65leteReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"k\n\x1aGetMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"c\n\x18GetMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\x0c\x12\x10\n\x08versions\x18\x04 \x03(\x04\"s\n\x16GetMultipleKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x15\n\rstartAfterKey\x18\x02 \x01(\t\x12\x0e\n\x06prefix\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\x04\x12\r\n\x05limit\x18\x05 \x01(\r\"s\n\x14GetMultipleKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\x12\x0e\n\x06values\x18\x04 \x03(\x0c\x12\x16\n\x0e\x61\x63tualVersions\x18\x05 \x03(\x04\"n\n\x1d\x44\x65leteMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"D\n\x1b\x44\x65leteMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"K\n\x0fListKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\r\n\x05limit\x18\x02 \x01(\r\x12\x15\n\rstartAfterKey\x18\x03 \x01(\t\"D\n\rListKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 
\x03(\t\"U\n\x13ListVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\r\n\x05limit\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"L\n\x11ListVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\"\x0f\n\rBackupRequest\"a\n\x0b\x42\x61\x63kupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x02(\r\x12\x11\n\ttimestamp\x18\x04 \x02(\x04\x12\x0c\n\x04size\x18\x05 \x02(\x04\"\x1a\n\x18RestoreFromBackupRequest\"?\n\x16RestoreFromBackupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"\x17\n\x15\x43ompactAllDataRequest\"<\n\x13\x43ompactAllDataReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\":\n\x0f\x45xportDBRequest\x12\x12\n\nnewDataDir\x18\x01 \x02(\t\x12\x13\n\x0boptionsFile\x18\x02 \x01(\t\"6\n\rExportDBReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 
\x01(\t2\xc2\x0c\n\x08\x46ossilDB\x12j\n\x06Health\x12/.com.scalableminds.fossildb.proto.HealthRequest\x1a-.com.scalableminds.fossildb.proto.HealthReply\"\x00\x12\x61\n\x03Get\x12,.com.scalableminds.fossildb.proto.GetRequest\x1a*.com.scalableminds.fossildb.proto.GetReply\"\x00\x12\x91\x01\n\x13GetMultipleVersions\x12<.com.scalableminds.fossildb.proto.GetMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.GetMultipleVersionsReply\"\x00\x12\x85\x01\n\x0fGetMultipleKeys\x12\x38.com.scalableminds.fossildb.proto.GetMultipleKeysRequest\x1a\x36.com.scalableminds.fossildb.proto.GetMultipleKeysReply\"\x00\x12\x61\n\x03Put\x12,.com.scalableminds.fossildb.proto.PutRequest\x1a*.com.scalableminds.fossildb.proto.PutReply\"\x00\x12j\n\x06\x44\x65lete\x12/.com.scalableminds.fossildb.proto.DeleteRequest\x1a-.com.scalableminds.fossildb.proto.DeleteReply\"\x00\x12\x9a\x01\n\x16\x44\x65leteMultipleVersions\x12?.com.scalableminds.fossildb.proto.DeleteMultipleVersionsRequest\x1a=.com.scalableminds.fossildb.proto.DeleteMultipleVersionsReply\"\x00\x12p\n\x08ListKeys\x12\x31.com.scalableminds.fossildb.proto.ListKeysRequest\x1a/.com.scalableminds.fossildb.proto.ListKeysReply\"\x00\x12|\n\x0cListVersions\x12\x35.com.scalableminds.fossildb.proto.ListVersionsRequest\x1a\x33.com.scalableminds.fossildb.proto.ListVersionsReply\"\x00\x12j\n\x06\x42\x61\x63kup\x12/.com.scalableminds.fossildb.proto.BackupRequest\x1a-.com.scalableminds.fossildb.proto.BackupReply\"\x00\x12\x8b\x01\n\x11RestoreFromBackup\x12:.com.scalableminds.fossildb.proto.RestoreFromBackupRequest\x1a\x38.com.scalableminds.fossildb.proto.RestoreFromBackupReply\"\x00\x12\x82\x01\n\x0e\x43ompactAllData\x12\x37.com.scalableminds.fossildb.proto.CompactAllDataRequest\x1a\x35.com.scalableminds.fossildb.proto.CompactAllDataReply\"\x00\x12p\n\x08\x45xportDB\x12\x31.com.scalableminds.fossildb.proto.ExportDBRequest\x1a/.com.scalableminds.fossildb.proto.ExportDBReply\"\x00') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x11\x66ossildbapi.proto\x12 com.scalableminds.fossildb.proto\"\x0f\n\rHealthRequest\"4\n\x0bHealthReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"R\n\nGetRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\x12\n\nmayBeEmpty\x18\x04 \x01(\x08\"W\n\x08GetReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x02(\x0c\x12\x15\n\ractualVersion\x18\x04 \x02(\x04\"M\n\nPutRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\r\n\x05value\x18\x04 \x02(\x0c\"1\n\x08PutReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"_\n\x1aPutMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\x12\x0e\n\x06values\x18\x04 \x03(\x0c\"A\n\x18PutMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"A\n\rDeleteRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x02(\x04\"4\n\x0b\x44\x65leteReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\">\n\x18\x44\x65leteAllByPrefixRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0e\n\x06prefix\x18\x02 \x02(\t\"?\n\x16\x44\x65leteAllByPrefixReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"k\n\x1aGetMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"c\n\x18GetMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\x0c\x12\x10\n\x08versions\x18\x04 
\x03(\x04\"s\n\x16GetMultipleKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x15\n\rstartAfterKey\x18\x02 \x01(\t\x12\x0e\n\x06prefix\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\x04\x12\r\n\x05limit\x18\x05 \x01(\r\"s\n\x14GetMultipleKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\x12\x0e\n\x06values\x18\x04 \x03(\x0c\x12\x16\n\x0e\x61\x63tualVersions\x18\x05 \x03(\x04\"n\n\x1d\x44\x65leteMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"D\n\x1b\x44\x65leteMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"K\n\x0fListKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\r\n\x05limit\x18\x02 \x01(\r\x12\x15\n\rstartAfterKey\x18\x03 \x01(\t\"D\n\rListKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\"U\n\x13ListVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\r\n\x05limit\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"L\n\x11ListVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\"\x0f\n\rBackupRequest\"a\n\x0b\x42\x61\x63kupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x02(\r\x12\x11\n\ttimestamp\x18\x04 \x02(\x04\x12\x0c\n\x04size\x18\x05 \x02(\x04\"\x1a\n\x18RestoreFromBackupRequest\"?\n\x16RestoreFromBackupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"\x17\n\x15\x43ompactAllDataRequest\"<\n\x13\x43ompactAllDataReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\":\n\x0f\x45xportDBRequest\x12\x12\n\nnewDataDir\x18\x01 
\x02(\t\x12\x13\n\x0boptionsFile\x18\x02 \x01(\t\"6\n\rExportDBReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t2\xe4\x0e\n\x08\x46ossilDB\x12j\n\x06Health\x12/.com.scalableminds.fossildb.proto.HealthRequest\x1a-.com.scalableminds.fossildb.proto.HealthReply\"\x00\x12\x61\n\x03Get\x12,.com.scalableminds.fossildb.proto.GetRequest\x1a*.com.scalableminds.fossildb.proto.GetReply\"\x00\x12\x91\x01\n\x13GetMultipleVersions\x12<.com.scalableminds.fossildb.proto.GetMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.GetMultipleVersionsReply\"\x00\x12\x85\x01\n\x0fGetMultipleKeys\x12\x38.com.scalableminds.fossildb.proto.GetMultipleKeysRequest\x1a\x36.com.scalableminds.fossildb.proto.GetMultipleKeysReply\"\x00\x12\x61\n\x03Put\x12,.com.scalableminds.fossildb.proto.PutRequest\x1a*.com.scalableminds.fossildb.proto.PutReply\"\x00\x12\x91\x01\n\x13PutMultipleVersions\x12<.com.scalableminds.fossildb.proto.PutMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.PutMultipleVersionsReply\"\x00\x12j\n\x06\x44\x65lete\x12/.com.scalableminds.fossildb.proto.DeleteRequest\x1a-.com.scalableminds.fossildb.proto.DeleteReply\"\x00\x12\x9a\x01\n\x16\x44\x65leteMultipleVersions\x12?.com.scalableminds.fossildb.proto.DeleteMultipleVersionsRequest\x1a=.com.scalableminds.fossildb.proto.DeleteMultipleVersionsReply\"\x00\x12\x8b\x01\n\x11\x44\x65leteAllByPrefix\x12:.com.scalableminds.fossildb.proto.DeleteAllByPrefixRequest\x1a\x38.com.scalableminds.fossildb.proto.DeleteAllByPrefixReply\"\x00\x12p\n\x08ListKeys\x12\x31.com.scalableminds.fossildb.proto.ListKeysRequest\x1a/.com.scalableminds.fossildb.proto.ListKeysReply\"\x00\x12|\n\x0cListVersions\x12\x35.com.scalableminds.fossildb.proto.ListVersionsRequest\x1a\x33.com.scalableminds.fossildb.proto.ListVersionsReply\"\x00\x12j\n\x06\x42\x61\x63kup\x12/.com.scalableminds.fossildb.proto.BackupRequest\x1a-.com.scalableminds.fossildb.proto.BackupReply\"\x00\x12\x8b\x01\n\x11RestoreFromBackup\x12:.com
.scalableminds.fossildb.proto.RestoreFromBackupRequest\x1a\x38.com.scalableminds.fossildb.proto.RestoreFromBackupReply\"\x00\x12\x82\x01\n\x0e\x43ompactAllData\x12\x37.com.scalableminds.fossildb.proto.CompactAllDataRequest\x1a\x35.com.scalableminds.fossildb.proto.CompactAllDataReply\"\x00\x12p\n\x08\x45xportDB\x12\x31.com.scalableminds.fossildb.proto.ExportDBRequest\x1a/.com.scalableminds.fossildb.proto.ExportDBReply\"\x00') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'fossildbapi_pb2', globals()) @@ -32,46 +32,54 @@ _PUTREQUEST._serialized_end=376 _PUTREPLY._serialized_start=378 _PUTREPLY._serialized_end=427 - _DELETEREQUEST._serialized_start=429 - _DELETEREQUEST._serialized_end=494 - _DELETEREPLY._serialized_start=496 - _DELETEREPLY._serialized_end=548 - _GETMULTIPLEVERSIONSREQUEST._serialized_start=550 - _GETMULTIPLEVERSIONSREQUEST._serialized_end=657 - _GETMULTIPLEVERSIONSREPLY._serialized_start=659 - _GETMULTIPLEVERSIONSREPLY._serialized_end=758 - _GETMULTIPLEKEYSREQUEST._serialized_start=760 - _GETMULTIPLEKEYSREQUEST._serialized_end=875 - _GETMULTIPLEKEYSREPLY._serialized_start=877 - _GETMULTIPLEKEYSREPLY._serialized_end=992 - _DELETEMULTIPLEVERSIONSREQUEST._serialized_start=994 - _DELETEMULTIPLEVERSIONSREQUEST._serialized_end=1104 - _DELETEMULTIPLEVERSIONSREPLY._serialized_start=1106 - _DELETEMULTIPLEVERSIONSREPLY._serialized_end=1174 - _LISTKEYSREQUEST._serialized_start=1176 - _LISTKEYSREQUEST._serialized_end=1251 - _LISTKEYSREPLY._serialized_start=1253 - _LISTKEYSREPLY._serialized_end=1321 - _LISTVERSIONSREQUEST._serialized_start=1323 - _LISTVERSIONSREQUEST._serialized_end=1408 - _LISTVERSIONSREPLY._serialized_start=1410 - _LISTVERSIONSREPLY._serialized_end=1486 - _BACKUPREQUEST._serialized_start=1488 - _BACKUPREQUEST._serialized_end=1503 - _BACKUPREPLY._serialized_start=1505 - _BACKUPREPLY._serialized_end=1602 - _RESTOREFROMBACKUPREQUEST._serialized_start=1604 - 
_RESTOREFROMBACKUPREQUEST._serialized_end=1630 - _RESTOREFROMBACKUPREPLY._serialized_start=1632 - _RESTOREFROMBACKUPREPLY._serialized_end=1695 - _COMPACTALLDATAREQUEST._serialized_start=1697 - _COMPACTALLDATAREQUEST._serialized_end=1720 - _COMPACTALLDATAREPLY._serialized_start=1722 - _COMPACTALLDATAREPLY._serialized_end=1782 - _EXPORTDBREQUEST._serialized_start=1784 - _EXPORTDBREQUEST._serialized_end=1842 - _EXPORTDBREPLY._serialized_start=1844 - _EXPORTDBREPLY._serialized_end=1898 - _FOSSILDB._serialized_start=1901 - _FOSSILDB._serialized_end=3503 + _PUTMULTIPLEVERSIONSREQUEST._serialized_start=429 + _PUTMULTIPLEVERSIONSREQUEST._serialized_end=524 + _PUTMULTIPLEVERSIONSREPLY._serialized_start=526 + _PUTMULTIPLEVERSIONSREPLY._serialized_end=591 + _DELETEREQUEST._serialized_start=593 + _DELETEREQUEST._serialized_end=658 + _DELETEREPLY._serialized_start=660 + _DELETEREPLY._serialized_end=712 + _DELETEALLBYPREFIXREQUEST._serialized_start=714 + _DELETEALLBYPREFIXREQUEST._serialized_end=776 + _DELETEALLBYPREFIXREPLY._serialized_start=778 + _DELETEALLBYPREFIXREPLY._serialized_end=841 + _GETMULTIPLEVERSIONSREQUEST._serialized_start=843 + _GETMULTIPLEVERSIONSREQUEST._serialized_end=950 + _GETMULTIPLEVERSIONSREPLY._serialized_start=952 + _GETMULTIPLEVERSIONSREPLY._serialized_end=1051 + _GETMULTIPLEKEYSREQUEST._serialized_start=1053 + _GETMULTIPLEKEYSREQUEST._serialized_end=1168 + _GETMULTIPLEKEYSREPLY._serialized_start=1170 + _GETMULTIPLEKEYSREPLY._serialized_end=1285 + _DELETEMULTIPLEVERSIONSREQUEST._serialized_start=1287 + _DELETEMULTIPLEVERSIONSREQUEST._serialized_end=1397 + _DELETEMULTIPLEVERSIONSREPLY._serialized_start=1399 + _DELETEMULTIPLEVERSIONSREPLY._serialized_end=1467 + _LISTKEYSREQUEST._serialized_start=1469 + _LISTKEYSREQUEST._serialized_end=1544 + _LISTKEYSREPLY._serialized_start=1546 + _LISTKEYSREPLY._serialized_end=1614 + _LISTVERSIONSREQUEST._serialized_start=1616 + _LISTVERSIONSREQUEST._serialized_end=1701 + _LISTVERSIONSREPLY._serialized_start=1703 + 
_LISTVERSIONSREPLY._serialized_end=1779 + _BACKUPREQUEST._serialized_start=1781 + _BACKUPREQUEST._serialized_end=1796 + _BACKUPREPLY._serialized_start=1798 + _BACKUPREPLY._serialized_end=1895 + _RESTOREFROMBACKUPREQUEST._serialized_start=1897 + _RESTOREFROMBACKUPREQUEST._serialized_end=1923 + _RESTOREFROMBACKUPREPLY._serialized_start=1925 + _RESTOREFROMBACKUPREPLY._serialized_end=1988 + _COMPACTALLDATAREQUEST._serialized_start=1990 + _COMPACTALLDATAREQUEST._serialized_end=2013 + _COMPACTALLDATAREPLY._serialized_start=2015 + _COMPACTALLDATAREPLY._serialized_end=2075 + _EXPORTDBREQUEST._serialized_start=2077 + _EXPORTDBREQUEST._serialized_end=2135 + _EXPORTDBREPLY._serialized_start=2137 + _EXPORTDBREPLY._serialized_end=2191 + _FOSSILDB._serialized_start=2194 + _FOSSILDB._serialized_end=4086 # @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py index 7f738de9658..afb3aac4a4a 100644 --- a/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py +++ b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py @@ -39,6 +39,11 @@ def __init__(self, channel): request_serializer=fossildbapi__pb2.PutRequest.SerializeToString, response_deserializer=fossildbapi__pb2.PutReply.FromString, ) + self.PutMultipleVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/PutMultipleVersions', + request_serializer=fossildbapi__pb2.PutMultipleVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.PutMultipleVersionsReply.FromString, + ) self.Delete = channel.unary_unary( '/com.scalableminds.fossildb.proto.FossilDB/Delete', request_serializer=fossildbapi__pb2.DeleteRequest.SerializeToString, @@ -49,6 +54,11 @@ def __init__(self, channel): request_serializer=fossildbapi__pb2.DeleteMultipleVersionsRequest.SerializeToString, 
response_deserializer=fossildbapi__pb2.DeleteMultipleVersionsReply.FromString, ) + self.DeleteAllByPrefix = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/DeleteAllByPrefix', + request_serializer=fossildbapi__pb2.DeleteAllByPrefixRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.DeleteAllByPrefixReply.FromString, + ) self.ListKeys = channel.unary_unary( '/com.scalableminds.fossildb.proto.FossilDB/ListKeys', request_serializer=fossildbapi__pb2.ListKeysRequest.SerializeToString, @@ -114,6 +124,12 @@ def Put(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def PutMultipleVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Delete(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -126,6 +142,12 @@ def DeleteMultipleVersions(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def DeleteAllByPrefix(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def ListKeys(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -190,6 +212,11 @@ def add_FossilDBServicer_to_server(servicer, server): request_deserializer=fossildbapi__pb2.PutRequest.FromString, response_serializer=fossildbapi__pb2.PutReply.SerializeToString, ), + 'PutMultipleVersions': grpc.unary_unary_rpc_method_handler( + servicer.PutMultipleVersions, + 
request_deserializer=fossildbapi__pb2.PutMultipleVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.PutMultipleVersionsReply.SerializeToString, + ), 'Delete': grpc.unary_unary_rpc_method_handler( servicer.Delete, request_deserializer=fossildbapi__pb2.DeleteRequest.FromString, @@ -200,6 +227,11 @@ def add_FossilDBServicer_to_server(servicer, server): request_deserializer=fossildbapi__pb2.DeleteMultipleVersionsRequest.FromString, response_serializer=fossildbapi__pb2.DeleteMultipleVersionsReply.SerializeToString, ), + 'DeleteAllByPrefix': grpc.unary_unary_rpc_method_handler( + servicer.DeleteAllByPrefix, + request_deserializer=fossildbapi__pb2.DeleteAllByPrefixRequest.FromString, + response_serializer=fossildbapi__pb2.DeleteAllByPrefixReply.SerializeToString, + ), 'ListKeys': grpc.unary_unary_rpc_method_handler( servicer.ListKeys, request_deserializer=fossildbapi__pb2.ListKeysRequest.FromString, @@ -325,6 +357,23 @@ def Put(request, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod + def PutMultipleVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/PutMultipleVersions', + fossildbapi__pb2.PutMultipleVersionsRequest.SerializeToString, + fossildbapi__pb2.PutMultipleVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod def Delete(request, target, @@ -359,6 +408,23 @@ def DeleteMultipleVersions(request, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod + def DeleteAllByPrefix(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + 
compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/DeleteAllByPrefix', + fossildbapi__pb2.DeleteAllByPrefixRequest.SerializeToString, + fossildbapi__pb2.DeleteAllByPrefixReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod def ListKeys(request, target, diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 79a496631fe..20df48672ab 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -76,7 +76,9 @@ def migrate_annotation(self, annotation): if self.args.verbose: logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") mapping_id_map = self.build_mapping_id_map(annotation) - layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) + layer_version_mapping, included_revert = self.migrate_updates(annotation, mapping_id_map) + # if included_revert and self.args.previous_start is not None: + self.clean_up_previously_migrated(annotation, mapping_id_map) materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") @@ -101,10 +103,11 @@ def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map[tracing_id] = editable_mapping_id return mapping_id_map - def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: str, json_encoder, json_decoder) -> List[Tuple[int, int, bytes]]: + def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: str, json_encoder, json_decoder) -> Tuple[List[Tuple[int, int, bytes]], 
bool]: batch_size = 1000 newest_version = self.get_newest_version(tracing_or_mapping_id, collection) updates_for_layer = [] + included_revert = False next_version = newest_version for batch_start, batch_end in reversed(list(batch_range(newest_version, batch_size))): if batch_start > next_version: @@ -116,24 +119,29 @@ def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: update_group, timestamp, revert_source_version = self.process_update_group(tracing_or_mapping_id, layer_type, update_group, json_encoder, json_decoder) if revert_source_version is not None: next_version = revert_source_version + included_revert = True else: next_version -= 1 if revert_source_version is None: # skip the revert itself too, since we’re ironing them out updates_for_layer.append((timestamp, version, update_group)) updates_for_layer.reverse() - return updates_for_layer + return updates_for_layer, included_revert - def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: + def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> Tuple[LayerVersionMapping, bool]: all_update_groups = [] json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() layers = list(annotation["layers"].items()) + included_revert = False for tracing_id, layer_type in layers: collection = self.update_collection_for_layer_type(layer_type) - all_update_groups.append(self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder)) + batch_updates, layer_included_revert = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) + all_update_groups.append(batch_updates) + included_revert = included_revert or layer_included_revert if tracing_id in mapping_id_map: editable_mapping_id = mapping_id_map[tracing_id] - all_update_groups.append(self.fetch_updates(editable_mapping_id, "editableMapping", "editableMappingUpdates", 
json_encoder=json_encoder, json_decoder=json_decoder)) + batch_updates, _ = self.fetch_updates(editable_mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) + all_update_groups.append(batch_updates) unified_version = 0 version_mapping = {} @@ -160,7 +168,7 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers next_element = all_update_groups[layer_index][element_index + 1] heapq.heappush(queue, (next_element, layer_index, element_index + 1)) - return version_mapping + return version_mapping, included_revert def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> Optional[str]: if layer_type == "Skeleton": @@ -322,6 +330,11 @@ def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> N reply = self.dst_stub.Put(proto.PutRequest(collection=collection, key=key, version=version, value=value)) assert_grpc_success(reply) + def save_multiple_versions(self, collection: str, key: str, versions: List[int], values: List[bytes]) -> None: + if self.dst_stub is not None: + reply = self.dst_stub.PutMultipleVersions(proto.PutMultipleVersionsRequest(collection=collection, key=key, versions=versions, values=values)) + assert_grpc_success(reply) + def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): self.migrate_all_versions_and_keys_with_prefix("volumeData", tracing_id, layer_version_mapping, transform_key=self.remove_morton_index) @@ -344,11 +357,15 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: new_key = key if transform_key is not None: new_key = transform_key(key) + versions_to_save = [] + values_to_save = [] for version, value in zip(get_versions_reply.versions, get_versions_reply.values): if version not in layer_version_mapping[tracing_id]: continue new_version = layer_version_mapping[tracing_id][version] - self.save_bytes(collection, new_key, new_version, value) + 
versions_to_save.append(new_version) + values_to_save.append(value) + self.save_multiple_versions(collection, new_key, versions_to_save, values_to_save) current_start_after_key = key else: # We iterated past the elements of the current tracing @@ -437,6 +454,32 @@ def skeleton_may_have_pending_updates(self, annotation) -> bool: return False return "Skeleton" in annotation["layers"].values() + def clean_up_previously_migrated(self, annotation, mapping_id_map: MappingIdMap) -> None: + before = time.time() + logger.info(f"Cleaning up previously migrated annotation {annotation['_id']}...") + self.delete_all_versions("annotations", annotation["_id"]) + self.delete_all_versions("annotationUpdates", annotation["_id"]) + for tracing_id, layer_type in annotation["layers"].items(): + if layer_type == "Skeleton": + self.delete_all_versions("skeletons", tracing_id) + elif layer_type == "Volume": + self.delete_all_versions("volumes", tracing_id) + self.delete_all_with_prefix("volumeData", tracing_id) + self.delete_all_with_prefix("volumeSegmentIndex", tracing_id) + for mapping_id in mapping_id_map.values(): + self.delete_all_versions("editableMappingsInfo", mapping_id) + self.delete_all_with_prefix("editableMappingsAgglomerateToGraph", mapping_id) + self.delete_all_with_prefix("editableMappingsSegmentToAgglomerate", mapping_id) + log_since(before, f"Cleaning up previously migrated annotation {annotation['_id']}") + + def delete_all_versions(self, collection: str, id: str) -> None: + reply = self.dst_stub.DeleteMultipleVersions(proto.DeleteMultipleVersionsRequest(collection=collection, key=id)) + assert_grpc_success(reply) + + def delete_all_with_prefix(self, collection: str, prefix: str) -> None: + reply = self.dst_stub.DeleteAllByPrefix(proto.DeleteAllByPrefixRequest(collection=collection, prefix=id)) + assert_grpc_success(reply) + def read_annotation_list(self): checkpoint_set = self.read_checkpoints() before = time.time() From 5c264eca97d76d3debb5ca305d260ea585609f0b Mon Sep 
17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 14:43:34 +0100 Subject: [PATCH 325/361] migration: fix cleanup of previous, use multi-version put --- MIGRATIONS.unreleased.md | 1 + .../Annotation_pb2.py | 2 +- .../migration.py | 27 ++++++++++++------- .../annotation/TSAnnotationService.scala | 1 + 4 files changed, 21 insertions(+), 10 deletions(-) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index d9510d25571..1685c754fe6 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -10,6 +10,7 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). - Removed support for HTTP API versions 3 and 4. [#8075](https://github.com/scalableminds/webknossos/pull/8075) - The migration route `addSegmentIndex` was removed. If you haven’t done this yet, but need segment indices for your volume annotations, upgrade to an earlier version first, call addSegmentIndex, and then upgrade again. [#7917](https://github.com/scalableminds/webknossos/pull/7917) - The versioning scheme of annotations has been changed. That requires a larger migration including the FossilDB content. [#7917](https://github.com/scalableminds/webknossos/pull/7917) + - New FossilDB version is required # TODO - FossilDB must now be opened with new column family set `skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates`. [#7917](https://github.com/scalableminds/webknossos/pull/7917) - The FossilDB content needs to be migrated. For that, use the python program at `tools/migration-unified-annotation-versioning` (see python main.py --help for instructions). Note that it writes to a completely new FossilDB, that must first be opened with the new column families, see above. The migration code needs to connect to postgres, to the old FossilDB and to the new. After the migration, replace the old FossilDB by the new one. 
The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. [#7917](https://github.com/scalableminds/webknossos/pull/7917) diff --git a/tools/migration-unified-annotation-versioning/Annotation_pb2.py b/tools/migration-unified-annotation-versioning/Annotation_pb2.py index 9b1dc62ed74..9bae9eaad7d 100644 --- a/tools/migration-unified-annotation-versioning/Annotation_pb2.py +++ b/tools/migration-unified-annotation-versioning/Annotation_pb2.py @@ -13,7 +13,7 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\x88\x02\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\x12%\n\x1dskeletonMayHavePendingUpdates\x18\x05 \x01(\x08\x12-\n%editableMappingsMayHavePendingUpdates\x18\x06 \x01(\x08\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x04 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\x88\x02\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 
\x02(\x03\x12%\n\x1dskeletonMayHavePendingUpdates\x18\x05 \x01(\x08\x12-\n%editableMappingsMayHavePendingUpdates\x18\x06 \x01(\x08\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x03 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'Annotation_pb2', globals()) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 20df48672ab..5d2bade4b56 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -76,9 +76,9 @@ def migrate_annotation(self, annotation): if self.args.verbose: logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") mapping_id_map = self.build_mapping_id_map(annotation) - layer_version_mapping, included_revert = self.migrate_updates(annotation, mapping_id_map) - # if included_revert and self.args.previous_start is not None: - self.clean_up_previously_migrated(annotation, mapping_id_map) + if self.includes_revert(annotation) and self.args.previous_start is not None: + self.clean_up_previously_migrated(annotation, mapping_id_map) + layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") @@ -127,17 +127,26 @@ def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: updates_for_layer.reverse() return updates_for_layer, included_revert - def migrate_updates(self, 
annotation, mapping_id_map: MappingIdMap) -> Tuple[LayerVersionMapping, bool]: + def includes_revert(self, annotation) -> bool: + json_encoder = msgspec.json.Encoder() + json_decoder = msgspec.json.Decoder() + layers = list(annotation["layers"].items()) + for tracing_id, layer_type in layers: + collection = self.update_collection_for_layer_type(layer_type) + _, layer_included_revert = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) + if layer_included_revert: + return True + return False + + def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: all_update_groups = [] json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() layers = list(annotation["layers"].items()) - included_revert = False for tracing_id, layer_type in layers: collection = self.update_collection_for_layer_type(layer_type) - batch_updates, layer_included_revert = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) + batch_updates, _ = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) all_update_groups.append(batch_updates) - included_revert = included_revert or layer_included_revert if tracing_id in mapping_id_map: editable_mapping_id = mapping_id_map[tracing_id] batch_updates, _ = self.fetch_updates(editable_mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) @@ -168,7 +177,7 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> Tuple[Lay next_element = all_update_groups[layer_index][element_index + 1] heapq.heappush(queue, (next_element, layer_index, element_index + 1)) - return version_mapping, included_revert + return version_mapping def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> Optional[str]: if layer_type == "Skeleton": @@ -477,7 +486,7 @@ def 
delete_all_versions(self, collection: str, id: str) -> None: assert_grpc_success(reply) def delete_all_with_prefix(self, collection: str, prefix: str) -> None: - reply = self.dst_stub.DeleteAllByPrefix(proto.DeleteAllByPrefixRequest(collection=collection, prefix=id)) + reply = self.dst_stub.DeleteAllByPrefix(proto.DeleteAllByPrefixRequest(collection=collection, prefix=prefix)) assert_grpc_success(reply) def read_annotation_list(self): diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index d9c9bba9b48..374de4c250f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -332,6 +332,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss Some(materializedSkeletonVersion + 1))(fromJsonBytes[List[UpdateAction]]) } else Fox.successful(List.empty) extraSkeletonUpdates = filterSkeletonUpdates(extraUpdates) + _ = logger.info(s"${extraSkeletonUpdates.length} extraSkeletonUpdates") } yield extraSkeletonUpdates }.getOrElse(Fox.successful(List.empty)) } else Fox.successful(List.empty) From e94874304c10e35f3fcf0d176713afee847b2d76 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 14:52:08 +0100 Subject: [PATCH 326/361] pin versions in requirements.txt --- .../requirements.txt | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/requirements.txt b/tools/migration-unified-annotation-versioning/requirements.txt index db02b494a13..d7a16104aed 100644 --- a/tools/migration-unified-annotation-versioning/requirements.txt +++ b/tools/migration-unified-annotation-versioning/requirements.txt @@ -1,6 +1,6 @@ -grpcio 
-argparse -psycopg2-binary -protobuf -rich -msgspec +grpcio==1.68.0 +argparse==1.4.0 +psycopg2-binary==2.9.10 +protobuf==5.28.3 +rich==13.9.4 +msgspec==0.18.6 From 6b80d2bae3f542b900f6788edb18999f56cb0407 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 14:56:36 +0100 Subject: [PATCH 327/361] iterate on migration guide --- MIGRATIONS.unreleased.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 1685c754fe6..34dcbacc4a7 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -11,8 +11,9 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). - The migration route `addSegmentIndex` was removed. If you haven’t done this yet, but need segment indices for your volume annotations, upgrade to an earlier version first, call addSegmentIndex, and then upgrade again. [#7917](https://github.com/scalableminds/webknossos/pull/7917) - The versioning scheme of annotations has been changed. That requires a larger migration including the FossilDB content. [#7917](https://github.com/scalableminds/webknossos/pull/7917) - New FossilDB version is required # TODO + - For the migration, a second FossilDB needs to be started. To do that, either use the docker image, a jar, or checkout the [fossilDB repository](https://github.com/scalableminds/fossildb). If you opened your old FossilDB with an options file, it probably makes sense to use the same options file for the new one as well. - FossilDB must now be opened with new column family set `skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates`. [#7917](https://github.com/scalableminds/webknossos/pull/7917) - - The FossilDB content needs to be migrated. For that, use the python program at `tools/migration-unified-annotation-versioning` (see python main.py --help for instructions). 
Note that it writes to a completely new FossilDB, that must first be opened with the new column families, see above. The migration code needs to connect to postgres, to the old FossilDB and to the new. After the migration, replace the old FossilDB by the new one. The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. [#7917](https://github.com/scalableminds/webknossos/pull/7917) + - The FossilDB content needs to be migrated. For that, use the python program at `tools/migration-unified-annotation-versioning` (see python main.py --help for instructions). Note that it writes to a completely new FossilDB, that must first be opened with the new column families, see above. The migration code needs to connect to postgres, to the old FossilDB and to the new. After the migration, replace the old FossilDB by the new one (either change the ports of the existing programs, or exchange the data directories on disk). The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. 
[#7917](https://github.com/scalableminds/webknossos/pull/7917) ### Postgres Evolutions: - [124-decouple-dataset-directory-from-name](conf/evolutions/124-decouple-dataset-directory-from-name) From c44560cd04bd41c144b697332594c6788c3ebdda Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 15:29:53 +0100 Subject: [PATCH 328/361] bump fossildb version to 0.1.33 (master_504) --- MIGRATIONS.unreleased.md | 6 +++--- docker-compose.yml | 2 +- fossildb/version | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 34dcbacc4a7..fb3a567e35d 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -10,10 +10,10 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). - Removed support for HTTP API versions 3 and 4. [#8075](https://github.com/scalableminds/webknossos/pull/8075) - The migration route `addSegmentIndex` was removed. If you haven’t done this yet, but need segment indices for your volume annotations, upgrade to an earlier version first, call addSegmentIndex, and then upgrade again. [#7917](https://github.com/scalableminds/webknossos/pull/7917) - The versioning scheme of annotations has been changed. That requires a larger migration including the FossilDB content. [#7917](https://github.com/scalableminds/webknossos/pull/7917) - - New FossilDB version is required # TODO + - New FossilDB version `0.1.33` (docker image `scalableminds/fossildb:master__504`) is required. - For the migration, a second FossilDB needs to be started. To do that, either use the docker image, a jar, or checkout the [fossilDB repository](https://github.com/scalableminds/fossildb). If you opened your old FossilDB with an options file, it probably makes sense to use the same options file for the new one as well. 
- - FossilDB must now be opened with new column family set `skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates`. [#7917](https://github.com/scalableminds/webknossos/pull/7917) - - The FossilDB content needs to be migrated. For that, use the python program at `tools/migration-unified-annotation-versioning` (see python main.py --help for instructions). Note that it writes to a completely new FossilDB, that must first be opened with the new column families, see above. The migration code needs to connect to postgres, to the old FossilDB and to the new. After the migration, replace the old FossilDB by the new one (either change the ports of the existing programs, or exchange the data directories on disk). The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. [#7917](https://github.com/scalableminds/webknossos/pull/7917) + - FossilDB must now be opened with new column family set `skeletons,volumes,volumeData,volumeSegmentIndex,editableMappingsInfo,editableMappingsAgglomerateToGraph,editableMappingsSegmentToAgglomerate,annotations,annotationUpdates`. + - The FossilDB content needs to be migrated. For that, use the python program at `tools/migration-unified-annotation-versioning` (see python main.py --help for instructions). Note that it writes to a completely new FossilDB, that must first be opened with the new column families, see above. The migration code needs to connect to postgres, to the old FossilDB and to the new. After the migration, replace the old FossilDB by the new one (either change the ports of the existing programs, or exchange the data directories on disk). 
The migration can also be run in several steps so that the majority of the data can already be migrated while WEBKNOSSOS is still running. Then only annotations that have been edited again since the first run need to be migrated in the incremental second run during a WEBKNOSSOS downtime. ### Postgres Evolutions: - [124-decouple-dataset-directory-from-name](conf/evolutions/124-decouple-dataset-directory-from-name) diff --git a/docker-compose.yml b/docker-compose.yml index 1aa85446a1d..84a737918ae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -269,7 +269,7 @@ services: # FossilDB fossildb: - image: scalableminds/fossildb:master__484 + image: scalableminds/fossildb:master__504 command: - fossildb - -c diff --git a/fossildb/version b/fossildb/version index db7a480479e..50140e35363 100644 --- a/fossildb/version +++ b/fossildb/version @@ -1 +1 @@ -0.1.31 +0.1.33 From 601032f76410a018ffa1c7d579e0fd6eaf452953 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 16:17:05 +0100 Subject: [PATCH 329/361] migration: fix mapping id lookup --- .../migration.py | 37 ++++++++++--------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 5d2bade4b56..545268f129d 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -26,7 +26,7 @@ checkpoint_logger = logging.getLogger("migration-checkpoints") -LayerVersionMapping = Dict[str, Dict[int, int]] # tracing id to (old version to new version) +LayerVersionMapping = Dict[str, Dict[int, int]] # tracing id OR old mapping id to (old version to new version) MappingIdMap = Dict[str, str] # tracing id to editable mapping id @@ -143,19 +143,22 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() 
layers = list(annotation["layers"].items()) + tracing_ids_and_mapping_ids = [] for tracing_id, layer_type in layers: collection = self.update_collection_for_layer_type(layer_type) - batch_updates, _ = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) - all_update_groups.append(batch_updates) + layer_updates, _ = self.fetch_updates(tracing_id, layer_type, collection, json_encoder=json_encoder, json_decoder=json_decoder) + all_update_groups.append(layer_updates) + tracing_ids_and_mapping_ids.append(tracing_id) if tracing_id in mapping_id_map: - editable_mapping_id = mapping_id_map[tracing_id] - batch_updates, _ = self.fetch_updates(editable_mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) - all_update_groups.append(batch_updates) + mapping_id = mapping_id_map[tracing_id] + layer_updates, _ = self.fetch_updates(mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) + all_update_groups.append(layer_updates) + tracing_ids_and_mapping_ids.append(mapping_id) unified_version = 0 version_mapping = {} - for tracing_id, _ in layers: - version_mapping[tracing_id] = {0: 0} # We always want to keep the initial version 0 of all layers, even if there are no updates at all. + for tracing_or_mapping_id in tracing_ids_and_mapping_ids: + version_mapping[tracing_or_mapping_id] = {0: 0} # We always want to keep the initial version 0 of all layers, even if there are no updates at all. # We use a priority queue to efficiently select which tracing each next update should come from. 
# This effectively implements a merge sort @@ -167,10 +170,10 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers while queue: value, layer_index, element_index = heapq.heappop(queue) timestamp, version, update_group = value - tracing_id = layers[layer_index][0] + tracing_or_mapping_id = tracing_ids_and_mapping_ids[layer_index] unified_version += 1 - version_mapping[tracing_id][version] = unified_version + version_mapping[tracing_or_mapping_id][version] = unified_version self.save_update_group(annotation['_id'], unified_version, update_group) if element_index + 1 < len(all_update_groups[layer_index]): @@ -347,11 +350,11 @@ def save_multiple_versions(self, collection: str, key: str, versions: List[int], def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): self.migrate_all_versions_and_keys_with_prefix("volumeData", tracing_id, layer_version_mapping, transform_key=self.remove_morton_index) - def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): + def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_or_mapping_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): list_keys_page_size = 5000 versions_page_size = 500 - current_start_after_key = tracing_id + "." # . is lexicographically before / - newest_tracing_version = max(layer_version_mapping[tracing_id].keys()) + current_start_after_key = tracing_or_mapping_id + "." # . 
is lexicographically before / + newest_tracing_version = max(layer_version_mapping[tracing_or_mapping_id].keys()) while True: list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key)) assert_grpc_success(list_keys_reply) @@ -359,7 +362,7 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: # We iterated towards the very end of the collection return for key in list_keys_reply.keys: - if key.startswith(tracing_id): + if key.startswith(tracing_or_mapping_id): for version_range_start, version_range_end in batch_range(newest_tracing_version, versions_page_size): get_versions_reply = self.src_stub.GetMultipleVersions(proto.GetMultipleVersionsRequest(collection=collection, key=key, oldestVersion=version_range_start, newestVersion=version_range_end)) assert_grpc_success(get_versions_reply) @@ -369,9 +372,9 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_id: versions_to_save = [] values_to_save = [] for version, value in zip(get_versions_reply.versions, get_versions_reply.values): - if version not in layer_version_mapping[tracing_id]: + if version not in layer_version_mapping[tracing_or_mapping_id]: continue - new_version = layer_version_mapping[tracing_id][version] + new_version = layer_version_mapping[tracing_or_mapping_id][version] versions_to_save.append(new_version) values_to_save.append(value) self.save_multiple_versions(collection, new_key, versions_to_save, values_to_save) @@ -440,7 +443,7 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Se if skeleton_may_have_pending_updates: annotationProto.skeletonMayHavePendingUpdates = True if editable_mapping_may_have_pending_updates: - annotationProto.editableMappingMayHavePendingUpdates = True + annotationProto.editableMappingsMayHavePendingUpdates = True for tracing_id, tracing_type in annotation["layers"].items(): layer_proto = 
AnnotationProto.AnnotationLayerProto() layer_proto.tracingId = tracing_id From f3a2eebdbf57932af056bd728b161ff781e9d5be Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 16 Dec 2024 16:52:45 +0100 Subject: [PATCH 330/361] rename type to typ in AnnotationLayerProto and DeleteLayerAnnotationAction --- .../javascripts/oxalis/model/sagas/update_actions.ts | 4 ++-- frontend/javascripts/oxalis/model_initialization.ts | 2 +- .../test/fixtures/skeletontracing_server_objects.ts | 2 +- .../test/fixtures/tasktracing_server_objects.ts | 2 +- .../test/fixtures/volumetracing_server_objects.ts | 2 +- frontend/javascripts/types/api_flow_types.ts | 2 +- .../Annotation_pb2.py | 8 ++++---- .../migration-unified-annotation-versioning/migration.py | 2 +- .../datastore/models/annotation/AnnotationLayer.scala | 2 +- webknossos-datastore/proto/Annotation.proto | 2 +- .../annotation/AnnotationUpdateActions.scala | 2 +- .../tracingstore/annotation/AnnotationWithTracings.scala | 2 +- .../tracingstore/annotation/TSAnnotationService.scala | 8 ++++---- .../controllers/TSAnnotationController.scala | 9 ++++----- 14 files changed, 24 insertions(+), 25 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 5297b2f7a66..108313abd7e 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -639,11 +639,11 @@ export function addLayerToAnnotation(parameters: AnnotationLayerCreationParamete export function deleteAnnotationLayer( tracingId: string, layerName: string, - type: "Skeleton" | "Volume", + typ: "Skeleton" | "Volume", ) { return { name: "deleteLayerFromAnnotation", - value: { tracingId, layerName, type }, + value: { tracingId, layerName, typ }, } as const; } diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 6ddc1915f7b..6a4ee496a58 100644 --- 
a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -147,7 +147,7 @@ export async function initialize( return { tracingId: protoLayer.tracingId, name: protoLayer.name, - typ: protoLayer.type, + typ: protoLayer.typ, stats: // Only when the newest version is requested (version==null), // the stats are available in unversionedAnnotation. diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 3aea3fc9992..37e26c68c45 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -225,7 +225,7 @@ export const annotationProto: APITracingStoreAnnotation = { { tracingId: TRACING_ID, name: "skeleton layer name", - type: AnnotationLayerEnum.Skeleton, + typ: AnnotationLayerEnum.Skeleton, }, ], }; diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 48dad8b83b6..a2755eb6507 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -192,7 +192,7 @@ export const annotationProto: APITracingStoreAnnotation = { { tracingId: TRACING_ID, name: "Skeleton", - type: AnnotationLayerEnum.Skeleton, + typ: AnnotationLayerEnum.Skeleton, }, ], }; diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index 47ae6232d22..01fcebe67bc 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -133,7 +133,7 @@ export const annotationProto: APITracingStoreAnnotation = { { tracingId: TRACING_ID, name: "volume", - type: AnnotationLayerEnum.Volume, + typ: 
AnnotationLayerEnum.Volume, }, ], }; diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index d986c1770ca..9de1d34a80d 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -585,7 +585,7 @@ export type APITimeTrackingPerAnnotation = { type APITracingStoreAnnotationLayer = { readonly tracingId: string; readonly name: string; - readonly type: AnnotationLayerType; + readonly typ: AnnotationLayerType; }; export type APITracingStoreAnnotation = { diff --git a/tools/migration-unified-annotation-versioning/Annotation_pb2.py b/tools/migration-unified-annotation-versioning/Annotation_pb2.py index 9bae9eaad7d..2bae792f46a 100644 --- a/tools/migration-unified-annotation-versioning/Annotation_pb2.py +++ b/tools/migration-unified-annotation-versioning/Annotation_pb2.py @@ -13,17 +13,17 @@ -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\x88\x02\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\x12%\n\x1dskeletonMayHavePendingUpdates\x18\x05 \x01(\x08\x12-\n%editableMappingsMayHavePendingUpdates\x18\x06 \x01(\x08\"\x87\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12N\n\x04type\x18\x03 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x41nnotation.proto\x12&com.scalableminds.webknossos.datastore\"\x88\x02\n\x0f\x41nnotationProto\x12\x13\n\x0b\x64\x65scription\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 
\x02(\x03\x12V\n\x10\x61nnotationLayers\x18\x03 \x03(\x0b\x32<.com.scalableminds.webknossos.datastore.AnnotationLayerProto\x12!\n\x19\x65\x61rliestAccessibleVersion\x18\x04 \x02(\x03\x12%\n\x1dskeletonMayHavePendingUpdates\x18\x05 \x01(\x08\x12-\n%editableMappingsMayHavePendingUpdates\x18\x06 \x01(\x08\"\x86\x01\n\x14\x41nnotationLayerProto\x12\x11\n\ttracingId\x18\x01 \x02(\t\x12\x0c\n\x04name\x18\x02 \x02(\t\x12M\n\x03typ\x18\x03 \x02(\x0e\x32@.com.scalableminds.webknossos.datastore.AnnotationLayerTypeProto*4\n\x18\x41nnotationLayerTypeProto\x12\x0c\n\x08Skeleton\x10\x01\x12\n\n\x06Volume\x10\x02') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'Annotation_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - _ANNOTATIONLAYERTYPEPROTO._serialized_start=465 - _ANNOTATIONLAYERTYPEPROTO._serialized_end=517 + _ANNOTATIONLAYERTYPEPROTO._serialized_start=464 + _ANNOTATIONLAYERTYPEPROTO._serialized_end=516 _ANNOTATIONPROTO._serialized_start=61 _ANNOTATIONPROTO._serialized_end=325 _ANNOTATIONLAYERPROTO._serialized_start=328 - _ANNOTATIONLAYERPROTO._serialized_end=463 + _ANNOTATIONLAYERPROTO._serialized_end=462 # @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 545268f129d..24d5c2c6f82 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -451,7 +451,7 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Se layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Skeleton if tracing_type == "Volume": layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Volume - layer_proto.type = layer_type_proto + layer_proto.typ = layer_type_proto annotationProto.annotationLayers.append(layer_proto) 
self.save_bytes(collection="annotations", key=annotation["_id"], version=version, value=annotationProto.SerializeToString()) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala index 4eacd99fe59..b8b5286f55a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationLayer.scala @@ -25,7 +25,7 @@ object AnnotationLayer extends FoxImplicits { implicit val jsonFormat: OFormat[AnnotationLayer] = Json.format[AnnotationLayer] def fromProto(p: AnnotationLayerProto): AnnotationLayer = - AnnotationLayer(p.tracingId, AnnotationLayerType.fromProto(p.`type`), p.name, AnnotationLayerStatistics.unknown) + AnnotationLayer(p.tracingId, AnnotationLayerType.fromProto(p.typ), p.name, AnnotationLayerStatistics.unknown) val defaultSkeletonLayerName: String = "Skeleton" val defaultVolumeLayerName: String = "Volume" diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index 4ef34a60e1e..a831beafb49 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -19,5 +19,5 @@ message AnnotationProto { message AnnotationLayerProto { required string tracingId = 1; required string name = 2; - required AnnotationLayerTypeProto type = 3; + required AnnotationLayerTypeProto typ = 3; } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index 1725bf9109c..c0ab31de00d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -39,7 +39,7 @@ case class AddLayerAnnotationAction(layerParameters: AnnotationLayerParameters, case class DeleteLayerAnnotationAction(tracingId: String, layerName: String, // Just stored for nicer-looking history - `type`: AnnotationLayerType, // Just stored for nicer-looking history + typ: AnnotationLayerType, // Just stored for nicer-looking history actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 0176dec5255..8d99c5de3a0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -92,7 +92,7 @@ case class AnnotationWithTracings( annotationLayers = annotation.annotationLayers :+ AnnotationLayerProto( tracingId, a.layerParameters.name.getOrElse(AnnotationLayer.defaultNameForType(a.layerParameters.typ)), - `type` = AnnotationLayerType.toProto(a.layerParameters.typ) + typ = AnnotationLayerType.toProto(a.layerParameters.typ) )), tracingsById = tracingsById.updated(tracingId, tracing) ) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 374de4c250f..a3949708885 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -187,7 
+187,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss .exists(_.name == action.layerParameters.getNameWithDefault)) ?~> "addLayer.nameInUse" _ <- bool2Fox( !annotationWithTracings.annotation.annotationLayers.exists( - _.`type` == AnnotationLayerTypeProto.Skeleton && action.layerParameters.typ == AnnotationLayerType.Skeleton)) ?~> "addLayer.onlyOneSkeletonAllowed" + _.typ == AnnotationLayerTypeProto.Skeleton && action.layerParameters.typ == AnnotationLayerType.Skeleton)) ?~> "addLayer.onlyOneSkeletonAllowed" tracing <- remoteWebknossosClient.createTracingFor(annotationId, action.layerParameters, previousVersion = targetVersion - 1) @@ -384,9 +384,9 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def findTracingsForAnnotation(annotation: AnnotationProto)( implicit ec: ExecutionContext): Fox[AnnotationWithTracings] = { val skeletonTracingIds = - annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) + annotation.annotationLayers.filter(_.typ == AnnotationLayerTypeProto.Skeleton).map(_.tracingId) val volumeTracingIds = - annotation.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume).map(_.tracingId) + annotation.annotationLayers.filter(_.typ == AnnotationLayerTypeProto.Volume).map(_.tracingId) for { skeletonTracings <- Fox.serialCombined(skeletonTracingIds.toList)(id => findSkeletonRaw(id, Some(annotation.version))) ?~> "findSkeletonRaw.failed" @@ -799,7 +799,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tc: TokenContext): Fox[AnnotationLayerProto] = for { newTracingId <- tracingIdMap.get(layer.tracingId) ?~> "duplicate unknown layer" - _ <- layer.`type` match { + _ <- layer.typ match { case AnnotationLayerTypeProto.Volume => duplicateVolumeTracing(annotationId, layer.tracingId, diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 49341e827b4..38cf7fe755d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -157,9 +157,8 @@ class TSAnnotationController @Inject()( for { annotations: Seq[AnnotationProto] <- annotationService.getMultiple(request.body) ?~> Messages( "annotation.notFound") - skeletonLayers = annotations.flatMap( - _.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Skeleton)) - volumeLayers = annotations.flatMap(_.annotationLayers.filter(_.`type` == AnnotationLayerTypeProto.Volume)) + skeletonLayers = annotations.flatMap(_.annotationLayers.filter(_.typ == AnnotationLayerTypeProto.Skeleton)) + volumeLayers = annotations.flatMap(_.annotationLayers.filter(_.typ == AnnotationLayerTypeProto.Volume)) newSkeletonId = TracingId.generate newVolumeId = TracingId.generate mergedSkeletonName = SequenceUtils @@ -206,12 +205,12 @@ class TSAnnotationController @Inject()( _ => AnnotationLayerProto(name = mergedSkeletonName, tracingId = newSkeletonId, - `type` = AnnotationLayerTypeProto.Skeleton)) + typ = AnnotationLayerTypeProto.Skeleton)) mergedVolumeLayerOpt = mergedVolumeOpt.map( _ => AnnotationLayerProto(name = mergedVolumeName, tracingId = newVolumeId, - `type` = AnnotationLayerTypeProto.Volume)) + typ = AnnotationLayerTypeProto.Volume)) mergedLayers = Seq(mergedSkeletonLayerOpt, mergedVolumeLayerOpt).flatten firstAnnotation <- annotations.headOption.toFox mergedAnnotation = firstAnnotation From fc48bfc372f170c6d24a3f332e6b51e728a8c5bf Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 17 Dec 2024 11:46:54 +0100 Subject: [PATCH 331/361] fix applying 
extraSkeletonActions on older requested annotation versions --- .../annotation/TSAnnotationService.scala | 72 +++++++++++-------- 1 file changed, 41 insertions(+), 31 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index a3949708885..f1544fe987e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -85,7 +85,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = for { - newestMaterialized <- getNewestMaterialized(annotationId) ?~> "getNewestMaterialized.failed" + // First, fetch the very newest materialized (even if an older one was requested), to determine correct targetVersion + newestMaterialized <- getNewestMatchingMaterializedAnnotation(annotationId, version = None) ?~> "getNewestMaterialized.failed" targetVersion <- determineTargetVersion(annotationId, newestMaterialized, version) ?~> "determineTargetVersion.failed" // When requesting any other than the newest version, do not consider the changes final reportChangesToWk = version.isEmpty || version.contains(targetVersion) @@ -93,28 +94,23 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ => newInnerCache) updatedAnnotation <- materializedAnnotationInnerCache.getOrLoad( targetVersion, - _ => - getWithTracingsVersioned(annotationId, - newestMaterialized, - targetVersion, - reportChangesToWk = reportChangesToWk) + _ => getWithTracingsVersioned(annotationId, targetVersion, reportChangesToWk = reportChangesToWk) ) } yield updatedAnnotation - private def getWithTracingsVersioned( - annotationId: 
String, - newestMaterializedAnnotation: AnnotationProto, - version: Long, - reportChangesToWk: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = + private def getWithTracingsVersioned(annotationId: String, targetVersion: Long, reportChangesToWk: Boolean)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[AnnotationWithTracings] = for { - annotationWithTracings <- findTracingsForAnnotation(newestMaterializedAnnotation) ?~> "findTracingsForAnnotation.failed" + materializedAnnotation <- getNewestMatchingMaterializedAnnotation(annotationId, Some(targetVersion)) + annotationWithTracings <- findTracingsForAnnotation(materializedAnnotation) ?~> "findTracingsForAnnotation.failed" annotationWithTracingsAndMappings <- findEditableMappingsForAnnotation( annotationId, annotationWithTracings, - newestMaterializedAnnotation.version, - version // Note: this targetVersion is used for the updater buffers, and is overwritten for each update group, see annotation.withNewUpdaters + materializedAnnotation.version, + targetVersion // Note: this targetVersion is used for the updater buffers, and is overwritten for each update group, see annotation.withNewUpdaters ) ?~> "findEditableMappingsForAnnotation.failed" - updated <- applyPendingUpdates(annotationWithTracingsAndMappings, annotationId, version, reportChangesToWk) ?~> "applyUpdates.failed" + updated <- applyPendingUpdates(annotationWithTracingsAndMappings, annotationId, targetVersion, reportChangesToWk) ?~> "applyUpdates.failed" } yield updated def currentMaterializableVersion(annotationId: String): Fox[Long] = @@ -123,16 +119,25 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss def currentMaterializedVersion(annotationId: String): Fox[Long] = tracingDataStore.annotations.getVersion(annotationId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) - private def currentMaterializedSkeletonVersion(tracingId: String): Fox[Long] = - 
tracingDataStore.skeletons.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + private def newestMatchingMaterializedSkeletonVersion(tracingId: String, targetVersion: Long): Fox[Long] = + tracingDataStore.skeletons.getVersion(tracingId, + version = Some(targetVersion), + mayBeEmpty = Some(true), + emptyFallback = Some(0L)) - private def currentMaterializedEditableMappingVersion(tracingId: String): Fox[Long] = - tracingDataStore.editableMappingsInfo.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) + private def newestMatchingMaterializedEditableMappingVersion(tracingId: String, targetVersion: Long): Fox[Long] = + tracingDataStore.editableMappingsInfo.getVersion(tracingId, + version = Some(targetVersion), + mayBeEmpty = Some(true), + emptyFallback = Some(0L)) - private def getNewestMaterialized(annotationId: String): Fox[AnnotationProto] = + private def getNewestMatchingMaterializedAnnotation(annotationId: String, + version: Option[Long]): Fox[AnnotationProto] = for { - keyValuePair <- tracingDataStore.annotations.get[AnnotationProto](annotationId, mayBeEmpty = Some(true))( - fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" + keyValuePair <- tracingDataStore.annotations.get[AnnotationProto]( + annotationId, + mayBeEmpty = Some(true), + version = version)(fromProtoBytes[AnnotationProto]) ?~> "getAnnotation.failed" } yield keyValuePair.value private def applyUpdate( @@ -301,8 +306,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def findPendingUpdates(annotationId: String, annotation: AnnotationWithTracings, desiredVersion: Long)( implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = for { - extraSkeletonUpdates <- findExtraSkeletonUpdates(annotationId, annotation) - extraEditableMappingUpdates <- findExtraEditableMappingUpdates(annotationId, annotation) + extraSkeletonUpdates <- findExtraSkeletonUpdates(annotationId, annotation, desiredVersion) + 
extraEditableMappingUpdates <- findExtraEditableMappingUpdates(annotationId, annotation, desiredVersion) existingVersion = annotation.version pendingAnnotationUpdates <- if (desiredVersion == existingVersion) Fox.successful(List.empty) else { @@ -319,12 +324,12 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss * we may fetch skeleton updates *older* than it, in order to fully construct the state of that version. * Only annotations from before that migration have this skeletonMayHavePendingUpdates=Some(true). */ - private def findExtraSkeletonUpdates(annotationId: String, annotation: AnnotationWithTracings)( + private def findExtraSkeletonUpdates(annotationId: String, annotation: AnnotationWithTracings, targetVersion: Long)( implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = if (annotation.annotation.skeletonMayHavePendingUpdates.getOrElse(false)) { annotation.getSkeletonId.map { skeletonId => for { - materializedSkeletonVersion <- currentMaterializedSkeletonVersion(skeletonId) + materializedSkeletonVersion <- newestMatchingMaterializedSkeletonVersion(skeletonId, targetVersion) extraUpdates <- if (materializedSkeletonVersion < annotation.version) { tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( annotationId, @@ -339,25 +344,30 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss private def filterSkeletonUpdates( updateGroups: List[(Long, List[UpdateAction])]): List[(Long, List[SkeletonUpdateAction])] = - updateGroups.map { + updateGroups.flatMap { case (version, updateGroup) => val updateGroupFiltered = updateGroup.flatMap { case a: SkeletonUpdateAction => Some(a) case _ => None } - (version, updateGroupFiltered) + if (updateGroupFiltered.nonEmpty) { + Some((version, updateGroupFiltered)) + } else None } // Same problem as with skeletons, see comment above // Note that the EditableMappingUpdaters are passed only the “oldVersion” that is the materialized 
annotation version // not the actual materialized editableMapping version, but that should yield the same data when loading from fossil. - private def findExtraEditableMappingUpdates(annotationId: String, annotation: AnnotationWithTracings)( - implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = + private def findExtraEditableMappingUpdates( + annotationId: String, + annotation: AnnotationWithTracings, + targetVersion: Long)(implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = if (annotation.annotation.skeletonMayHavePendingUpdates.getOrElse(false)) { for { updatesByEditableMapping <- Fox.serialCombined(annotation.getEditableMappingTracingIds) { tracingId => for { - materializedEditableMappingVersion <- currentMaterializedEditableMappingVersion(tracingId) + materializedEditableMappingVersion <- newestMatchingMaterializedEditableMappingVersion(tracingId, + targetVersion) extraUpdates <- if (materializedEditableMappingVersion < annotation.version) { tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple( annotationId, From df88f4d0665b7bc773ebe546f96a5602150d7bdf Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 17 Dec 2024 13:16:49 +0100 Subject: [PATCH 332/361] remove debug logging --- .../webknossos/tracingstore/annotation/TSAnnotationService.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index f1544fe987e..08586ac51dc 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -337,7 +337,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss Some(materializedSkeletonVersion + 
1))(fromJsonBytes[List[UpdateAction]]) } else Fox.successful(List.empty) extraSkeletonUpdates = filterSkeletonUpdates(extraUpdates) - _ = logger.info(s"${extraSkeletonUpdates.length} extraSkeletonUpdates") } yield extraSkeletonUpdates }.getOrElse(Fox.successful(List.empty)) } else Fox.successful(List.empty) From a8babc04cba6ad2ff14e0cd9730234d5d6743884 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 18 Dec 2024 11:11:21 +0100 Subject: [PATCH 333/361] WIP: switch migration from threading to multiprocessing --- .../migration.py | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 24d5c2c6f82..b4048aef54c 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -10,7 +10,7 @@ from rich.progress import track import msgspec import concurrent.futures -import threading +import multiprocessing from functools import partial import heapq import sys @@ -39,10 +39,10 @@ def __init__(self, args): self.dst_stub = None if not args.dry: self.dst_stub = connect_to_fossildb(args.dst, "destination") - self.done_count = None - self.done_count_lock = threading.Lock() - self.failure_count = 0 - self.failure_count_lock = threading.Lock() + self.done_count = multiprocessing.Value('i', 0) + self.done_count_lock = multiprocessing.Lock() + self.failure_count = multiprocessing.Value('i', 0) + self.failure_count_lock = multiprocessing.Lock() self.total_count = None self.before = 0 @@ -50,15 +50,15 @@ def run(self): self.before = time.time() annotations = self.read_annotation_list() self.setup_checkpoint_logging() - self.done_count = 0 - self.failure_count = 0 + self.done_count.value = 0 + self.failure_count.value = 0 self.total_count = len(annotations) - with concurrent.futures.ThreadPoolExecutor(max_workers=self.args.num_threads) as executor: - 
executor.map(self.migrate_annotation, annotations) - log_since(self.before, f"Migrating all the {self.total_count} things") - if self.failure_count > 0: - logger.info(f"There were failures for {self.failure_count} annotations. See logs for details.") + with concurrent.futures.ProcessPoolExecutor(max_workers=self.args.num_threads) as executor: + list(executor.map(self.migrate_annotation, annotations)) + log_since(self.before, f"Migrating all the {self.done_count.value} of {self.total_count} things") + if self.failure_count.value > 0: + logger.info(f"There were failures for {self.failure_count.value} annotations. See logs for details.") sys.exit(1) def migrate_annotation(self, annotation): @@ -86,13 +86,13 @@ def migrate_annotation(self, annotation): if time.time() - before > 1 or self.args.verbose: log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) checkpoint_logger.info(annotation['_id']) - except Exception: + except Exception as e: logger.exception(f"Exception while migrating annotation {annotation['_id']}:") with self.failure_count_lock: - self.failure_count += 1 + self.failure_count.value += 1 finally: with self.done_count_lock: - self.done_count += 1 + self.done_count.value += 1 def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map = {} @@ -555,8 +555,8 @@ def replace_before_first_slash(self, replacement_prefix: str, key) -> str: def get_progress(self) -> str: with self.done_count_lock: - done_count = self.done_count - percentage = 100.0 * done_count / self.total_count + done_count = self.done_count.value + percentage = 100.0 * done_count / self.total_count duration = time.time() - self.before if done_count > 0: etr = duration / done_count * (self.total_count - done_count) From d51a4c81143a9229ec1f8c90bb6cdc98df2abafa Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 11:04:57 +0100 Subject: [PATCH 334/361] re-add isViewOnlyChange for
skeleton actions where it got lost --- .../tracings/skeleton/updating/SkeletonUpdateActions.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala index 1ca66e9f5cd..ebab91d847d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/updating/SkeletonUpdateActions.scala @@ -472,6 +472,8 @@ case class UpdateTreeGroupVisibilitySkeletonAction(treeGroupId: Option[Int], this.copy(actionAuthorId = authorId) override def withActionTracingId(newTracingId: String): LayerUpdateAction = this.copy(actionTracingId = newTracingId) + + override def isViewOnlyChange: Boolean = true } case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, @@ -496,6 +498,8 @@ case class UpdateTreeEdgesVisibilitySkeletonAction(treeId: Int, this.copy(actionAuthorId = authorId) override def withActionTracingId(newTracingId: String): LayerUpdateAction = this.copy(actionTracingId = newTracingId) + + override def isViewOnlyChange: Boolean = true } case class UpdateUserBoundingBoxesSkeletonAction(boundingBoxes: List[NamedBoundingBox], From de0bb16f987f8424e33469734dac0f6017dec449 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 11:34:20 +0100 Subject: [PATCH 335/361] reformat comment for TokenContext Co-authored-by: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> --- .../com/scalableminds/util/accesscontext/TokenContext.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala 
b/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala index 2a74b356bf4..37f155d57a3 100644 --- a/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala +++ b/util/src/main/scala/com/scalableminds/util/accesscontext/TokenContext.scala @@ -1,4 +1,4 @@ package com.scalableminds.util.accesscontext -// to be used in datastore and tracingstore to hand around tokens that were supplied with the request +// Used in datastore and tracingstore to hand around tokens that were supplied with the request case class TokenContext(userTokenOpt: Option[String]) From becc70109f58c5f61c2c7cf69097729e5e1ad3a5 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 11:50:13 +0100 Subject: [PATCH 336/361] unify function naming in RpcRequest --- app/mail/MailchimpClient.scala | 4 +- .../WKRemoteTracingStoreClient.scala | 12 +-- .../dataset/WKRemoteDataStoreClient.scala | 2 +- .../organization/OrganizationService.scala | 2 +- app/models/voxelytics/LokiClient.scala | 2 +- app/security/OpenIdConnectClient.scala | 2 +- .../webknossos/datastore/rpc/RPCRequest.scala | 80 ++++++++----------- .../services/DSRemoteWebknossosClient.scala | 12 +-- .../TSRemoteDatastoreClient.scala | 2 +- .../TSRemoteWebknossosClient.scala | 2 +- 10 files changed, 54 insertions(+), 66 deletions(-) diff --git a/app/mail/MailchimpClient.scala b/app/mail/MailchimpClient.scala index f5216d62afc..e8b55138901 100644 --- a/app/mail/MailchimpClient.scala +++ b/app/mail/MailchimpClient.scala @@ -39,7 +39,7 @@ class MailchimpClient @Inject()(wkConf: WkConf, rpc: RPC, multiUserDAO: MultiUse "LNAME" -> lastName, ) ) - rpc(uri).silent.withBasicAuth(conf.user, conf.password).put(userBody) + rpc(uri).silent.withBasicAuth(conf.user, conf.password).putJson(userBody) } def tagUser(user: User, tag: MailchimpTag): Unit = @@ -63,7 +63,7 @@ class MailchimpClient @Inject()(wkConf: WkConf, rpc: RPC, multiUserDAO: MultiUse val tagBody = Json.obj( "tags" -> List(Json.obj("name" -> 
MailchimpTag.format(tag), "status" -> "active")) ) - rpc(uri).silent.withBasicAuth(conf.user, conf.password).post(tagBody) + rpc(uri).silent.withBasicAuth(conf.user, conf.password).postJson(tagBody) } def tagsForMultiUser(multiUser: MultiUser)(implicit ec: ExecutionContext): Fox[List[MailchimpTag]] = diff --git a/app/models/annotation/WKRemoteTracingStoreClient.scala b/app/models/annotation/WKRemoteTracingStoreClient.scala index 30c85a586e7..9e7ac4fe912 100644 --- a/app/models/annotation/WKRemoteTracingStoreClient.scala +++ b/app/models/annotation/WKRemoteTracingStoreClient.scala @@ -107,7 +107,7 @@ class WKRemoteTracingStoreClient( .addQueryStringOptional("version", version.map(_.toString)) .addQueryStringOptional("datasetBoundingBox", datasetBoundingBox.map(_.toLiteral)) .addQueryString("isFromTask" -> isFromTask.toString) - .postWithProtoResponse[AnnotationProto]()(AnnotationProto) + .postEmptyWithProtoResponse[AnnotationProto]()(AnnotationProto) } // Used in task creation. History is dropped, new version will be zero. @@ -120,7 +120,7 @@ class WKRemoteTracingStoreClient( .addQueryStringOptional("editPosition", editPosition.map(_.toUriLiteral)) .addQueryStringOptional("editRotation", editRotation.map(_.toUriLiteral)) .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) - .postWithJsonResponse[String]() + .postEmptyWithJsonResponse[String]() // Used in task creation. History is dropped, new version will be zero. 
def duplicateVolumeTracing(volumeTracingId: String, @@ -135,7 +135,7 @@ class WKRemoteTracingStoreClient( .addQueryStringOptional("boundingBox", boundingBox.map(_.toLiteral)) .addQueryStringOptional("minMag", magRestrictions.minStr) .addQueryStringOptional("maxMag", magRestrictions.maxStr) - .postWithJsonResponse[String]() + .postEmptyWithJsonResponse[String]() def mergeAnnotationsByIds(annotationIds: List[String], newAnnotationId: ObjectId, @@ -167,7 +167,7 @@ class WKRemoteTracingStoreClient( _ = tracingDataSourceTemporaryStore.store(tracingId, dataSource) _ <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId/initialDataMultiple").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .post(packedVolumeDataZips) + .postFile(packedVolumeDataZips) } yield tracingId } @@ -192,7 +192,7 @@ class WKRemoteTracingStoreClient( .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryStringOptional("minMag", magRestrictions.minStr) .addQueryStringOptional("maxMag", magRestrictions.maxStr) - .post(file) + .postFile(file) case _ => Fox.successful(()) } @@ -246,7 +246,7 @@ class WKRemoteTracingStoreClient( for { _ <- rpc(s"${tracingStore.url}/tracings/annotation/$annotationId/resetToBase").withLongTimeout .addQueryString("token" -> RpcTokenHolder.webknossosToken) - .post() + .postEmpty() } yield () } diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index ca30e34f7d5..bc8f4239e24 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -91,7 +91,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin _ <- rpc(s"${dataStore.url}/data/datasets/$organizationId/$datasetName") .addQueryString("token" -> userToken) .addQueryStringOptional("folderId", folderId.map(_.toString)) - .put(dataSource) + .putJson(dataSource) } yield () def hasSegmentIndexFile(organizationId: String, datasetName: 
String, layerName: String)( diff --git a/app/models/organization/OrganizationService.scala b/app/models/organization/OrganizationService.scala index 32d28a1f901..4f26e7b87b3 100644 --- a/app/models/organization/OrganizationService.scala +++ b/app/models/organization/OrganizationService.scala @@ -135,7 +135,7 @@ class OrganizationService @Inject()(organizationDAO: OrganizationDAO, def sendRPCToDataStore(dataStore: DataStore) = rpc(s"${dataStore.url}/data/triggers/createOrganizationDirectory") .addQueryString("token" -> dataStoreToken, "organizationId" -> organizationId) - .post() + .postEmpty() .futureBox for { diff --git a/app/models/voxelytics/LokiClient.scala b/app/models/voxelytics/LokiClient.scala index 4c0dbbd5572..51bbe1902f5 100644 --- a/app/models/voxelytics/LokiClient.scala +++ b/app/models/voxelytics/LokiClient.scala @@ -248,7 +248,7 @@ class LokiClient @Inject()(wkConf: WkConf, rpc: RPC, val system: ActorSystem)(im )) _ <- rpc(s"${conf.uri}/loki/api/v1/push").silent .addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType) - .post[JsValue](Json.obj("streams" -> streams)) + .postJson[JsValue](Json.obj("streams" -> streams)) } yield () } else { Fox.successful(()) diff --git a/app/security/OpenIdConnectClient.scala b/app/security/OpenIdConnectClient.scala index d554e65a5ef..555a0b7ee9c 100644 --- a/app/security/OpenIdConnectClient.scala +++ b/app/security/OpenIdConnectClient.scala @@ -64,7 +64,7 @@ class OpenIdConnectClient @Inject()(rpc: RPC, conf: WkConf)(implicit ec: Executi tokenResponse <- rpc(serverInfos.token_endpoint) .silentIf(!conf.SingleSignOn.OpenIdConnect.verboseLoggingEnabled) .withBasicAuthOpt(Some(oidcConfig.clientId), oidcConfig.clientSecret) - .postFormParseJson[OpenIdConnectTokenResponse]( + .postFormWithJsonResponse[OpenIdConnectTokenResponse]( Map( "grant_type" -> "authorization_code", "client_id" -> oidcConfig.clientId, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index d5676562d96..5e251fd26cd 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -99,83 +99,59 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: extractBytesResponse(performRequest) } - def post(): Fox[WSResponse] = { + def postEmpty(): Fox[WSResponse] = { request = request.withMethod("POST") performRequest } - def post(file: File): Fox[WSResponse] = { - request = request.withBody(file).withMethod("POST") - performRequest - } - - def postFormParseJson[T: Reads](parameters: Map[String, String]): Fox[T] = { - request = request.withBody(parameters).withMethod("POST") - parseJsonResponse(performRequest) - } - - def postWithJsonResponse[T: Reads](): Fox[T] = { + def postEmptyWithJsonResponse[T: Reads](): Fox[T] = { request = request.withMethod("POST") parseJsonResponse(performRequest) } - def postJsonWithBytesResponse[T: Writes](body: T = Json.obj()): Fox[Array[Byte]] = { - request = - request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") - extractBytesResponse(performRequest) + def postEmptyWithProtoResponse[T <: GeneratedMessage]()(companion: GeneratedMessageCompanion[T]): Fox[T] = { + request = request.withMethod("POST") + parseProtoResponse(performRequest)(companion) } - def post[T: Writes](body: T = Json.obj()): Fox[WSResponse] = { - request = - request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") + def postFile(file: File): Fox[WSResponse] = { + request = request.withBody(file).withMethod("POST") performRequest } - def postWithJsonResponse[TW: Writes, TR: Reads](body: TW = Json.obj()): Fox[TR] = { - request = - request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> 
jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") + def postFormWithJsonResponse[T: Reads](parameters: Map[String, String]): Fox[T] = { + request = request.withBody(parameters).withMethod("POST") parseJsonResponse(performRequest) } - def put[T: Writes](body: T = Json.obj()): Fox[WSResponse] = { - request = - request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("PUT") - performRequest - } - - def patch[T: Writes](body: T = Json.obj()): Fox[WSResponse] = { + def postJson[T: Writes](body: T): Fox[WSResponse] = { request = - request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("PATCH") - performRequest - } - - def delete(): Fox[WSResponse] = { - request = request.withMethod("DELETE") + request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") performRequest } - def postJsonWithJsonResponse[T: Writes, U: Reads](body: T = Json.obj()): Fox[U] = { + def postJsonWithJsonResponse[T: Writes, U: Reads](body: T): Fox[U] = { request = request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") parseJsonResponse(performRequest) } - def postBytesWithBytesResponse(body: Array[Byte]): Fox[Array[Byte]] = { - request = request.withBody(body).withMethod("POST") + def postJsonWithBytesResponse[T: Writes](body: T): Fox[Array[Byte]] = { + request = + request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") extractBytesResponse(performRequest) } - def postJsonWithProtoResponse[J: Writes, T <: GeneratedMessage](body: J = Json.obj())( + def postJsonWithProtoResponse[J: Writes, T <: GeneratedMessage](body: J)( companion: GeneratedMessageCompanion[T]): Fox[T] = { request = request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") 
parseProtoResponse(performRequest)(companion) } - def postJson[J: Writes](body: J = Json.obj()): Fox[Unit] = { - request = - request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") - performRequest.map(_ => ()) + def postBytesWithBytesResponse(body: Array[Byte]): Fox[Array[Byte]] = { + request = request.withBody(body).withMethod("POST") + extractBytesResponse(performRequest) } def postProto[T <: GeneratedMessage](body: T): Fox[Unit] = { @@ -197,9 +173,21 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: parseProtoResponse(performRequest)(companion) } - def postWithProtoResponse[T <: GeneratedMessage]()(companion: GeneratedMessageCompanion[T]): Fox[T] = { - request = request.withMethod("POST") - parseProtoResponse(performRequest)(companion) + def putJson[T: Writes](body: T): Fox[WSResponse] = { + request = + request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("PUT") + performRequest + } + + def patchJson[T: Writes](body: T): Fox[WSResponse] = { + request = + request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("PATCH") + performRequest + } + + def delete(): Fox[WSResponse] = { + request = request.withMethod("DELETE") + performRequest } private def performRequest: Fox[WSResponse] = { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index 01675ec3f93..60cad94bd56 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -65,12 +65,12 @@ class DSRemoteWebknossosClient @Inject()( private def reportStatus(): Fox[_] = 
rpc(s"$webknossosUri/api/datastores/$dataStoreName/status") .addQueryString("key" -> dataStoreKey) - .patch(DataStoreStatus(ok = true, dataStoreUri, Some(reportUsedStorageEnabled))) + .patchJson(DataStoreStatus(ok = true, dataStoreUri, Some(reportUsedStorageEnabled))) def reportDataSource(dataSource: InboxDataSourceLike): Fox[_] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/datasource") .addQueryString("key" -> dataStoreKey) - .put(dataSource) + .putJson(dataSource) def getUnfinishedUploadsForUser(organizationName: String)(implicit tc: TokenContext): Fox[List[UnfinishedUpload]] = for { @@ -91,7 +91,7 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("viaAddRoute" -> viaAddRoute.toString) .addQueryString("datasetSizeBytes" -> datasetSizeBytes.toString) .withTokenFromContext - .postWithJsonResponse[JsValue]() + .postEmptyWithJsonResponse[JsValue]() uploadedDatasetId <- (uploadedDatasetIdJson \ "id").validate[String].asOpt.toFox ?~> "uploadedDatasetId.invalid" } yield uploadedDatasetId @@ -99,7 +99,7 @@ class DSRemoteWebknossosClient @Inject()( rpc(s"$webknossosUri/api/datastores/$dataStoreName/datasources") .addQueryString("key" -> dataStoreKey) .silent - .put(dataSources) + .putJson(dataSources) def reserveDataSourceUpload(info: ReserveUploadInformation)( implicit tc: TokenContext): Fox[ReserveAdditionalInformation] = @@ -107,11 +107,11 @@ class DSRemoteWebknossosClient @Inject()( reserveUploadInfo <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reserveUpload") .addQueryString("key" -> dataStoreKey) .withTokenFromContext - .postWithJsonResponse[ReserveUploadInformation, ReserveAdditionalInformation](info) + .postJsonWithJsonResponse[ReserveUploadInformation, ReserveAdditionalInformation](info) } yield reserveUploadInfo def deleteDataSource(id: DataSourceId): Fox[_] = - rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset").addQueryString("key" -> dataStoreKey).post(id) + 
rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset").addQueryString("key" -> dataStoreKey).postJson(id) def getJobExportProperties(jobId: String): Fox[JobExportProperties] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/jobExportProperties") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala index 40a415b20a7..0a45219eb53 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala @@ -50,7 +50,7 @@ class TSRemoteDatastoreClient @Inject()( implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) - response <- rpc(s"$remoteLayerUri/data").withTokenFromContext.silent.post(dataRequests) + response <- rpc(s"$remoteLayerUri/data").withTokenFromContext.silent.postJson(dataRequests) _ <- bool2Fox(Status.isSuccessful(response.status)) bytes = response.bodyAsBytes.toArray indices <- parseMissingBucketHeader(response.header(missingBucketsHeader)) ?~> "failed to parse missing bucket header" diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 66f20ecc14c..46ee9b26d73 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -56,7 +56,7 @@ class TSRemoteWebknossosClient @Inject()( rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") .addQueryString("key" -> tracingStoreKey) .silent - 
.post(Json.toJson(tracingUpdatesReport)) + .postJson(Json.toJson(tracingUpdatesReport)) def getDataSourceForTracing(tracingId: String)(implicit tc: TokenContext): Fox[DataSourceLike] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSource") From b5e1f91864b98b5899337aef9e8b904196a58d69 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 12:05:38 +0100 Subject: [PATCH 337/361] fix userToken param, remove stateless services from Singleton list --- .../datastore/services/DSRemoteWebknossosClient.scala | 6 ++++-- .../webknossos/tracingstore/TracingStoreModule.scala | 2 -- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index 60cad94bd56..08f7de6a201 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -111,7 +111,9 @@ class DSRemoteWebknossosClient @Inject()( } yield reserveUploadInfo def deleteDataSource(id: DataSourceId): Fox[_] = - rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset").addQueryString("key" -> dataStoreKey).postJson(id) + rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset") + .addQueryString("key" -> dataStoreKey) + .postJson(id) def getJobExportProperties(jobId: String): Fox[JobExportProperties] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/jobExportProperties") @@ -148,7 +150,7 @@ class DSRemoteWebknossosClient @Inject()( _ => rpc(s"$webknossosUri/api/annotations/source/$accessToken") .addQueryString("key" -> dataStoreKey) - .withTokenFromContext + .addQueryStringOptional("userToken", tc.userTokenOpt) .getWithJsonResponse[AnnotationSource] ) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index e67aaddec71..412db33e262 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -17,7 +17,6 @@ class TracingStoreModule extends AbstractModule { override def configure(): Unit = { bind(classOf[ActorSystem]).annotatedWith(Names.named("webknossos-tracingstore")).toInstance(system) bind(classOf[TracingDataStore]).asEagerSingleton() - bind(classOf[TemporaryTracingService]).asEagerSingleton() bind(classOf[VolumeTracingService]).asEagerSingleton() bind(classOf[TracingStoreAccessTokenService]).asEagerSingleton() bind(classOf[TSRemoteWebknossosClient]).asEagerSingleton() @@ -25,7 +24,6 @@ class TracingStoreModule extends AbstractModule { bind(classOf[EditableMappingService]).asEagerSingleton() bind(classOf[TSSlackNotificationService]).asEagerSingleton() bind(classOf[AdHocMeshServiceHolder]).asEagerSingleton() - bind(classOf[AnnotationTransactionService]).asEagerSingleton() bind(classOf[TSAnnotationService]).asEagerSingleton() } From 46bac2890a526b7022d25ed59a515b4838c52179 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 13:25:30 +0100 Subject: [PATCH 338/361] rename some functions, remove unused imports --- .../webknossos/tracingstore/TracingStoreModule.scala | 4 ++-- .../annotation/AnnotationTransactionService.scala | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index 412db33e262..6f4b09826f0 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -3,11 +3,11 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.AbstractModule import com.google.inject.name.Names import com.scalableminds.webknossos.datastore.services.AdHocMeshServiceHolder -import com.scalableminds.webknossos.tracingstore.annotation.{AnnotationTransactionService, TSAnnotationService} +import com.scalableminds.webknossos.tracingstore.annotation.TSAnnotationService import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService -import com.scalableminds.webknossos.tracingstore.tracings.{TemporaryTracingService, TracingDataStore} +import com.scalableminds.webknossos.tracingstore.tracings.TracingDataStore import org.apache.pekko.actor.ActorSystem class TracingStoreModule extends AbstractModule { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index f8a6ac78a2b..4c05249408b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -67,7 +67,7 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe Some(expiry)) } yield () - private def handleUpdateGroupForTransaction( + private def handleUpdateGroupOfTransaction( annotationId: String, previousVersionFox: Fox[Long], updateGroup: 
UpdateActionGroup)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = @@ -177,7 +177,7 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe } else { updateGroups.foldLeft(annotationService.currentMaterializableVersion(annotationId)) { (currentCommittedVersionFox, updateGroup) => - handleUpdateGroupForTransaction(annotationId, currentCommittedVersionFox, updateGroup) + handleUpdateGroupOfTransaction(annotationId, currentCommittedVersionFox, updateGroup) } } @@ -259,12 +259,12 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe * ignore it silently. This is in case the frontend sends a retry if it believes a save to be unsuccessful * despite the backend receiving it just fine. */ - private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup, tracingId: String, previousVersion: Long)( + private def failUnlessAlreadyHandled(updateGroup: UpdateActionGroup, annotationId: String, previousVersion: Long)( implicit ec: ExecutionContext): Fox[Long] = { val errorMessage = s"Incorrect version. 
Expected: ${previousVersion + 1}; Got: ${updateGroup.version}" for { _ <- Fox.assertTrue( - handledGroupIdStoreContains(tracingId, + handledGroupIdStoreContains(annotationId, updateGroup.transactionId, updateGroup.version, updateGroup.transactionGroupIndex)) ?~> errorMessage ~> CONFLICT From 933c1b0525a8b798d1979f2bd9e67ed2bff3ece3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 13:48:07 +0100 Subject: [PATCH 339/361] fix bucketMutatingActions lookup, rename emptystring to empty string --- app/models/dataset/ThumbnailService.scala | 2 +- test/backend/SqlEscapingTestSuite.scala | 2 +- webknossos-datastore/proto/Annotation.proto | 2 +- .../tracingstore/annotation/AnnotationTransactionService.scala | 2 +- .../tracingstore/annotation/AnnotationUpdateActions.scala | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/app/models/dataset/ThumbnailService.scala b/app/models/dataset/ThumbnailService.scala index 992fa688088..973ec4c7781 100644 --- a/app/models/dataset/ThumbnailService.scala +++ b/app/models/dataset/ThumbnailService.scala @@ -225,7 +225,7 @@ class ThumbnailDAO @Inject()(SQLClient: SqlClient)(implicit ec: ExecutionContext mimeType: String, mag: Vec3Int, mag1BoundingBox: BoundingBox): Fox[Unit] = { - val mappingName = mappingNameOpt.getOrElse("") // in sql, nullable columns can’t be primary key, so we encode no mapping with emptystring + val mappingName = mappingNameOpt.getOrElse("") // in sql, nullable columns can’t be primary key, so we encode no mapping with empty string for { _ <- run(q"""INSERT INTO webknossos.dataset_thumbnails ( _dataset, dataLayerName, width, height, mappingName, image, mimetype, mag, mag1BoundingBox, created) diff --git a/test/backend/SqlEscapingTestSuite.scala b/test/backend/SqlEscapingTestSuite.scala index b51cef65393..99e1cbe35e4 100644 --- a/test/backend/SqlEscapingTestSuite.scala +++ b/test/backend/SqlEscapingTestSuite.scala @@ -23,7 +23,7 @@ class SqlEscapingTestSuite extends PlaySpec with 
SqlTypeImplicits with SqlEscapi "handle null" in { assert(parseArrayLiteral(null) == List()) } - "handle emptystring" in { + "handle empty string" in { assert(parseArrayLiteral("") == List()) } "handle empty array literal" in { diff --git a/webknossos-datastore/proto/Annotation.proto b/webknossos-datastore/proto/Annotation.proto index a831beafb49..e36ad813f12 100644 --- a/webknossos-datastore/proto/Annotation.proto +++ b/webknossos-datastore/proto/Annotation.proto @@ -8,7 +8,7 @@ enum AnnotationLayerTypeProto { } message AnnotationProto { - required string description = 1; // emptystring encodes no description + required string description = 1; // empty string encodes no description required int64 version = 2; repeated AnnotationLayerProto annotationLayers = 3; required int64 earliestAccessibleVersion = 4; diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala index 4c05249408b..08a52611931 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationTransactionService.scala @@ -229,7 +229,7 @@ class AnnotationTransactionService @Inject()(handledGroupIdStore: TracingStoreRe tracing <- annotationService.findVolume(annotationId, volumeTracingId) _ <- volumeTracingService.applyBucketMutatingActions(volumeTracingId, tracing, - bucketMutatingActions, + actionsGrouped(volumeTracingId), updateActionGroup.version) } yield () } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala index c0ab31de00d..f81170d8f62 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationUpdateActions.scala @@ -67,7 +67,7 @@ case class UpdateLayerMetadataAnnotationAction(tracingId: String, } case class UpdateMetadataAnnotationAction( - description: Option[String], // None means do not change description. Emptystring means set to empty + description: Option[String], // None means do not change description. Empty string means set to empty actionTimestamp: Option[Long] = None, actionAuthorId: Option[String] = None, info: Option[String] = None) From 4dec1c76734eeda863cc89686366f4ce2939892d Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 13:50:32 +0100 Subject: [PATCH 340/361] reorder functions in AnnotationWithTracings --- .../annotation/AnnotationWithTracings.scala | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 8d99c5de3a0..33babe21ce8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -24,6 +24,10 @@ case class AnnotationWithTracings( editableMappingsByTracingId: Map[String, (EditableMappingInfo, EditableMappingUpdater)]) extends LazyLogging { + // Assumes that there is at most one skeleton layer per annotation. 
This is true as of this writing + def getSkeletonId: Option[String] = + getSkeletons.headOption.map(_._1) + def getSkeleton(tracingId: String): Box[SkeletonTracing] = for { tracingEither <- tracingsById.get(tracingId) @@ -33,28 +37,16 @@ case class AnnotationWithTracings( } } yield skeletonTracing - def getVolumes: List[(String, VolumeTracing)] = - tracingsById.view.flatMap { - case (id, Right(vt: VolumeTracing)) => Some(id, vt) - case _ => None - }.toList - def getSkeletons: List[(String, SkeletonTracing)] = tracingsById.view.flatMap { case (id, Left(st: SkeletonTracing)) => Some(id, st) case _ => None }.toList - // Assumes that there is at most one skeleton layer per annotation. This is true as of this writing - def getSkeletonId: Option[String] = - getSkeletons.headOption.map(_._1) - - def getEditableMappingTracingIds: List[String] = editableMappingsByTracingId.keys.toList - - def getEditableMappingsInfo: List[(String, EditableMappingInfo)] = - editableMappingsByTracingId.view.flatMap { - case (id, (info: EditableMappingInfo, _)) => Some(id, info) - case _ => None + def getVolumes: List[(String, VolumeTracing)] = + tracingsById.view.flatMap { + case (id, Right(vt: VolumeTracing)) => Some(id, vt) + case _ => None }.toList def getVolume(tracingId: String): Box[VolumeTracing] = @@ -72,6 +64,14 @@ case class AnnotationWithTracings( case _ => None }.toList + def getEditableMappingTracingIds: List[String] = editableMappingsByTracingId.keys.toList + + def getEditableMappingsInfo: List[(String, EditableMappingInfo)] = + editableMappingsByTracingId.view.flatMap { + case (id, (info: EditableMappingInfo, _)) => Some(id, info) + case _ => None + }.toList + def getEditableMappingInfo(tracingId: String): Box[EditableMappingInfo] = for { (info, _) <- editableMappingsByTracingId.get(tracingId) From e09309221235d4b4bafa8a20a24d0745730733df Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 13:53:39 +0100 Subject: [PATCH 341/361] correctly clear editableMappings map, 
fix typo --- .../annotation/AnnotationWithTracings.scala | 5 +++-- .../annotation/TSAnnotationService.scala | 12 ++++++------ 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index 33babe21ce8..d796d3f2cef 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -72,7 +72,7 @@ case class AnnotationWithTracings( case _ => None }.toList - def getEditableMappingInfo(tracingId: String): Box[EditableMappingInfo] = + def getEditableMappingInfo(tracingId: String): Option[EditableMappingInfo] = for { (info, _) <- editableMappingsByTracingId.get(tracingId) } yield info @@ -100,7 +100,8 @@ case class AnnotationWithTracings( def deleteLayer(a: DeleteLayerAnnotationAction): AnnotationWithTracings = this.copy( annotation = annotation.copy(annotationLayers = annotation.annotationLayers.filter(_.tracingId != a.tracingId)), - tracingsById = tracingsById.removed(a.tracingId) + tracingsById = tracingsById.removed(a.tracingId), + editableMappingsByTracingId = editableMappingsByTracingId.removed(a.tracingId) ) def updateLayerMetadata(a: UpdateLayerMetadataAnnotationAction): AnnotationWithTracings = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 08586ac51dc..5d0a1c09c47 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -523,13 +523,13 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss updated <- updateIter( Some(annotationWithTracings.withNewUpdaters(annotationWithTracings.version, targetVersion)), updates) - updatedWithNewVerson = updated.withVersion(targetVersion) - _ <- updatedWithNewVerson.flushBufferedUpdates() - _ <- flushUpdatedTracings(updatedWithNewVerson, updates) - _ <- flushAnnotationInfo(annotationId, updatedWithNewVerson) + updatedWithNewVersion = updated.withVersion(targetVersion) + _ <- updatedWithNewVersion.flushBufferedUpdates() + _ <- flushUpdatedTracings(updatedWithNewVersion, updates) + _ <- flushAnnotationInfo(annotationId, updatedWithNewVersion) _ <- Fox.runIf(reportChangesToWk && annotationWithTracings.annotation != updated.annotation)( - remoteWebknossosClient.updateAnnotation(annotationId, updatedWithNewVerson.annotation)) - } yield updatedWithNewVerson + remoteWebknossosClient.updateAnnotation(annotationId, updatedWithNewVersion.annotation)) + } yield updatedWithNewVersion } } From 79152b83966cadada7ded1377551104f2edcaa04 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 15:43:05 +0100 Subject: [PATCH 342/361] rename + rephrase some things for pr feedback --- .../annotation/AnnotationWithTracings.scala | 2 +- .../annotation/TSAnnotationService.scala | 10 +++++----- .../EditableMappingController.scala | 2 +- .../volume/VolumeTracingService.scala | 19 ++++++++++--------- 4 files changed, 17 insertions(+), 16 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala index d796d3f2cef..d5f932b1c1d 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/AnnotationWithTracings.scala @@ -164,7 +164,7 @@ case class AnnotationWithTracings( this.copy( editableMappingsByTracingId = editableMappingsByTracingId.updated(a.actionTracingId, (updated, updater))) - def flushBufferedUpdates()(implicit ec: ExecutionContext): Fox[Unit] = { + def flushEditableMappingUpdaterBuffers()(implicit ec: ExecutionContext): Fox[Unit] = { val updaters = editableMappingsByTracingId.values.map(_._2).toList for { _ <- Fox.serialCombined(updaters)(updater => updater.flushBuffersToFossil()) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 5d0a1c09c47..c5e653175c2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -219,7 +219,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = { // Note: works only if reset actions are in separate update groups - val sourceVersion = 0L // Tasks are always created with as v0 currently + val sourceVersion = 0L // Tasks are currently always created with v0 val before = Instant.now for { sourceAnnotation: AnnotationWithTracings <- getWithTracings(annotationId, Some(sourceVersion)) @@ -320,7 +320,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss /* * The migration of https://github.com/scalableminds/webknossos/pull/7917 does not guarantee that the skeleton layer - * is materialized at the same version as the 
annottation. So even if we have an existing annotation version, + * is materialized at the same version as the annotation. So even if we have an existing annotation version, * we may fetch skeleton updates *older* than it, in order to fully construct the state of that version. * Only annotations from before that migration have this skeletonMayHavePendingUpdates=Some(true). */ @@ -361,7 +361,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss annotationId: String, annotation: AnnotationWithTracings, targetVersion: Long)(implicit ec: ExecutionContext): Fox[List[(Long, List[UpdateAction])]] = - if (annotation.annotation.skeletonMayHavePendingUpdates.getOrElse(false)) { + if (annotation.annotation.editableMappingsMayHavePendingUpdates.getOrElse(false)) { for { updatesByEditableMapping <- Fox.serialCombined(annotation.getEditableMappingTracingIds) { tracingId => for { @@ -524,7 +524,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss Some(annotationWithTracings.withNewUpdaters(annotationWithTracings.version, targetVersion)), updates) updatedWithNewVersion = updated.withVersion(targetVersion) - _ <- updatedWithNewVersion.flushBufferedUpdates() + _ <- updatedWithNewVersion.flushEditableMappingUpdaterBuffers() _ <- flushUpdatedTracings(updatedWithNewVersion, updates) _ <- flushAnnotationInfo(annotationId, updatedWithNewVersion) _ <- Fox.runIf(reportChangesToWk && annotationWithTracings.annotation != updated.annotation)( @@ -556,7 +556,7 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss case _ => Fox.successful(()) } _ <- Fox.serialCombined(annotationWithTracings.getSkeletons) { - case (skeletonTracingId, skeletonTracing: SkeletonTracing) + case (skeletonTracingId, skeletonTracing) if allMayHaveUpdates || tracingIdsWithUpdates.contains(skeletonTracingId) => tracingDataStore.skeletons.put(skeletonTracingId, skeletonTracing.version, skeletonTracing) case _ => 
Fox.successful(()) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index 925e009ea7d..ad3c3898fff 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -132,7 +132,7 @@ class EditableMappingController @Inject()( for { annotationId <- remoteWebknossosClient.getAnnotationIdForTracing(tracingId) tracing <- annotationService.findVolume(annotationId, tracingId) - _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" + _ <- editableMappingService.assertTracingHasEditableMapping(tracing) editableMappingInfo <- annotationService.findEditableMappingInfo(annotationId, tracingId) remoteFallbackLayer <- volumeTracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback(tracingId, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 3cf76c3a36a..2b4128c436f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -72,6 +72,7 @@ class VolumeTracingService @Inject()( adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService + // (tracingId, 
fallbackLayerNameOpt, userTokenOpt) → remoteFallbackLayerOpt private val fallbackLayerCache: AlfuCache[(String, Option[String], Option[String]), Option[RemoteFallbackLayer]] = AlfuCache(maxCapacity = 100) @@ -152,7 +153,7 @@ class VolumeTracingService @Inject()( dataLayer = volumeTracingLayer(tracingId, volumeTracing) actionBucketData <- action.base64Data.map(Base64.getDecoder.decode).toFox _ <- saveBucket(dataLayer, bucketPosition, actionBucketData, updateGroupVersion) ?~> "failed to save bucket" - mappingName <- selectMappingName(volumeTracing) + mappingName <- getMappingNameUnlessEditable(volumeTracing) _ <- Fox.runIfOptionTrue(volumeTracing.hasSegmentIndex) { for { previousBucketBytes <- loadBucket(dataLayer, bucketPosition, Some(updateGroupVersion - 1L)).futureBox @@ -172,9 +173,9 @@ class VolumeTracingService @Inject()( def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = if (tracing.getHasEditableMapping) Some(tracingId) else None - private def selectMappingName(tracing: VolumeTracing): Fox[Option[String]] = + private def getMappingNameUnlessEditable(tracing: VolumeTracing): Fox[Option[String]] = if (tracing.getHasEditableMapping) - Fox.failure("mappingName called on volumeTracing with editableMapping!") + Fox.failure("getMappingNameUnlessEditable called on volumeTracing with editableMapping!") else Fox.successful(tracing.mappingName) private def deleteSegmentData(tracingId: String, @@ -192,7 +193,7 @@ class VolumeTracingService @Inject()( } else { possibleAdditionalCoordinates.toList } - mappingName <- selectMappingName(volumeTracing) + mappingName <- getMappingNameUnlessEditable(volumeTracing) _ <- Fox.serialCombined(volumeTracing.mags.toList)(magProto => Fox.serialCombined(additionalCoordinateList)(additionalCoordinates => { val mag = vec3IntFromProto(magProto) @@ -265,7 +266,7 @@ class VolumeTracingService @Inject()( fallbackLayer, dataLayer.additionalAxes, tc) - mappingName <- selectMappingName(sourceTracing) + 
mappingName <- getMappingNameUnlessEditable(sourceTracing) _ <- Fox.serialCombined(bucketStreamBeforeRevert) { case (bucketPosition, dataBeforeRevert, version) => if (version > sourceVersion) { @@ -325,7 +326,7 @@ class VolumeTracingService @Inject()( _ = if (magSet.nonEmpty) magSets.add(magSet) } yield () } - mappingName <- selectMappingName(tracing) + mappingName <- getMappingNameUnlessEditable(tracing) mags <- // if none of the tracings contained any volume data do not save buckets, use full mag list, as already initialized on wk-side if (magSets.isEmpty) @@ -390,7 +391,7 @@ class VolumeTracingService @Inject()( val savedMags = new mutable.HashSet[Vec3Int]() for { fallbackLayer <- getFallbackLayer(tracingId, tracing) - mappingName <- selectMappingName(tracing) + mappingName <- getMappingNameUnlessEditable(tracing) segmentIndexBuffer = new VolumeSegmentIndexBuffer( tracingId, volumeSegmentIndexClient, @@ -549,7 +550,7 @@ class VolumeTracingService @Inject()( AdditionalAxis.fromProtosAsOpt(sourceTracing.additionalAxes), tc ) - mappingName <- selectMappingName(sourceTracing) + mappingName <- getMappingNameUnlessEditable(sourceTracing) _ <- Fox.serialCombined(buckets) { case (bucketPosition, bucketData) => if (newTracing.mags.contains(vec3IntToProto(bucketPosition.mag))) { @@ -840,7 +841,7 @@ class VolumeTracingService @Inject()( tracing.elementClass) dataLayer = volumeTracingLayer(tracingId, tracing) fallbackLayer <- getFallbackLayer(tracingId, tracing) - mappingName <- selectMappingName(tracing) + mappingName <- getMappingNameUnlessEditable(tracing) segmentIndexBuffer <- Fox.successful( new VolumeSegmentIndexBuffer(tracingId, volumeSegmentIndexClient, From aea24dd8dc3e84a6031e60db0722f46469b4f0ea Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 16:19:48 +0100 Subject: [PATCH 343/361] add some error messages for editable mapping routes --- conf/messages | 2 ++ .../controllers/EditableMappingController.scala | 4 ++-- 
.../tracings/volume/VolumeTracingService.scala | 12 ++++++------ 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/conf/messages b/conf/messages index b430154c836..ae3670381f1 100644 --- a/conf/messages +++ b/conf/messages @@ -251,6 +251,8 @@ annotation.deleteLayer.onlyLayer=Could not delete layer because it is the only l annotation.layer.notFound=Layer could not be found. annotation.getNewestVersion.failed=Could not get the newest version information for this annotation layer annotation.idForTracing.failed=Could not find the annotation id for this tracing id. +annotation.editableMapping.getAgglomerateGraph.failed=Could not look up an agglomerate graph for requested agglomerate. +annotation.editableMapping.getAgglomerateIdsForSegments.failed=Could not look up agglomerate ids for requested segments. mesh.notFound=Mesh could not be found mesh.write.failed=Failed to convert mesh info to json diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala index ad3c3898fff..915e717178b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/EditableMappingController.scala @@ -56,7 +56,7 @@ class EditableMappingController @Inject()( segmentIds <- agglomerateGraphBox match { case Full(agglomerateGraph) => Fox.successful(agglomerateGraph.segments) case Empty => Fox.successful(List.empty) - case f: Failure => f.toFox + case f: Failure => f.toFox ?~> "annotation.editableMapping.getAgglomerateGraph.failed" } agglomerateIdIsPresent = agglomerateGraphBox.isDefined } yield Ok(Json.toJson(EditableMappingSegmentListResult(segmentIds.toList, agglomerateIdIsPresent))) @@ -79,7 +79,7 @@ class EditableMappingController @Inject()( 
editableMappingInfo, annotation.version, tracingId, - remoteFallbackLayer) + remoteFallbackLayer) ?~> "annotation.editableMapping.getAgglomerateIdsForSegments.failed" agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 2b4128c436f..c6befe22017 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -493,16 +493,16 @@ class VolumeTracingService @Inject()( val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, isFromTask, datasetBoundingBox) val tracingWithMagRestrictions = VolumeTracingMags.restrictMagList(tracingWithBB, magRestrictions) for { - fallbackLayer <- getFallbackLayer(sourceTracingId, sourceTracing) + fallbackLayer <- getFallbackLayer(sourceTracingId, tracingWithMagRestrictions) hasSegmentIndex <- VolumeSegmentIndexService.canHaveSegmentIndex(remoteDatastoreClient, fallbackLayer) newTracing = tracingWithMagRestrictions.copy( createdTimestamp = System.currentTimeMillis(), - editPosition = editPosition.map(vec3IntToProto).getOrElse(sourceTracing.editPosition), - editRotation = editRotation.map(vec3DoubleToProto).getOrElse(sourceTracing.editRotation), - boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(sourceTracing.boundingBox), + editPosition = editPosition.map(vec3IntToProto).getOrElse(tracingWithMagRestrictions.editPosition), + editRotation = editRotation.map(vec3DoubleToProto).getOrElse(tracingWithMagRestrictions.editRotation), + boundingBox = boundingBoxOptToProto(boundingBox).getOrElse(tracingWithMagRestrictions.boundingBox), 
mappingName = - if (sourceTracing.getHasEditableMapping) Some(newTracingId) - else sourceTracing.mappingName, + if (tracingWithMagRestrictions.getHasEditableMapping) Some(newTracingId) + else tracingWithMagRestrictions.mappingName, version = newVersion, // Adding segment index on duplication if the volume tracing allows it. This will be used in duplicateData hasSegmentIndex = Some(hasSegmentIndex) From 2e39776327b6682395c68382c6c16a98fc3879fc Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 16:25:39 +0100 Subject: [PATCH 344/361] add comment on merging editable mappings, move up saveSkeleton call in mergedFromIds --- .../tracingstore/annotation/TSAnnotationService.scala | 9 +++++++++ .../controllers/TSAnnotationController.scala | 4 ++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index c5e653175c2..967c8ca63e8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -918,6 +918,15 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } yield updatesByAnnotation.flatten + /* + * Merging editable mappings is complex because it is not defined on the materialized values (as with skeleton + volume), + * but rather on the update actions. + * We apply all updates from the first annotation, and then all updates from the second annotation. + * Everything is looked up by click position so that everything is defined. + * This means that we also need to store all the editable mapping updates in the merged annotation + * So that it itself can be merged again. 
+ * The earliestAccessibleVersion property ensures that the fully merged annotation is still the earliest accessible one. + */ def mergeEditableMappings(annotationIds: List[String], newAnnotationId: String, newVolumeTracingId: String, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 38cf7fe755d..ec7f28ec940 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -201,6 +201,8 @@ class TSAnnotationController @Inject()( .map(_.flatten) mergedSkeletonOpt <- Fox.runIf(skeletonTracings.nonEmpty)( skeletonTracingService.merge(skeletonTracings, newVersion = newTargetVersion).toFox) + _ <- Fox.runOptional(mergedSkeletonOpt)( + skeletonTracingService.saveSkeleton(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore)) mergedSkeletonLayerOpt = mergedSkeletonOpt.map( _ => AnnotationLayerProto(name = mergedSkeletonName, @@ -217,8 +219,6 @@ class TSAnnotationController @Inject()( .withAnnotationLayers(mergedLayers) .withEarliestAccessibleVersion(newTargetVersion) .withVersion(newTargetVersion) - _ <- Fox.runOptional(mergedSkeletonOpt)( - skeletonTracingService.saveSkeleton(_, Some(newSkeletonId), version = newTargetVersion, toTemporaryStore)) _ <- annotationService.saveAnnotationProto(newAnnotationId, newTargetVersion, mergedAnnotation, From f03184e64593d6ccf187a03ee93ea33326c1f7e0 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 16:35:22 +0100 Subject: [PATCH 345/361] Remove superfluous empty list in SequenceUtils.splitAndIsolate --- .../UpdateGroupHandlingUnitTestSuite.scala | 22 +++++++++++++++++++ .../scala/collections/SequenceUtils.scala | 1 + 2 files changed, 23 
insertions(+) diff --git a/test/backend/UpdateGroupHandlingUnitTestSuite.scala b/test/backend/UpdateGroupHandlingUnitTestSuite.scala index 3012f03159d..bb553c9db36 100644 --- a/test/backend/UpdateGroupHandlingUnitTestSuite.scala +++ b/test/backend/UpdateGroupHandlingUnitTestSuite.scala @@ -34,6 +34,28 @@ class UpdateGroupHandlingUnitTestSuite extends PlaySpec with UpdateGroupHandling assert(res(1)._2.length == 1) assert(res(1)._1 == 6L) } + + "work if last element is isolationSensitive" in { + val updateGroupsBefore = List( + (5L, + List( + MergeTreeSkeletonAction(sourceId = 1, targetId = 2, actionTracingId = Dummies.tracingId), + MergeTreeSkeletonAction(sourceId = 2, targetId = 3, actionTracingId = Dummies.tracingId) + )), + (6L, + List( + RevertToVersionAnnotationAction(sourceVersion = 1) + )), + (7L, + List( + RevertToVersionAnnotationAction(sourceVersion = 1) + )) + ) + val res = regroupByIsolationSensitiveActions(updateGroupsBefore) + assert(res.length == 3) + assert(res(1)._2.length == 1) + assert(res(1)._1 == 6L) + } } "ironOutReverts" should { diff --git a/util/src/main/scala/collections/SequenceUtils.scala b/util/src/main/scala/collections/SequenceUtils.scala index 93978cbd9ba..f0861fe7af9 100644 --- a/util/src/main/scala/collections/SequenceUtils.scala +++ b/util/src/main/scala/collections/SequenceUtils.scala @@ -32,6 +32,7 @@ object SequenceUtils { } } } + .filter(_.nonEmpty) // Remove empty lists. We create one in case the last element satisfies the predicate. 
.reverse // we prepended on the outer list (for perf reasons) .map(_.reverse) // we prepended on the inner lists (for perf reasons) From a5ecb6f911f9493028dc82222bd91ec160860b48 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 2 Jan 2025 16:57:04 +0100 Subject: [PATCH 346/361] extract EditableMappingMergeService --- .../annotation/TSAnnotationService.scala | 72 ------------- .../controllers/TSAnnotationController.scala | 4 +- .../EditableMappingMergeService.scala | 102 ++++++++++++++++++ .../EditableMappingService.scala | 3 +- .../EditableMappingUpdater.scala | 4 +- 5 files changed, 107 insertions(+), 78 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingMergeService.scala diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 967c8ca63e8..0fcbee49b5a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -452,7 +452,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss tc, remoteDatastoreClient, editableMappingService, - this, tracingDataStore ) @@ -902,75 +901,4 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss _ <- tracingDataStore.skeletons.put(newTracingId, newVersion, adaptedSkeleton) } yield newTracingId - private def mergeEditableMappingUpdates(annotationIds: List[String], newTracingId: String)( - implicit ec: ExecutionContext): Fox[List[EditableMappingUpdateAction]] = - for { - updatesByAnnotation <- Fox.serialCombined(annotationIds) { annotationId => - for { - updateGroups <- 
tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple(annotationId)( - fromJsonBytes[List[UpdateAction]]) - updatesIroned: Seq[UpdateAction] = ironOutReverts(updateGroups) - editableMappingUpdates = updatesIroned.flatMap { - case a: EditableMappingUpdateAction => Some(a.withActionTracingId(newTracingId)) - case _ => None - } - } yield editableMappingUpdates - } - } yield updatesByAnnotation.flatten - - /* - * Merging editable mappings is complex because it is not defined on the materialized values (as with skeleton + volume), - * but rather on the update actions. - * We apply all updates from the first annotation, and then all updates from the second annotation. - * Everything is looked up by click position so that everything is defined. - * This means that we also need to store all the editable mapping updates in the merged annotation - * So that it itself can be merged again. - * The earliestAccessibleVersion property ensures that the fully merged annotation is still the earliest accessible one. 
- */ - def mergeEditableMappings(annotationIds: List[String], - newAnnotationId: String, - newVolumeTracingId: String, - tracingsWithIds: List[(VolumeTracing, String)], - toTemporaryStore: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = - if (tracingsWithIds.nonEmpty && tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { - for { - before <- Instant.nowFox - _ <- bool2Fox(!toTemporaryStore) ?~> "Cannot merge editable mappings to temporary store (trying to merge compound annotations?)" - remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => - remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) - remoteFallbackLayer <- SequenceUtils.findUniqueElement(remoteFallbackLayers) ?~> "Cannot merge editable mappings based on different dataset layers" - editableMappingInfos <- Fox.serialCombined(tracingsWithIds) { tracingWithId => - tracingDataStore.editableMappingsInfo.get(tracingWithId._2)(fromProtoBytes[EditableMappingInfo]) - } - baseMappingName <- SequenceUtils.findUniqueElement(editableMappingInfos.map(_.value.baseMappingName)) ?~> "Cannot merge editable mappings based on different base mappings" - linearizedEditableMappingUpdates: List[UpdateAction] <- mergeEditableMappingUpdates(annotationIds, - newVolumeTracingId) - targetVersion = linearizedEditableMappingUpdates.length - _ <- Fox.runIf(!toTemporaryStore) { - var updateVersion = 1L - Fox.serialCombined(linearizedEditableMappingUpdates) { update: UpdateAction => - for { - _ <- tracingDataStore.annotationUpdates.put(newVolumeTracingId, updateVersion, Json.toJson(List(update))) - _ = updateVersion += 1 - } yield () - } - } - editableMappingInfo = editableMappingService.create(baseMappingName) - updater = editableMappingUpdaterFor(newAnnotationId, - newVolumeTracingId, - remoteFallbackLayer, - editableMappingInfo, - 0L, - targetVersion) - _ <- updater.applyUpdatesAndSave(editableMappingInfo, linearizedEditableMappingUpdates) 
- _ = Instant.logSince( - before, - s"Merging ${tracingsWithIds.length} editable mappings by applying ${linearizedEditableMappingUpdates.length} updates") - } yield targetVersion - } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { - Fox.empty - } else { - Fox.failure("Cannot merge annotations with and without editable mappings") - } - } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index ec7f28ec940..0b982410ed3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -21,6 +21,7 @@ import com.scalableminds.webknossos.tracingstore.annotation.{ } import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings._ +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingMergeService import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import net.liftweb.common.{Empty, Failure, Full} @@ -34,6 +35,7 @@ class TSAnnotationController @Inject()( accessTokenService: TracingStoreAccessTokenService, slackNotificationService: TSSlackNotificationService, annotationService: TSAnnotationService, + editableMappingMergeService: EditableMappingMergeService, annotationTransactionService: AnnotationTransactionService, skeletonTracingService: SkeletonTracingService, volumeTracingService: VolumeTracingService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) @@ -172,7 +174,7 @@ class TSAnnotationController @Inject()( 
Some(TracingSelector(l.tracingId)) }) .map(_.flatten) - mergeEditableMappingsResultBox <- annotationService + mergeEditableMappingsResultBox <- editableMappingMergeService .mergeEditableMappings(request.body, newAnnotationId, newVolumeId, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingMergeService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingMergeService.scala new file mode 100644 index 00000000000..df1819e1b1b --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingMergeService.scala @@ -0,0 +1,102 @@ +package com.scalableminds.webknossos.tracingstore.tracings.editablemapping + +import collections.SequenceUtils +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.time.Instant +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{bool2Fox, option2Fox} +import com.scalableminds.webknossos.datastore.EditableMappingInfo.EditableMappingInfo +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} +import com.scalableminds.webknossos.tracingstore.annotation.{UpdateAction, UpdateGroupHandling} +import com.scalableminds.webknossos.tracingstore.tracings.{FallbackDataHelper, KeyValueStoreImplicits, TracingDataStore} +import play.api.libs.json.Json + +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class EditableMappingMergeService @Inject()(val tracingDataStore: TracingDataStore, + val remoteDatastoreClient: TSRemoteDatastoreClient, + val remoteWebknossosClient: TSRemoteWebknossosClient, + editableMappingService: EditableMappingService) + extends KeyValueStoreImplicits + with UpdateGroupHandling + with FallbackDataHelper { + + /* + * Merging editable 
mappings is complex because it is not defined on the materialized values (as with skeleton + volume), + * but rather on the update actions. + * We apply all updates from the first annotation, and then all updates from the second annotation. + * Everything is looked up by click position so that everything is defined. + * This means that we also need to store all the editable mapping updates in the merged annotation + * So that it itself can be merged again. + * The earliestAccessibleVersion property ensures that the fully merged annotation is still the earliest accessible one. + */ + def mergeEditableMappings(annotationIds: List[String], + newAnnotationId: String, + newVolumeTracingId: String, + tracingsWithIds: List[(VolumeTracing, String)], + toTemporaryStore: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Long] = + if (tracingsWithIds.nonEmpty && tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { + for { + before <- Instant.nowFox + _ <- bool2Fox(!toTemporaryStore) ?~> "Cannot merge editable mappings to temporary store (trying to merge compound annotations?)" + remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => + remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) + remoteFallbackLayer <- SequenceUtils.findUniqueElement(remoteFallbackLayers) ?~> "Cannot merge editable mappings based on different dataset layers" + editableMappingInfos <- Fox.serialCombined(tracingsWithIds) { tracingWithId => + tracingDataStore.editableMappingsInfo.get(tracingWithId._2)(fromProtoBytes[EditableMappingInfo]) + } + baseMappingName <- SequenceUtils.findUniqueElement(editableMappingInfos.map(_.value.baseMappingName)) ?~> "Cannot merge editable mappings based on different base mappings" + linearizedEditableMappingUpdates: List[UpdateAction] <- mergeEditableMappingUpdates(annotationIds, + newVolumeTracingId) + targetVersion = linearizedEditableMappingUpdates.length + _ <- 
Fox.runIf(!toTemporaryStore) { + var updateVersion = 1L + Fox.serialCombined(linearizedEditableMappingUpdates) { update: UpdateAction => + for { + _ <- tracingDataStore.annotationUpdates.put(newVolumeTracingId, updateVersion, Json.toJson(List(update))) + _ = updateVersion += 1 + } yield () + } + } + editableMappingInfo = editableMappingService.create(baseMappingName) + updater = new EditableMappingUpdater( + newAnnotationId, + newVolumeTracingId, + editableMappingInfo.baseMappingName, + 0L, + targetVersion, + remoteFallbackLayer, + tc, + remoteDatastoreClient, + editableMappingService, + tracingDataStore + ) + _ <- updater.applyUpdatesAndSave(editableMappingInfo, linearizedEditableMappingUpdates) + _ = Instant.logSince( + before, + s"Merging ${tracingsWithIds.length} editable mappings by applying ${linearizedEditableMappingUpdates.length} updates") + } yield targetVersion + } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { + Fox.empty + } else { + Fox.failure("Cannot merge annotations with and without editable mappings") + } + + private def mergeEditableMappingUpdates(annotationIds: List[String], newTracingId: String)( + implicit ec: ExecutionContext): Fox[List[EditableMappingUpdateAction]] = + for { + updatesByAnnotation <- Fox.serialCombined(annotationIds) { annotationId => + for { + updateGroups <- tracingDataStore.annotationUpdates.getMultipleVersionsAsVersionValueTuple(annotationId)( + fromJsonBytes[List[UpdateAction]]) + updatesIroned: Seq[UpdateAction] = ironOutReverts(updateGroups) + editableMappingUpdates = updatesIroned.flatMap { + case a: EditableMappingUpdateAction => Some(a.withActionTracingId(newTracingId)) + case _ => None + } + } yield editableMappingUpdates + } + } yield updatesByAnnotation.flatten +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 251e9fc509a..33f9cc81a6d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -280,7 +280,7 @@ class EditableMappingService @Inject()( agglomerateGraphBox <- getAgglomerateGraphForId(tracingId, version, agglomerateId).futureBox skeletonBytes <- agglomerateGraphBox match { case Full(agglomerateGraph) => - Fox.successful(agglomerateGraphToSkeleton(tracingId, agglomerateGraph, remoteFallbackLayer, agglomerateId)) + Fox.successful(agglomerateGraphToSkeleton(tracingId, agglomerateGraph, agglomerateId)) case Empty => remoteDatastoreClient.getAgglomerateSkeleton(remoteFallbackLayer, editableMappingInfo.baseMappingName, @@ -291,7 +291,6 @@ class EditableMappingService @Inject()( private def agglomerateGraphToSkeleton(tracingId: String, graph: AgglomerateGraph, - remoteFallbackLayer: RemoteFallbackLayer, agglomerateId: Long): Array[Byte] = { val nodeIdStartAtOneOffset = 1 val nodes = graph.positions.zipWithIndex.map { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index bdce3b46fb7..ea2a6f620e8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -9,7 +9,7 @@ import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentToAgglomerateChunkProto } import 
com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient -import com.scalableminds.webknossos.tracingstore.annotation.{TSAnnotationService, UpdateAction} +import com.scalableminds.webknossos.tracingstore.annotation.UpdateAction import com.scalableminds.webknossos.tracingstore.tracings.volume.ReversionHelper import com.scalableminds.webknossos.tracingstore.tracings.{ KeyValueStoreImplicits, @@ -40,7 +40,6 @@ class EditableMappingUpdater( tokenContext: TokenContext, remoteDatastoreClient: TSRemoteDatastoreClient, editableMappingService: EditableMappingService, - annotationService: TSAnnotationService, tracingDataStore: TracingDataStore ) extends KeyValueStoreImplicits with ReversionHelper @@ -417,7 +416,6 @@ class EditableMappingUpdater( tokenContext, remoteDatastoreClient, editableMappingService, - annotationService, tracingDataStore ) } From 0eb7b58a6a1ec3acdb4364270e859fd5b3dd326e Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 3 Jan 2025 16:03:09 +0100 Subject: [PATCH 347/361] incorporate pr feedback --- frontend/javascripts/admin/admin_rest_api.ts | 4 +--- frontend/javascripts/oxalis/model/sagas/proofread_saga.ts | 6 +----- .../oxalis/view/right-border-tabs/dataset_info_tab_view.tsx | 2 +- .../javascripts/test/fixtures/tasktracing_server_objects.ts | 1 - 4 files changed, 3 insertions(+), 10 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 5fe11e13db5..6f46037c19c 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1934,11 +1934,9 @@ export async function getAgglomeratesForSegmentsFromDatastore, ): Promise { - const params = new URLSearchParams(); - const segmentIdBuffer = serializeProtoListOfLong(segmentIds); const listArrayBuffer: ArrayBuffer = await doWithToken((token) => { - params.append("token", token); + const params = new URLSearchParams({ token }); return Request.receiveArraybuffer( 
`${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?${params}`, { diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index 8f38a2f68bc..b4d744c2dc1 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -268,12 +268,8 @@ function* createEditableMapping(): Saga { // This should never occur, because the proofreading tool is only available when a volume tracing layer is active. throw new Error("No active segmentation tracing layer. Cannot create editable mapping."); } - const upToDateVolumeTracing = yield* select((state) => getActiveSegmentationTracing(state)); - if (upToDateVolumeTracing == null) { - throw new Error("No active segmentation tracing layer. Cannot create editable mapping."); - } - const volumeTracingId = upToDateVolumeTracing.tracingId; + const volumeTracingId = volumeTracing.tracingId; const layerName = volumeTracingId; const baseMappingName = volumeTracing.mappingName; yield* put(setMappingNameAction(layerName, volumeTracingId, "HDF5")); diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index e86d6c07f6b..cbd95bb2841 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -227,7 +227,7 @@ export function AnnotationStats({ {asInfoBlock &&

Statistics

}
- {stats.treeCount} {formatLabel(pluralize("Tree", stats.treeCount))} + {skeletonStats.treeCount} {formatLabel(pluralize("Tree", skeletonStats.treeCount))} @@ -236,7 +253,7 @@ export function AnnotationStats({ /> - {stats.segmentCount} {formatLabel(pluralize("Segment", stats.segmentCount))} + {totalSegmentCount} {formatLabel(pluralize("Segment", totalSegmentCount))}
- {skeletonStats && "treeCount" in skeletonStats ? ( + {skeletonStats ? ( Date: Fri, 3 Jan 2025 16:04:58 +0100 Subject: [PATCH 348/361] extract actionTracingId in spec --- .../test/sagas/skeletontracing_saga.spec.ts | 125 +++++++++--------- 1 file changed, 63 insertions(+), 62 deletions(-) diff --git a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts index d125f408ca3..72f002fccc4 100644 --- a/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/skeletontracing_saga.spec.ts @@ -25,6 +25,7 @@ import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; const TIMESTAMP = 1494347146379; +const actionTracingId = "tracingId"; const DateMock = { now: () => TIMESTAMP, }; @@ -245,7 +246,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", t.like(updateActions[0], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, treeId: 1, }, @@ -253,7 +254,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", t.like(updateActions[1], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, treeId: 1, }, @@ -261,7 +262,7 @@ test("SkeletonTracingSaga should emit createNode and createEdge update actions", t.deepEqual(updateActions[2], { name: "createEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 1, source: 1, target: 2, @@ -283,14 +284,14 @@ test("SkeletonTracingSaga should emit createNode and createTree update actions", t.like(updateActions[0], { name: "createTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); t.like(updateActions[1], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, treeId: 2, }, @@ -298,7 +299,7 @@ test("SkeletonTracingSaga 
should emit createNode and createTree update actions", t.like(updateActions[2], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, treeId: 1, }, @@ -321,7 +322,7 @@ test("SkeletonTracingSaga should emit first deleteNode and then createNode updat t.deepEqual(updateActions[0], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 2, treeId: 2, }, @@ -329,14 +330,14 @@ test("SkeletonTracingSaga should emit first deleteNode and then createNode updat t.deepEqual(updateActions[1], { name: "deleteTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); t.like(updateActions[2], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, treeId: 1, }, @@ -344,7 +345,7 @@ test("SkeletonTracingSaga should emit first deleteNode and then createNode updat t.deepEqual(updateActions[3], { name: "createEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 1, source: 1, target: 2, @@ -363,7 +364,7 @@ test("SkeletonTracingSaga should emit a deleteNode update action", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 1, treeId: 1, }, @@ -384,7 +385,7 @@ test("SkeletonTracingSaga should emit a deleteEdge update action", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 2, treeId: 1, }, @@ -392,7 +393,7 @@ test("SkeletonTracingSaga should emit a deleteEdge update action", (t) => { t.deepEqual(updateActions[1], { name: "deleteEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 1, source: 1, target: 2, @@ -411,7 +412,7 @@ test("SkeletonTracingSaga should emit a deleteTree update action", (t) => { t.like(updateActions[0], { name: "deleteTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); @@ -428,7 +429,7 @@ test("SkeletonTracingSaga 
should emit an updateNode update action", (t) => { t.like(updateActions[0], { name: "updateNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, treeId: 1, radius: 12, @@ -461,7 +462,7 @@ test("SkeletonTracingSaga should emit an updateTree update actions (comments)", t.like(updateActions[0], { name: "updateTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, comments: [ { @@ -498,7 +499,7 @@ test("SkeletonTracingSaga should emit an updateTree update actions (branchpoints t.like(updateActions[0], { name: "updateTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, branchPoints: [ { @@ -529,7 +530,7 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[0], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 1, nodeId: 1, }, @@ -537,14 +538,14 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[1], { name: "deleteTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, }, }); t.like(updateActions[2], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, treeId: 2, }, @@ -552,7 +553,7 @@ test("SkeletonTracingSaga should emit update actions on merge tree", (t) => { t.deepEqual(updateActions[3], { name: "createEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, source: 3, target: 1, @@ -581,14 +582,14 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.like(updateActions[0], { name: "createTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 3, }, }); t.like(updateActions[1], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, treeId: 3, }, @@ -596,14 +597,14 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.like(updateActions[2], { name: "createTree", value: { - 
actionTracingId: "tracingId", + actionTracingId, id: 4, }, }); t.like(updateActions[3], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 4, treeId: 4, }, @@ -611,7 +612,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[4], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, nodeId: 2, }, @@ -619,7 +620,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[5], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, nodeId: 3, }, @@ -627,7 +628,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[6], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, nodeId: 4, }, @@ -635,7 +636,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[7], { name: "deleteEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, source: 2, target: 3, @@ -644,7 +645,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[8], { name: "deleteEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, source: 3, target: 4, @@ -653,7 +654,7 @@ test("SkeletonTracingSaga should emit update actions on split tree", (t) => { t.deepEqual(updateActions[9], { name: "deleteEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, source: 3, target: 1, @@ -688,7 +689,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[0], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + actionTracingId, sourceId: 1, targetId: 2, nodeIds: [1, 2, 3], @@ -698,7 +699,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { 
t.deepEqual(simplifiedFirstBatch[1], { name: "deleteTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, }, }); @@ -706,7 +707,7 @@ test("compactUpdateActions should detect a tree merge (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "createEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, source: 4, target: 1, @@ -752,7 +753,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.like(simplifiedFirstBatch[0], { name: "createNode", value: { - actionTracingId: "tracingId", + actionTracingId, id: 5, treeId: 2, }, @@ -760,7 +761,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.like(simplifiedFirstBatch[1], { name: "createEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, source: 4, target: 5, @@ -772,7 +773,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[0], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + actionTracingId, sourceId: 1, targetId: 2, nodeIds: [1, 2, 3], @@ -782,7 +783,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[1], { name: "deleteTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 1, }, }); @@ -793,7 +794,7 @@ test("compactUpdateActions should detect a tree merge (2/3)", (t) => { t.deepEqual(simplifiedSecondBatch[4], { name: "createEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 2, source: 5, target: 1, @@ -859,7 +860,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[0], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + actionTracingId, sourceId: 2, targetId: 1, nodeIds: [4], @@ -869,7 +870,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "deleteTree", value: { - 
actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); @@ -877,7 +878,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "createEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 1, source: 1, target: 4, @@ -896,7 +897,7 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedThirdBatch[0], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + actionTracingId, sourceId: 2, targetId: 1, nodeIds: [5, 6], @@ -905,14 +906,14 @@ test("compactUpdateActions should detect a tree merge (3/3)", (t) => { t.deepEqual(simplifiedThirdBatch[1], { name: "deleteTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); t.deepEqual(simplifiedThirdBatch[2], { name: "createEdge", value: { - actionTracingId: "tracingId", + actionTracingId, treeId: 1, source: 1, target: 6, @@ -949,7 +950,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.like(simplifiedFirstBatch[0], { name: "createTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); @@ -957,7 +958,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + actionTracingId, sourceId: 1, targetId: 2, nodeIds: [3, 4], @@ -967,7 +968,7 @@ test("compactUpdateActions should detect a tree split (1/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 2, treeId: 1, }, @@ -1010,14 +1011,14 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.like(simplifiedFirstBatch[0], { name: "createTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + 
actionTracingId, sourceId: 1, targetId: 2, nodeIds: [3, 4], @@ -1026,14 +1027,14 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.like(simplifiedFirstBatch[2], { name: "createTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 3, }, }); t.deepEqual(simplifiedFirstBatch[3], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + actionTracingId, sourceId: 1, targetId: 3, nodeIds: [5, 6, 7], @@ -1043,7 +1044,7 @@ test("compactUpdateActions should detect a tree split (2/3)", (t) => { t.deepEqual(simplifiedFirstBatch[4], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 2, treeId: 1, }, @@ -1087,7 +1088,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.like(simplifiedFirstBatch[0], { name: "createTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); @@ -1095,7 +1096,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[1], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + actionTracingId, sourceId: 1, targetId: 2, nodeIds: [3, 4, 5, 6], @@ -1105,7 +1106,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedFirstBatch[2], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 2, treeId: 1, }, @@ -1118,7 +1119,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.like(simplifiedSecondBatch[0], { name: "createTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 3, }, }); @@ -1126,7 +1127,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { t.deepEqual(simplifiedSecondBatch[1], { name: "moveTreeComponent", value: { - actionTracingId: "tracingId", + actionTracingId, sourceId: 2, targetId: 3, nodeIds: [5, 6], @@ -1136,7 +1137,7 @@ test("compactUpdateActions should detect a tree split (3/3)", (t) => { 
t.deepEqual(simplifiedSecondBatch[2], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 4, treeId: 2, }, @@ -1209,7 +1210,7 @@ test("compactUpdateActions should detect a deleted tree", (t) => { t.deepEqual(simplifiedFirstBatch[0], { name: "deleteTree", value: { - actionTracingId: "tracingId", + actionTracingId, id: 2, }, }); @@ -1242,7 +1243,7 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet t.deepEqual(simplifiedFirstBatch[0], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 2, treeId: 2, }, @@ -1250,7 +1251,7 @@ test("compactUpdateActions should not detect a deleted tree if there is no delet t.deepEqual(simplifiedFirstBatch[1], { name: "deleteNode", value: { - actionTracingId: "tracingId", + actionTracingId, nodeId: 3, treeId: 2, }, From 7ec83d106e4b6addc256c25a54c25e364e960ad4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 6 Jan 2025 10:25:25 +0100 Subject: [PATCH 349/361] remove unused updateActionStatistics route --- .../annotation/TSAnnotationService.scala | 29 +------------------ .../controllers/TSAnnotationController.scala | 10 ------- ...alableminds.webknossos.tracingstore.routes | 1 - 3 files changed, 1 insertion(+), 39 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 0fcbee49b5a..8756b4ec362 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -25,12 +25,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingUpdater } import com.scalableminds.webknossos.tracingstore.tracings.skeleton.SkeletonTracingService -import 
com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.{ - CreateNodeSkeletonAction, - DeleteNodeSkeletonAction, - SkeletonUpdateAction, - UpdateTracingSkeletonAction -} +import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.SkeletonUpdateAction import com.scalableminds.webknossos.tracingstore.tracings.volume._ import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import com.typesafe.scalalogging.LazyLogging @@ -595,28 +590,6 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss } } yield targetVersion - def updateActionStatistics(tracingId: String): Fox[JsObject] = - for { - updateActionGroups <- tracingDataStore.annotationUpdates.getMultipleVersions(tracingId)( - fromJsonBytes[List[UpdateAction]]) - updateActions = updateActionGroups.flatten - } yield { - Json.obj( - "updateTracingActionCount" -> updateActions.count { - case _: UpdateTracingSkeletonAction => true - case _ => false - }, - "createNodeActionCount" -> updateActions.count { - case _: CreateNodeSkeletonAction => true - case _ => false - }, - "deleteNodeActionCount" -> updateActions.count { - case _: DeleteNodeSkeletonAction => true - case _ => false - } - ) - } - def editableMappingLayer(annotationId: String, tracingId: String, tracing: VolumeTracing)( implicit tc: TokenContext): EditableMappingLayer = EditableMappingLayer( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala index 0b982410ed3..c6c60595a8a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TSAnnotationController.scala @@ -91,16 +91,6 @@ class TSAnnotationController @Inject()( } } - def 
updateActionStatistics(tracingId: String): Action[AnyContent] = Action.async { implicit request => - log() { - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readTracing(tracingId)) { - for { - statistics <- annotationService.updateActionStatistics(tracingId) - } yield Ok(statistics) - } - } - } - def get(annotationId: String, version: Option[Long]): Action[AnyContent] = Action.async { implicit request => log() { diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 19b7ce3a685..84a00fb63ef 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -9,7 +9,6 @@ POST /annotation/save GET /annotation/:annotationId @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.get(annotationId: String, version: Option[Long]) POST /annotation/:annotationId/update @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.update(annotationId: String) GET /annotation/:annotationId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionLog(annotationId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) -GET /annotation/:annotationId/updateActionStatistics @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.updateActionStatistics(annotationId: String) GET /annotation/:annotationId/newestVersion @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.newestVersion(annotationId: String) POST /annotation/:annotationId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.duplicate(annotationId: String, newAnnotationId: String, version: Option[Long], isFromTask: Boolean, datasetBoundingBox: Option[String]) POST 
/annotation/:annotationId/resetToBase @com.scalableminds.webknossos.tracingstore.controllers.TSAnnotationController.resetToBase(annotationId: String) From 169d901d0443dcb6a5a7212970edb5467a831cf3 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 7 Jan 2025 11:47:33 +0100 Subject: [PATCH 350/361] remove superfluous for/yield --- .../annotation/TSAnnotationService.scala | 66 +++++++++---------- 1 file changed, 32 insertions(+), 34 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 8756b4ec362..621b126555b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -141,40 +141,38 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss updateAction: UpdateAction, targetVersion: Long // Note: this is not the target version of this one update, but of all pending )(implicit ec: ExecutionContext, tc: TokenContext): Fox[AnnotationWithTracings] = - for { - updated <- updateAction match { - case a: AddLayerAnnotationAction => - addLayer(annotationId, annotationWithTracings, a, targetVersion) - case a: DeleteLayerAnnotationAction => - Fox.successful(annotationWithTracings.deleteLayer(a)) - case a: UpdateLayerMetadataAnnotationAction => - Fox.successful(annotationWithTracings.updateLayerMetadata(a)) - case a: UpdateMetadataAnnotationAction => - Fox.successful(annotationWithTracings.updateMetadata(a)) - case a: SkeletonUpdateAction => - annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" - case a: UpdateMappingNameVolumeAction if a.isEditable.contains(true) => - for { - withNewEditableMapping <- addEditableMapping(annotationId, annotationWithTracings, a, 
targetVersion) - withApplyedVolumeAction <- withNewEditableMapping.applyVolumeAction(a) - } yield withApplyedVolumeAction - case a: ApplyableVolumeUpdateAction => - annotationWithTracings.applyVolumeAction(a) - case a: EditableMappingUpdateAction => - annotationWithTracings.applyEditableMappingAction(a) - case a: RevertToVersionAnnotationAction => - revertToVersion(annotationId, annotationWithTracings, a, targetVersion) - case _: ResetToBaseAnnotationAction => - resetToBase(annotationId, annotationWithTracings, targetVersion) - case _: BucketMutatingVolumeUpdateAction => - Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. - case _: CompactVolumeUpdateAction => - Fox.successful(annotationWithTracings) // No-op, as legacy compacted update actions cannot be applied - case _: UpdateTdCameraAnnotationAction => - Fox.successful(annotationWithTracings) // No-op, exists just to mark these updates in the history / count times - case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") - } - } yield updated + updateAction match { + case a: AddLayerAnnotationAction => + addLayer(annotationId, annotationWithTracings, a, targetVersion) + case a: DeleteLayerAnnotationAction => + Fox.successful(annotationWithTracings.deleteLayer(a)) + case a: UpdateLayerMetadataAnnotationAction => + Fox.successful(annotationWithTracings.updateLayerMetadata(a)) + case a: UpdateMetadataAnnotationAction => + Fox.successful(annotationWithTracings.updateMetadata(a)) + case a: SkeletonUpdateAction => + annotationWithTracings.applySkeletonAction(a) ?~> "applySkeletonAction.failed" + case a: UpdateMappingNameVolumeAction if a.isEditable.contains(true) => + for { + withNewEditableMapping <- addEditableMapping(annotationId, annotationWithTracings, a, targetVersion) + withApplyedVolumeAction <- withNewEditableMapping.applyVolumeAction(a) + } yield withApplyedVolumeAction + case a: 
ApplyableVolumeUpdateAction => + annotationWithTracings.applyVolumeAction(a) + case a: EditableMappingUpdateAction => + annotationWithTracings.applyEditableMappingAction(a) + case a: RevertToVersionAnnotationAction => + revertToVersion(annotationId, annotationWithTracings, a, targetVersion) + case _: ResetToBaseAnnotationAction => + resetToBase(annotationId, annotationWithTracings, targetVersion) + case _: BucketMutatingVolumeUpdateAction => + Fox.successful(annotationWithTracings) // No-op, as bucket-mutating actions are performed eagerly, so not here. + case _: CompactVolumeUpdateAction => + Fox.successful(annotationWithTracings) // No-op, as legacy compacted update actions cannot be applied + case _: UpdateTdCameraAnnotationAction => + Fox.successful(annotationWithTracings) // No-op, exists just to mark these updates in the history / count times + case _ => Fox.failure(s"Received unsupported AnnotationUpdateAction action ${Json.toJson(updateAction)}") + } private def addLayer(annotationId: String, annotationWithTracings: AnnotationWithTracings, From 88c10f9745f9b8ba00e9eeda7622365c4098a04b Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 7 Jan 2025 13:25:54 +0100 Subject: [PATCH 351/361] fix merge conflicts --- .../oxalis/model/sagas/volumetracing_saga.tsx | 20 +------ .../oxalis/view/jobs/train_ai_model.tsx | 12 +--- .../left-border-tabs/layer_settings_tab.tsx | 60 ------------------- 3 files changed, 2 insertions(+), 90 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index f8710716e44..d7b17288db7 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -69,45 +69,27 @@ import { takeEveryUnlessBusy, } from "oxalis/model/sagas/saga_helpers"; import { -<<<<<<< HEAD - deleteSegmentDataVolumeAction, - type UpdateActionWithoutIsolationRequirement, - 
updateSegmentGroups, -} from "oxalis/model/sagas/update_actions"; -import { -||||||| 2b1242f5cc - deleteSegmentDataVolumeAction, - type UpdateAction, - updateSegmentGroups, -} from "oxalis/model/sagas/update_actions"; -import { -======= ->>>>>>> master createSegmentVolumeAction, deleteSegmentDataVolumeAction, deleteSegmentVolumeAction, removeFallbackLayer, + type UpdateActionWithoutIsolationRequirement, updateMappingName, updateSegmentGroups, updateSegmentVolumeAction, updateUserBoundingBoxesInVolumeTracing, updateVolumeTracing, - type UpdateAction, } from "oxalis/model/sagas/update_actions"; import type VolumeLayer from "oxalis/model/volumetracing/volumelayer"; import { Model, api } from "oxalis/singletons"; import type { Flycam, SegmentMap, VolumeTracing } from "oxalis/store"; import type { ActionPattern } from "redux-saga/effects"; -<<<<<<< HEAD import { ensureWkReady } from "./ready_sagas"; -||||||| 2b1242f5cc -======= import { actionChannel, call, fork, put, takeEvery, takeLatest } from "typed-redux-saga"; import { pushSaveQueueTransaction } from "../actions/save_actions"; import { createVolumeLayer, labelWithVoxelBuffer2D, type BooleanBox } from "./volume/helpers"; import maybeInterpolateSegmentationLayer from "./volume/volume_interpolation_saga"; import { floodFill } from "./volume/floodfill_saga"; ->>>>>>> master const OVERWRITE_EMPTY_WARNING_KEY = "OVERWRITE-EMPTY-WARNING"; diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index 821aa4f944f..d0478e6b302 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -34,23 +34,13 @@ import _ from "lodash"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import { formatVoxels } from "libs/format_utils"; import * as Utils from "libs/utils"; -<<<<<<< HEAD import { AnnotationLayerEnum, + type APIDataLayer, type APIAnnotation, type 
APIDataset, type ServerVolumeTracing, } from "types/api_flow_types"; -||||||| 2b1242f5cc -import type { APIAnnotation, APIDataset, ServerVolumeTracing } from "types/api_flow_types"; -======= -import type { - APIAnnotation, - APIDataLayer, - APIDataset, - ServerVolumeTracing, -} from "types/api_flow_types"; ->>>>>>> master import type { Vector3, Vector6 } from "oxalis/constants"; import { serverVolumeToClientVolumeTracing } from "oxalis/model/reducers/volumetracing_reducer"; import { convertUserBoundingBoxesFromServerToFrontend } from "oxalis/model/reducers/reducer_helpers"; diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 4546d9d9c48..30d8af3a370 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -132,68 +132,8 @@ import { } from "oxalis/model/actions/save_actions"; import { addLayerToAnnotation, deleteAnnotationLayer } from "oxalis/model/sagas/update_actions"; -<<<<<<< HEAD -type DatasetSettingsProps = { - userConfiguration: UserConfiguration; - datasetConfiguration: DatasetConfiguration; - dataset: APIDataset; - onChange: (propertyName: keyof DatasetConfiguration, value: any) => void; - onChangeLayer: ( - layerName: string, - propertyName: keyof DatasetLayerConfiguration, - value: any, - ) => void; - onClipHistogram: (layerName: string, shouldAdjustClipRange: boolean) => Promise; - histogramData: HistogramDataForAllLayers; - onChangeRadius: (value: number) => void; - onChangeShowSkeletons: (arg0: boolean) => void; - onSetPosition: (arg0: Vector3) => void; - onZoomToMag: (layerName: string, arg0: Vector3) => number; - onChangeUser: (key: keyof UserConfiguration, value: any) => void; - reloadHistogram: (layerName: string) => void; - addSkeletonLayerToAnnotation: () => void; - deleteAnnotationLayer: (tracingId: string, type: 
AnnotationLayerType, layerName: string) => void; - tracing: Tracing; - task: Task | null | undefined; - onEditAnnotationLayer: (tracingId: string, layerProperties: EditableLayerProperties) => void; - controlMode: ControlMode; - isArbitraryMode: boolean; - isAdminOrDatasetManager: boolean; - isAdminOrManager: boolean; - isSuperUser: boolean; -}; -||||||| 2b1242f5cc -type DatasetSettingsProps = { - userConfiguration: UserConfiguration; - datasetConfiguration: DatasetConfiguration; - dataset: APIDataset; - onChange: (propertyName: keyof DatasetConfiguration, value: any) => void; - onChangeLayer: ( - layerName: string, - propertyName: keyof DatasetLayerConfiguration, - value: any, - ) => void; - onClipHistogram: (layerName: string, shouldAdjustClipRange: boolean) => Promise; - histogramData: HistogramDataForAllLayers; - onChangeRadius: (value: number) => void; - onChangeShowSkeletons: (arg0: boolean) => void; - onSetPosition: (arg0: Vector3) => void; - onZoomToMag: (layerName: string, arg0: Vector3) => number; - onChangeUser: (key: keyof UserConfiguration, value: any) => void; - reloadHistogram: (layerName: string) => void; - tracing: Tracing; - task: Task | null | undefined; - onEditAnnotationLayer: (tracingId: string, layerProperties: EditableLayerProperties) => void; - controlMode: ControlMode; - isArbitraryMode: boolean; - isAdminOrDatasetManager: boolean; - isAdminOrManager: boolean; - isSuperUser: boolean; -}; -======= type DatasetSettingsProps = ReturnType & ReturnType; ->>>>>>> master type State = { isAddVolumeLayerModalVisible: boolean; From 0ac9c4ae4488d7bd2df93dacd90d18208aece3f6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 7 Jan 2025 13:33:54 +0100 Subject: [PATCH 352/361] Revert "WIP: switch migration from threading to multiprocessing" This reverts commit a8babc04cba6ad2ff14e0cd9730234d5d6743884. 
--- .../migration.py | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index b4048aef54c..24d5c2c6f82 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -10,7 +10,7 @@ from rich.progress import track import msgspec import concurrent.futures -import multiprocessing +import threading from functools import partial import heapq import sys @@ -39,10 +39,10 @@ def __init__(self, args): self.dst_stub = None if not args.dry: self.dst_stub = connect_to_fossildb(args.dst, "destination") - self.done_count = multiprocessing.Value('i', 0) - self.done_count_lock = multiprocessing.Lock() - self.failure_count = multiprocessing.Value('i', 0) - self.failure_count_lock = multiprocessing.Lock() + self.done_count = None + self.done_count_lock = threading.Lock() + self.failure_count = 0 + self.failure_count_lock = threading.Lock() self.total_count = None self.before = 0 @@ -50,15 +50,15 @@ def run(self): self.before = time.time() annotations = self.read_annotation_list() self.setup_checkpoint_logging() - self.done_count.value = 0 - self.failure_count.value = 0 + self.done_count = 0 + self.failure_count = 0 self.total_count = len(annotations) - with concurrent.futures.ProcessPoolExecutor(max_workers=self.args.num_threads) as executor: - list(executor.map(self.migrate_annotation, annotations)) - log_since(self.before, f"Migrating all the {self.done_count.value} of {self.total_count} things") - if self.failure_count.value > 0: - logger.info(f"There were failures for {self.failure_count.value} annotations. 
See logs for details.") + with concurrent.futures.ThreadPoolExecutor(max_workers=self.args.num_threads) as executor: + executor.map(self.migrate_annotation, annotations) + log_since(self.before, f"Migrating all the {self.total_count} things") + if self.failure_count > 0: + logger.info(f"There were failures for {self.failure_count} annotations. See logs for details.") sys.exit(1) def migrate_annotation(self, annotation): @@ -86,13 +86,13 @@ def migrate_annotation(self, annotation): if time.time() - before > 1 or self.args.verbose: log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) checkpoint_logger.info(annotation['_id']) - except Exception as e: + except Exception: logger.exception(f"Exception while migrating annotation {annotation['_id']}:") with self.failure_count_lock: - self.failure_count.value += 1 + self.failure_count += 1 finally: with self.done_count_lock: - self.done_count.value += 1 + self.done_count += 1 def build_mapping_id_map(self, annotation) -> MappingIdMap: mapping_id_map = {} @@ -555,8 +555,8 @@ def replace_before_first_slash(self, replacement_prefix: str, key) -> str: def get_progress(self) -> str: with self.done_count_lock: - done_count = self.done_count.value - percentage = 100.0 * done_count / self.total_count.value + done_count = self.done_count + percentage = 100.0 * done_count / self.total_count duration = time.time() - self.before if done_count > 0: etr = duration / done_count * (self.total_count - done_count) From 534f80d3ed882d5ac487e411281c06a85812299c Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 7 Jan 2025 15:25:07 +0100 Subject: [PATCH 353/361] refresh snapshots after merge --- .../annotations.e2e.js.md | 8 -------- .../annotations.e2e.js.snap | Bin 16749 -> 16665 bytes .../backend-snapshot-tests/datasets.e2e.js.md | 1 - .../datasets.e2e.js.snap | Bin 2689 -> 2673 bytes .../backend-snapshot-tests/misc.e2e.js.md | 1 - 
.../backend-snapshot-tests/misc.e2e.js.snap | Bin 1239 -> 1228 bytes .../backend-snapshot-tests/tasks.e2e.js.md | 2 -- .../backend-snapshot-tests/tasks.e2e.js.snap | Bin 5467 -> 5455 bytes .../.gitignore | 1 + 9 files changed, 1 insertion(+), 12 deletions(-) diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md index f94da6482c7..c8792a296d7 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md @@ -26,7 +26,6 @@ Generated by [AVA](https://avajs.dev). dataSetName: '2012-06-28_Cortex', dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', @@ -150,7 +149,6 @@ Generated by [AVA](https://avajs.dev). dataSetName: '2012-06-28_Cortex', dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', @@ -390,7 +388,6 @@ Generated by [AVA](https://avajs.dev). dataSetName: '2012-06-28_Cortex', dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', @@ -564,7 +561,6 @@ Generated by [AVA](https://avajs.dev). dataSetName: '2012-06-28_Cortex', dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', @@ -745,7 +741,6 @@ Generated by [AVA](https://avajs.dev). dataSetName: '2012-06-28_Cortex', dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', @@ -872,7 +867,6 @@ Generated by [AVA](https://avajs.dev). 
dataSetName: '2012-06-28_Cortex', dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', @@ -1001,7 +995,6 @@ Generated by [AVA](https://avajs.dev). dataSetName: '2012-06-28_Cortex', dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', @@ -1125,7 +1118,6 @@ Generated by [AVA](https://avajs.dev). dataSetName: 'confocal-multi_knossos', dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap index 4ce14317c58f27dc49891798adca4f92bb4a66cf..c0d2ea19ff2f3e24124bdb225f4b5ab5b12ae0ba 100644 GIT binary patch literal 16665 zcmZ{KWmH>T(>CtzE=7yGODXQ|?he7-t#}K?-QC?20+iw|#R3$k5TpfK4}U^?iZ*6E6=IxbKh}5U=zIvdrnb z4%_qD4VmjZk+=?-^HF!!&g$r03Y+sN_;yv-?tW?PaQ9Q3N+W&MHbCrHOr@y#d}6u` zeCd(UHDP{r->cbIT{(8%)||Fip^*HbxuiVAg~WyW2f-8G0X<$tixK2ZAVCKjAb5T! 
zgG{iQW%*OWXdZQlF~9c3?iyXjD$DMt`rgn;m~#4miN6jlU*b=u6K+G z81?TYD|-6lgy!G5b~Qcq^v+v$nSX1S z@31N+2aJU<*EGzWw|aJ?d^Jrnrh*>+)gNVWsHxdL_U7*N$TzlmcvK}~P3KNMjWVp3Gy4mc;!u8pMHgHDQS+SpUEpOJ-;JQ+fTThC8?x(dvg98z)5GOV#A1)H zE;{|pkQ3H){c4^mTR%gg*9swFsa8R)^luJL9!nkU5Xa1K_MF|~Q_)?N71z7o1M|xk zT}_ND#p;6LxO3Y^x<|f({p%*HPN&yV$jyEslI!O;y#kc{dU6ciV$ zZj?)kjpnFtf*;gVzuQa-syT!%0(?w%|KZlufbLD!DX`Ml#ZqGo9(# z9y)?jh&XYO*Xr3EheG7lvzC7dWYpS>J6P;(oYr!Q#9k`*E$NH~#lGp#W> zJvr&rn4v)>?6zt!@CMTJ95E@8o;7!K=g{if(LSItzicFtMyT@78CYeJU%t2dbR*Fs zFi`0%G0*jVa+0$xttSg3-RalWbB4>nA>LKjz#X;5w`v#1ChS|)c$}5?d}en(UMqLv__L*%o6kUuDX*}~3}8*1$V9DThGPJH z#buabq^-LoF&e@&qm(}}`bwg;QH|olZhXqomN?5=Hfiril*8@ygPJBFJhRZ!G=(u$ zpzAcTsXLp~^*6bTWURrKnY~cXx(YPsGsNG(6tn(vJj-7(:HZeGk$-@U8J3$EGA zKrn6V01<_eIOF@j+>uyNHBTx*#>#D7W2gS6goa{8hu+T6Iod;g5OYw=lJeqjW)Tq5 zfI74kJ)9r?uv6?b;oHOb0)}NuExZGnLp%q+8gI|*{i*zZpM#NGJO+Ou8`?LNe;R^0 z-`|CYmp2Q51s!_Fu^!OtGfe9+_%b?OQu;qe45WUkWSO}=Grcf4EHLU)n?yBn-?a6a zI8LF|&)3xxVB5;il4J{T^O&p67(8e!_ae9L=x98+=K$OiYr!Gz?xke-E^I$)3#cqb zlI4$logU1occ@#VUV;6}OcmLyFAJABx)jOl|H1C+KBj!taZ+ixv_C3_8>kr**cz%B zzY38W5PL7iOhRC5ZNt%+9G7Vmw?@bmqB~$?t;sP+jXCn^03|Q5A35F#;)Ge~gg=(b zce6v%yNK}C3G^)QRc4l=!-*dU{SXlm)xgvIl4wbaD*O%f7L@b;COWWQEmTV)JQnC4@HhS2OtP*J1($|I` zOYt`;KYqCJ-QI#BM|jMyt0N)Tu7lpy`Zwnx+vUIdwz8yj+`_^gtn4fmjKab#tis|9 z{9pr<45qXp5C&aZ3kc)0UoJA)UN%DKIO66`3*^&Cv;%Cn)*uei#9vqs)3|`;fqzc-4I~Hga1j3F(cqlY^9mpc zEyLYm1Cro9_<&M~&vuc!3amUILb0EQ>x zAqWjp!e3nJQ*SmLBPjx1)B%O){&|pa!e;^+glvy^dIvJsc;zy@)=dW^r+9DvLfEu_ z9Jzn~J~RI|p$yVxTbc~8@s}L%^-vMGWcs<5i8?*9=%*sk!?W#Q1SeYVdy)fNdTx@n zu~GCg{8s8VfH}t?Ip!N?YUJ+@ART67JMacJZ%(pd8`Z-JRdqt*ki>}}11)+1Y@CqX z0;smhP~TEw_T;=o5rpVy;>W>10!8{@A;(+x&k&;N0Zx5eXy7fi@-)Ih6)`OmDd%0<*SPgpc3MxXIQ8jwec3KuiSN+@NrD8+{%f zJ7M=j7Uu76Rs2&@ltpyqF+%Yw=Tlh;Op+L*Zoi{k0#?6 zhwqPpkz&*4$v0Y(3a!S+Ng6jmS+lhtzp{_^B_zrwG7B5F}a|G z@~KiXL~|O=FIYPpC@SYqclS;lJN@nuMw}{WSVTZ}zmbuV9L9=_4z|QFLzcj}L_A<5 zdoNRm@)Gl;)ekgkhy^n=FX*xY}nuNL{fX7a;6*r+N&O!9IGOc?yA zh0DxKtBi%#Gh9Gtimfgb88GWfXfT-gap+Kk59o~23xeY&gHR%-@Qk)8h%BNRlJCnf 
zJTIduGmEVCl>H>wi_e%J^E1_1$oM<+Kup*gE-3SrJiY)~@Wl?@$aTg_5Nz)NRcLz1 z+h^gwPm78stHooPc1}D8LA@QcMAVjY1Bp1ZQ;OI8Ojjtnl!#(Z4;lW78gc&({oNX7iy@ne_eVW5up-z zz1+gbGt(8x?5$?m;wkb)=|(h7-02uQ>kgS-4SZm`x#HWS5|-P$caT7paPiu3W9rVx zqjYuS8kp7QC}>{mE!=GrD6JOVMfzL5lu?cJw=Ia=mXh$)XFUIJF;?T*=OdvW$Cr$1 zN@H2oqUPV6nRmZeyM`a{cx1Awop&!HNO?1Sc}j68t~nYFik&<<4E|aA=|aRicKGYa zlqsyx3&HO7ZGQVRBXgpmIVL~BJsL^FlL&@J5N*ssj~gyY@b?>C?39@Oa*QF4f8Psq z{oJVZ#$?>h&_3-Byg=_odYN(lV2pZ!m_Z3>zb`6l4G$nqB98(%;dpvX$^Kn8R%r>l zH#~g9U4SGJ$^s(X<-;<@`(s7V{`IpPfVqSO^}FAyEN5@TxFz4fkx04OaC1l^4Dd9{ zkum5C;@Vn^x(;3%?>JOq^%J!UH~N{+tnEIC5k%lBX%mEG4s_f5@BgCS{cV)LQ)F;% zY4exRVNSzMBR7b3v4rPN-sT&#B8t=fN8KnBlUKv1H|fTN=#=7o0>`e^D^#SFwRgN* z?EP~l`mUHO^a`i~YZIsTnV)9v`W0cvP@+8!M5(0 zGXLE|dgrPrxesG-oN(uuS_@I?`Y`6W*gm<>QS90_`DdSf!tqvccHx)Nc1GD)b0~|+a$e*gY5XGiBr>qB8Nn^( zy<|>VoiDmpP8c&PQTar@bjVjKQDN^#D~9Z{XX-wG#vre2mC(n#<|f?B#bBu(&8mau zvPykmer?fvlpq$6vgbR_vxp(LbJgp{6(L<IjZMTqe?JGu{@@E4er9tQ$pAgvG_-M6-0K?wo(Bj_`83Y3GyLidni-Mp_HuB z=-dZ)9q4)vWE5ft9Y3%xW&w%hG`wJOhsQ-MNn1K;l&LL>8__z3cTxd<^`GW3ckoLf zpZ5b4g$|a751IcdvL1BwiXYr1q&@1fD};8_hL4@JnA08HmF#gJWbRVmh>n5-l%b#I zkx6=I5O7T)`k2pb%wyf$2g}kp@$qSojp>M{R3e8~KDD3boxK1CRF{b{-$s)k2hYU6 zdPh(_Q?VV`CM7%uDy1j1!P0uV;Sr~~EMt(W83!BR0CXjuIeo`YIs!%TaAgUt91sEt zAlpcReDJRYaAz`7=u}!yRr}5<(nq_e&=57?iz6 z49%STP||Q%nt!H7>N`QG_Ocm0LzM}|n!i@WeJ?vu>F8?J!mM%GC!qyzu$U~IS*73@ z#HX?AUjUn8RDGePIIr)e-lem@lp}W0EQOu08=mb`Np5q;SN@;>WEV zOCC>22Zm!GXUz$mim80dgrDOD>JC?k_$rXO#fmBbDE4)3LX(L$*IS&A0H%tC9nay8 zB95Du<0Yv(G9{(8`;l~xApalkQmSfSjHwG3!bZJ?&uQ{ree9tfUrPi*n9PrBNGSX% zgtm{rWOoV<|Iup&cs}l_!ub!8U-2Y-*rDYE6Atf`?d5?#?#%AqN0XfrBFfsDG^k)B zhR-FPVL^So;hx<%X{I3{ky@+{a%%= zUX>oG@kc-hzq9*=fwbYeeB#W4!(1bV$Fm?A0xyIv3Hd>b( z8MTIh2vlcm%_LevK$ZVv)57U|6SA{zu)CQ|l2Lo6)H1cnYgd`0RxE>cNwrh4zswSkOLr0R=LrSYw4+ zq0w>oxN46xG^g_&4P^C~6>N26OOqN}W2gGCr^)1ZNb1z<*y?_=x?j$u4UrP6i+MqJ z%#RaDVzjbhWD6_W4o0LD`TZMMTeE;7@>eR1d>1La#}Wq-G8zHW=w%IlTO- zOQHomNJgmafD4N zf>ykZuQ?2E$;kdDkh+D9uMekt5vD*q)CyT!-RT3XtZRt^_K&u@>0=;WFK9TGT--!; 
z`~>(kFqam0&cp5ejoYbeE6Q4IAtyoqaOZ|F*PS@F{KE3re(@eBsDmJsL8cw!xW3c` z*;K#P6dAVx5@zX9B)I6Lnww7yT19s~= zoaa;S>2jE{j!MDZ%j3-7w8&DfODsIShYVvgzP)g}r{gVD(HH)Yp?CcrlXU}ca@B8G zI5?uj`Bq)%8u!!*Z-Id`x3qZ>OhPIFkSBs)ev0vKL6|)+7>$1hE!{Z0i9BgW%Rc@u z8sD_1vvfyjNE@Rxhfhn1)GI>=ZMO1hpomjHEzXYx1leCMW$6*TC>H5aCARq+ebsUC zF^z3nB$%eQ`x6WIH}l^{+1sF5h5`a%p=rp>rvLFkWE5^c+@mPN7DyuMVt;Nt1H}0ni#{x?|xGLiMEobWwn;S zAxOz6Rg30l>XnI(P=XF2-LU1>Aq-$>7=UnyB}${i*d7g#gZHEc6J3m&;@w?(b+IG# z!G`YRNx|`BN!9UD#7v~()K$Ar^R?$c*wVwos=#Od;sy83qGmwW`D{Zo6FnU}>XIyH z&bdyKBNBTDV*3{_xC)$Er+yw0jr;z08kLxsENp0LgvMLRghk|3EI)kY0OdL2I4mwj zi(hM_iudW{5e0e75HSTID?l!62||kZe`OFyobu;F@7qJ&S4OK$0zY70KV@Wpq?5NG zXn_~YtwS8ZnZrmW+Nq8i@PoGF3?`}${pK;owcmdypYsnV^jTFzX)z@D(Fk{9L1~c@ zQB4@AfOyFVWRQBju*6bVR=wLwA|m!f((zRm)>$Dp><@VT^0_>KA(n{zO-pH!zJ_B5 zpv`h72aX2Iv#YnRA=TtUya^?UY0U2H&|lr7X+uxW*hb|YS|Z663atGD0bc^Z#j{u% z{BCJ5LyEb5%|patbUny?DD16`R)*eiIAWOoaEQ~5aaZ<#h?kS33TCFg$dDduQ)zBfR zK2?r@qlYzInp0k^4Z34{ltcbxRdk@nq@uT~i|s6Bk;7D8@5H1}5!cmbuwSfEnDFUE z%|(n^LM1LYnZsS|XVL7iI14K6Qh{F*ZLC9T`(D^zr>o)^oE=F@Ts>D4p2?8 z`~Dt6ruaTKb(1OH%mud3NYbFJC7e$1(+w01ZmrJ&f1kEvRV zF%?#JM1g&Um)foJ+oGIlIyx)OtQ&;L>53rn^1u*t~BPiuv3x?Y}?0b$Yq0ns=P7!&pCpOOllDXAU zY?DAUqMVl7-9Pl_(k}EGv6Dv)gZpBW3m`LUqPPz$4FA%II=>4M3pQl#ZESnXDwm<< zJrPfNxOw!IRcc)ZhsU*37MOalz9;e$9{ISLI?E~il_;sA-LmXbq2r^fDGT>eL6X@Y zf~TM30)kez<#R~hQTDNgkpijm#7N*q)UkKD*D6_| z3fZm|kapG$N@UBVl6PQw61D1691|YwRD*nNYT}(M@+?!mjX%iCSEsVvxiqO#m&nWF zi`HDZ&zZZ|S-T%o7{^`3Y?j1zEAEpd^8H83@8si(B9{K;GH+Bqlqz^Fxh~SYRt8l5 znbZhgJ?QjFBO!4vGs83;;~o&1)xw%qG_Q)=87?Aii50W@$&<6&bbUsAl{ipUF|sha zj(wL)wztXH@ljJv#aF8=rb%prJ_$8_9kt}6sy6N^M4NiBEC#Wh%~(sWVo@0X1M@!( z2z6Zo75xIOZ*l;@f+p@|yXpG(qbxl27n4Y}^?)%A1$o;h4NOu9@I!a+=+dcqOasw)U_x3F#b?Z3#jHNg{dJ(0CD)Kd|1ZJ)3g zQgRL1y^yyYzI7KT!?PG$3K4OQqF8&3q#EC4ubaEImQ;a9oXr}JvG8;zBfioF6G|zR z0~2NvuJ-C#o?>Qyzo3tK)>Hngj0kT?bSd6zPP?7=^h|{f5y`M&XrEy3d9A=c!c{}*#iIeD11HDA$F^%FwWE$NGwc7*%wMbH9=f2J zB<;RDny7+eB9^Ga7Z4xnQcp3v(yS@9Je6w|hcp_M-+*Wuu4S-l@S`5C17hIGLkBjX 
z-*ezi_CPJHO6@2F<#7YfWzS7=D?p?wfFisDznFhi5&O=aTYdtGu#tdep}?LkT^K|{ zc*zHPVGi63c#i1jE@+*>s(Hi_4&jGxhn3K*;zX-Y*)rBC0)uS(54=P46Q`New0`hi zLzl6TzDo%;@mUBwQ5L0jRtwPRE}QTIqu=x3_ugFbMx7K9i#HUi7ie=i={G7^Xpq~j z%)!5Qe^A{e;@7rxJ4}D#+ETd=PpOw}{d4xq<0d?XF^tS;*QMaw`3nu(Q{6~OVbff< z6u+(8Tx}b91>fx&DG7&}^iH31n1X{r#epTF>Myhm)25qn#OG^;ZMp@eyk8ON{0?(d zmTgu+!0GRa`oW@cZ8DZ~XKnZqq64<#(4#@}Eg9~UtquPnDq}8>PdWTc<@eslM$yBC zha51S9Ah!)k%Z@2Y0h0CqtN zdUFoLmIVI6QRM{tP@&h2jAfdn)B*R&{Uc7ODDJm_h3O#iVHl1OE!wNMZ-y8>d@v(0 zGPe6<=TlVF=_2~)A~;M!U^W~JV=NjD>o^?aEE77w9>J^xE`Sjziw;fElB%MP{s{Q6 zG+YSh$qYnAZXdZt5Q9 z!-T{nL#FtyVHSL58Cc?(`^R>@!sIlX@gqHJ^X<$r&#^%((E*GeCQs3wx3%8xvNJ?loncvZLxsQezFfMHgMAg&8JCh9fR z3emk_!JfT1|E8P2zGGP>&@>Ped&qcE_F<@8w()Tq=VpK%58cV+=|SC{3$(5GHno>| zZuxYr_Pgb|MAhBoiIO59go5cz{HnOP81|rky?NiLol!rSvp5o=eto@Z z_5(NMud3_S5x2VscZ73t2)dwTkKtX1=fkbEcx%s_v7_f(M@Yy&SBjmJ$rWGalH(3CxB*UJo!urxjI;9`*UBoIq?>d@?Uma=BE+P!p=il6{r6be>?#1~8Uj{9GoC%ko ztBSIZx!v{iM>=zA&;`T9nx2eR``c<7i|JJnkfb-%4+9w z`4j_Lk^y|93)CjcIqI)}<3|g33uFue)Y5f}h4846VKvBU8^f8J3zMbMVr|K58$(TA zfL4LNJeP#>_M!aeB`AoprPrUeOy8$w#slx^t!NP(1sjjutc7g&!{h<{V!zT1|0aF_{@T+w2- zq{O}0mT5Bow8N;cp46kxLcid%c4anO2fRe&L3Nq$XQKx>SZAV}j9VF0wFJ$p&lfoY z#W06w5Nw5JCQ)343bU~X+O0i>LN}oi_aZW@9C`IJhgM@EMnX7d*y}whF^=CruU@6p zxMzpfn}M|y+_m{Ex_*BGdjS#>A~d@my=n7NyrNpUh-p+C*@(D8&u=bir7mC8R(nLD zdsIr(an60M?@764B;4; z_T|^dAjP24e7aA~nTgfvksv~=c-G()wN595ubPp|@{7OuaLt9`J&NJDH6uMKniDi* zHwnwU-xW&X`>V(1!1ngM%S-E(s;5NI513`H7>!l|VIaodFoHoo^jAsr&P@6MK8^y8 zTDGh@Z}s@pOzRW7z{dT*-@bnqK)$U8-oA(AB8XGIMgWpZ>pxbX|9TJIC0bC61L<17 z=!t~0UFKups>iL@L_1JasuSvo+SOO9#~3e{22&I+172nX9@hmHj%D9MbHC!QK0#|j zWwKPO&*MXTIcY7X6JoH}E2_bRXOLVRN3{gk8n7$&8*MIEZGGm@WiEmeN8}8G%spe? zX$FFkF&vGIo?m-!W9AAe3GfIRP{JAl`@7nO5Lg2jPGsLRX`ku%;hLPsI-V}1UTv+{ z1%-+qQKJBUUD#|y9Pw2c@$YObI=}Uknut5|W#Ri%G4tk&wmfJ~UTXw%t^;VsSW5s? zqXc<BIOT_3jLqOEyYB`NHro@Ud=;X6m8A6bDSe}2Eks=PK{7v? 
z@+(@V^I4?Y5J%m;nVOGF>1z$;5;++q_p$Z_zqQN~`LAW#%xGfUk6>@a* zuiJpvp9q2e@S*RfEZB2WP0-@wczny~LW<1%*FGAx;<3qXUL3V=k&O0X?i*MlP(al>&{kz5p*WB>GbjaF?(#$bfIO1HhN5QK5 zsa2)sC}-11<^kXRj<{nsr-5x)bMC#b7krOS9;M= zT1ZC&VCByNKOaHNa2&Z3F0#QF*l!VCW(!@0&Fucjj7^cs>{jmJ-%RX~W@6|B#-jr= znc-+BXX~{K17mw>SZJvN3Y`i}rFu;}QH{Znu%}ad3=)#fx*E3~tX-Z&6pX zO@XGmrST(h09^S*wXwK%CHYO39*-2SI~hDy|1^Dc2!U)mgKQ1YU0jV-iMSOqiZR_11Ya2k){4E1 zRWFALN6R~Po;P48E<0{vSG)J!gyV^WVkA2- zg*PO>xVE;?`p39?`>ZUQug^LwD$dL%r{m1eviMsId!`o_ypdEFL|l$~XqXZ53X?XX z&(Ags!W2&w=4R7l&yHWrH)i|0@O^ydex&7fjI+<_AN(k#*U*ls!loE=KWGvh>z0Xm zWHxdAFE0J)!=eV_TTxfA3BlmfQCG zf|lA~X*Hy$<`&GOEzjb@(jUPW6?Scei3Yo1eitZsg-a-^3;tJ<_ud}WwYJA@6ytp= zzLWx=zIAr4;jN-zA5gkA$?TwD+uT~YV%G~feco}eqHhH`5c%z(479S^VE>5)d$g*1 z3^!p8JoQcXJ=O5+NDTH~^auHS!Rl&BRb1eC3M;vow}%v2bpdL&uetC^j@-;FrOK$! zogggUblAe9!s^qcDZ|T3)oY{EyRIq`-`RqV^1~_#+h8+R-DAjJeBWGX!zt#@9nitL zt9Z7Z9bSIK;tp-Zmg)q}m)8sA9sSffHOJVjYOU47(==?C_|R!)f}vjp2!4M_ppX64 zc-IsoJ!jOk3QDUT`Z?o>zT94x+-S-wkbH)fP!BuHSgIOJWeW1h*Q-ZFTWuE<>-6-8 zueS!vf;DDdPM^U(TmPt(J)zS(SBvXYx=y=HRbB zKtm$h2D_kor>B43k9|}bzdl=&>ZxZM7ds{}`Lp(bNwg&_zk5eH1r1R9Z(6rdTM+8c zem0h}N{I32(FZ~>WJ0SmjyB7W-V^gwAf(krrk2->@=cQ(0FQ_PipUZyN%Q*c5|*9M z*2tj-@bxOE=I?7w=sF~Z37GXqZ&d)F4luJgxJfumEYKYdM~hVg&GfqD9G;3)0!IkK z8)F~YWJ2pQjy^8lLfIy3ql5Id-qo9Mb%fdwyER2O)I+4SL*p`zY!+`Z^`R!jRJ91^ zU8VJM4t)`VSWWTHtAI{c58ZY%W)pvX@PtcmD}@KKe%3UHv@Z6%Ay{!0+Q8~z*lzaT zM8p4sCu&LnkRYM7Ud5qr6kZT#<2Z7sJg_UnzQ!r21(7BID2^k-T3`RgIL+&0IiZ4> zb+#_vUa8$Cr-{@AI+KRTB6?18y#(KMfapFv%izygZn+y1rs?11_WIbTE~OFJYW4Vf z?q7{9j1gU3fx3t%Ne2Z)91X9G#UDw!YEV3yq8wcyVKO-`1#S&4E1H5p6r~-wJW8Y| zej-hMaaD)VT@<5scs3!Rf%fL6_o28D(2o+bj}47yjYuoMxwD(Vb&HIlq?Z8NgaQ%E+f9!qEN_6OG><?ca<;!a%mGfhf0wEWYz1Lwt*8e+D^jF_kP8l0ZP z^KO#FYU^V7FCb#tn~2_r>1&{6zt$F))F9^VkD|JMih@AuULi+B_x^xr?K|i?}`l<(5G{` zulO!w?InCiq+Jc4c!S@*!%xI!t{1p#C6FB z{`r|sZ_MaR0OScJZ$8p)>i>6MbxqByp!@lA=a{4KGC_ci&P|P2ZuVmsacDWh$^yTH z$${H{$He#q^Z#CR-*?`guO%pq zocGJ4ft(zRqvR$VRsWq;O$=CA0O?UTu0$!4vhh!b_Tl)SLaeC=>tiA=tXgT9FUpK? 
zYtHvwBrli2J;9#{&tmP-$&bBIa-AYqQEGXCc_@KXaMxsT43*^8(fC5-`=Jcm9$3Vv z!Xq}ZJt@#3{2694n`m2gEzjr%b?7JbT>+VPa)mtWVI>w01*(^^WQ`7jn*(F`0K`wE zwAhcLAIFlfkxY^VZfx8Xzr3WK8ny2c$TJ^j#_;p*Q3o*}lZs~~iq=<5hA)d{Uj}r& zBlK|xFdtJg%#S_$D)RBZ^;B4B=zPm~;nK0QX-1nH3-$Y%mrHx8#C(iMJwE34A07e> z&lZMfNd?0bhv7Xfr`gzOoRhWh8Qd8SDgEgap)nYXVW1hL`YwJDCz>qd%;ipWvh*+> zlY{+~7Z@l?^Jg4fbJKLPq!RRoFw#=_qABrAhFg!s!}6g%y)&@$16E;asqa^#vq7ZX ze7FXJQfBz>#If;rOnElPy$*&5m-tG_mLyX-Wm>~GHdPNyzyukkWL;#OJu2iX)o~SE zrC94k9!dN(4Wt|;Gma(m_tiwMb84}7$V&$avA3Lttb2<#RVB)?Co!K%8jx`I1Tj=x z$5jf3a9Kc;jJaQsT;&gJzNjekuIn8tN6#WS-~9ggB*gahTCQy=fDsf1bM}C#qN!sP zVL@*TeW6DWSC0N^M^=Q@fplymDw*!RTXvjl$BG}AJWps8-;#0U>8=@X4!=DXwe)VU zrSdl)sdOe5HxpPob3fDWk(oT4q^0 z7`Fa)*;-mSlHLV)r3ij}`e1l3!Z%Yh5G;`M=bDoPyw}r;C9H!foaHa0Syuf!d~S~= z#{0F|W&e4oJ7=UCHJCat|M<5Dm%FlOg!8(L#lVtCYbhTndtdH^)?(SJGSHpIfMI$twU{TC|d%Ae|wiR9{!+AMOiSo!P zu{4U+=xP06zVG;;Z^@76L53DV&Kml42DUNzy8huYl+H=sV2>>Cv{Z)225*K~-qya{ zKBhDnuaGIp#I9TzJ6akyI^t8gS9$HtvVg5R5<)~$>tG3*JTAuuE}{*9c*#ix$fT?C5;2Qxv|5B$4$AG!i~^cAU9_;8Ki3PAdr zs5jicQ$8V`8tJ0|lw^Q(#k<}qkx3Ddq$2%O67gf)UWUG5zRPNH`&V-VR1y&(2u_Vo ztN`+3aFHQDq($c_fJiG!*PX~%be^XE_|zMw#0yqfXR@+gxqe;ANsVt%1k08?Rjb|q zT7>R8J<1c!W+eW(^+M4kMvv^UFTa@ha6={{Pejdfw=l;Gcp(c6$GY_FrTGfK%wH;i zsPR3LogeZP|0a$8oylz_1$o(Mk^(ahd)+ZTu3%zRdScYkRwfJGIImT!^0tUkSHB4T zN^PSUJ%qA(WEM%4#Lf|Pz|ram`V;?ljb9gPFD{$fVh<8Grn8p~7NRP`{=EsX7uwFj z=C+R!zzP0uDVmBW5)bJjsJ^DhxMQztro;q%i*k{;C&v{%EDM!z`wt{UICbC~IQ4LU z{06EI{=Gnzn}_&^Y9k*}+#50u3zZb6Q^33W*X+}5oOU0*5R=OJQ(vn9X<_-PO5Uj` z_B+y}(W3HGZN5d)utLyYQh9@2xmVEV4Aet81js5MF7YwwTtY+?*I&?VqzHBIL}nhH z#{+?*7>-3V@)xWUDXJU20SFRO@KQx^KN5M|-!&GX)u_cbFpq{2?k~r`RVhc;+uGMJ zv4B1+w>lAsP&9kyB57(y&of5_^WEoRB{R$RDIKwItvPQCki)zw^?8d)Y62bELCy;8X9(7&);ku5;nQcWQ4zp%7On3jN4tk__Dv3Rj3|q3) zoQbSut6u^leiUX~fFVzc=KzZP?+M$XM%lPE$KcifK0GW{f4_tVve8Q6$&XXVW&G_4 z{Zx#DC}*%Y%eVM^#uFm8A-8yFS6=g1B*qg@K0PP#)n4p5Oe~ZushIBWb2r=!@@I3n zY0BpL_mKsdkT`_LLAVuux)2l&f}qeNvr&a_SkReBc1NHr_ByKLhix@7VZ!X0*rYsw 
zJ^1Y*Nn+UasNbRl{jCta-gqH`q%jYf(h;QJ0eLls-g0Qo#_zsMZwb|{_F8hwm7=`t z*iVVSU~vS8bjV1e{+)~37PTrttbw82N@h>Eo&eQ-AXTDJ*q5obkH?iuZzgW}cyz80 zssnzr`~*|t%v1nNw9dp71_AWIzn|_P-9gY>i4AF7fjfFfDGp*-#Nu?Tl*h6!p}lfn zvwp#UT?v>^Ks^B$lwacXT?X9kN8FVqOr8zsaupmNVp%XuU1dyrn82}cMmETAy8av( zrZIujznDcR8r`8gKlq=3OXY@_*`32Ucl}IA9Tv?gMM-u#Mt$#gxh{`|-<=$v@|>A>4DcjhWlXl6786w$cwWPr zMlKjnaBV%%S0FFk&e1RXpF9O7?+4p0?(D?vJUhY2f@_|g?E%c?fn$X<`5q26k{#87 z)`IJ#!UAbn)0@qub}h!|&C%~VYPSgE2L^&PVJ3h7Kcu#K?yexG_kD9R3{tWUCJ%wh zdtkk|Evdhc&CgSFOqijcpEb#j55vi^A}r!pu1g1Vn7{`n;QTep(QgV1UmSO9c88t8vD@l3#2Whq%;) zN7i7usQ=t9q+w3JTH=~1zxV_*Ndx=)2q$|~8u`kzpQ~(DA79kk3^?i@76j8n)W`cJ zxdtlSqIP~6J2%yLp<|?|_fUFvJsTXpG_80vu%SEa4{dUF+f-C^_fH!Mr+{Y=aaNF=Jkpz?QEBMw3L~EYC9hC)tKc zJTNkmAF5&TJHq0>2K(MYxMreF@kB|?0&?xZ>o96Tnno1HYoNF9Gv2^u2vA~mg9Iq4 zICUPFr~Ofa+*&tLNPj>Kv?b)vN^@z-k80eZh1gQ>ba5_Y%e!*I!3vv#z={t?bBW2@wi zqEy)fY{<>Hl5n-!C6qct70I^^QMj`}q~-M>zG>|vE7pY9BOc|-E9!E)pj@Y*AWqcXHFx+2EF=&n^jSU-|>0XKoS1IXYzHsJoL6 z2(vgeNjNR-6L$OqH*h>Z!Ps|>2=|2RUwFFkvhwG18%617s>w_yT$TW&3RI z=Ez^f07GO6?j%xHvi&u*id_^0M&@igyYj+Ikx1dam69Z zsFyy#hw^%iJo{wAj3S;N=st4iyDCi6tK~vnMJMUdPkp)hrgkR6hV^+|0Ex1 zFilg+O=}BU)kZ}L^2po4`+w(XsgJUCDQ!lXBOTwt$kwl!)Q}%&L-A;da&(2reh~;v z4js9j7cBh&Cn@})dvYtJ{y@f~=6eeF2HdpJhgB32b3On5?Z;;9R!2mMe)wI2qqj<@ zXRN%N7GdB6CGa46|DES`p7$ZY8P7bjJeP>YS83J7M^U~*(gCXh%quStW_i2M(S)=Q z2%gkw`!n@FzaOi1# z$t!J-efF9}C1D%W!NitaFu4wk&K8o`ZAE zao2)Hnqg=;E8wi(fB?y71~Z$3^o#xJ;7_|~FLLGkbYo*dl-B18A}$PWr_9V!xAvFJ z%xP-m1Br@dEJ)({4n;xU;Bg)$)z-l9aQ_b?=rEB0 literal 16749 zcmZ^KWl$Vlv@OA%;7)M2;6a1CyGwxJ9z4k4P6+PqGPuhyf#B{skl+x80AcVq->p}# z-k;k)y1IL>z0W$kd%9|^(<5ylO{?o>?cwdVky99KE|Jjf(3j97&Dn7hOabV~p6Ko51`B5SXL6SW$h1}SZ z89s(#D;W`3iAi`8RDI}YN&dyCak;IW@0HAx<$YSglI44Hb>Nj%=sux|NcW?@Z#Xmd zy%fxOe&BI{|MGllM*LFj+RV^G=YqZAZ+`FepD+G(7iT?&YyD9gRg=!wqGmJeGhSJ} zl~-QSw=q8*>u&XW`U<}{ad5N-xW=$ZR4K%~YA&csa3gc0`69R@+GC)qYSDun3B)-; zK?G0C(!#`3y3!4pGg_b|c7GX^R~--!JFwb1o1(IIk4!Ls|M68wP`o0VXDOA_u_4XZ zdYmGt#xM<#MUsFrp14HKCY`1ukV;?kZNENkgrt5+d^50jbCFg1Ej_;FsaN23a++w< 
zWT@~rTjk~8+22iW&D*_02A|wtrgIAfMT6v4*6`Qw>k`b|n!cKsP1w<~<0Pd7n7j2B zH(f>l{8M1Q@}xSN-B+Im_-hvArmP&|7Gi7=@^@LbQEdFtOGPlDozY%QYQ|t!s zlv^-tEQ^@-G3+Gxb#u6A{6sZX>aK{4qLXU0U=z3T4dA$Nn+@K4II+WZ2g+fj`GmwK z{4#s_EA;Y|GB@h8DPhrBw$#*JQ`7Fr<#$WdyL#qf5IaqEbhXh3LR}%r45YsOH(G@y zTj-YVq|+n%`*YGNvu^d(hPZyxYxQ&zo(tykul7x++?%bpf4%!wT{*spa18egR}S@8 zB!^W!JDDn1_n$dO(Buy^c5EED`7|~3%+Dp1pfp~?Bvwy7o%_K%TED$A#n?}`xRRLo zKh^vBQ!ynw#rS=lt7$E;=vZiecN$PpQ>*u!C@ep# zivpQs*F~{&<<~_On81uUZbDqocAB!VYebJrB1S}wnj$v8K=X0`ozfcP{kj2Q_E*!p zHPaXzTldp6&Vvc}8SJm{w_^ffYIvTgWOiZls7B6Vb=bpY7{g@^mnQNfrxK|CBM7_X zCcXU1pn`FL*fNHK()(eNGOb*qoX6Tk;0n4h`JOyOwOXnppoZBa8xs7zn7+LolOZkA zHmy8QEvX2VKh1y&*JXLR5p}j@!zru5Q9zq1UbY;pYj)ZZpvnQpQ>YYdB$)N7YA6@w zr9nZB=~U-(hgpZ@?DuS#yh6={zI?UrpVqzzwTYTEx64ZUTY{8rTkV^%QOjXY)3gRY z-CCYrt;w)(zv$8~1GN0YY-wqssH-RaNfMMajUUVl-n}y*)woLEpup1m<&hbnklEuT z$CEC$RcKH|?Yj6KfAk6PcdMoHy;LVB2gl%)(251Oc&WEP=A;wV4D^ri2k|J4Ebp&L zbu)wmZ#Z#h+bWyZg7X#)=AY)@5}(w$%Ah<+!8lTBm8G(OgG<(59q}<2VLe2dnHv&? zP=l!?f<-f&nXiM|>L&Ig`M8=DX+K&vS`M4SEK5&r)l3G}QHYx{$u(rLRfde@;PBC3DMNWTjK)Dq3U@YTrKd5)xHi zUarISYVsPL49c*G?`5ZHOo-XOg3kIMe_}x#}>vIj@ujfG;P-sTf{O=XzxFY&6B(5Dy?tAp>M5_X24T6AOhQAR$5@O zzb9(CH6_drZdcoQ+&u3tyH+?#BG)c7)ZwFF&HOG#ALF&>HjIU%Xoa6t}nCA3zLp10* z$%?J*U^zKn#XWYLGQ!u=wYg5irKK)wzlD%a3g6%{MDzoGqbAV~O;UY@FVnDh9MqKl zrWt)-AqVDzHPt?ZL9)gipJ%(@%LV+27;02pr2jT0D{i)JIG7{fHi}a2$|ncbYTyqV zOjHKEQ3m)&s|EWKaH;`x)2w<~choy1i7i!C$5_h7^p9?$Ev?DV?TG)SKGwc*mdB{7 ztIzmBn-jU}ikATuZtA{biWRuY5WxBRfhoMCN$&|&OkDM%gJQ(9PXBfB{%7SfhoU0) zN9m9dUu6vo1LKeoY53dIz&EqMk5iE<2u%Eu$`VXWdxU_++rfq6kbvgZn+quxmgYw= z=yFHkY~4DTsc+z@@lChy7*fu`$w+nzvTv^|9k$3kslAQ{jTc_bi5Tn1<#Y1<6EXIT z86N+8t$38kMs1#I?$(<1`*)_j-9ZZXG=;8j?7po!R?_sIXG<2bC!e~W$og-n$a?Mb z$hZbx%6HetR*#mWO+1!GZYL^f3T?gwl7&*~alUq5Ev(jd$lUO{iN6}wJ9&liv@dh& zXjot7iX%+FKF)Fi*hmrF(iEDW{P~}vDa&NXX@tR+Q_w#0okTj7e@*|S$6X%d(kf8qYfrcwGCH5N zTh8DQnPW3Yd$Fx?9Ip_*0n>0?oQJt>`uT~RzYzWl4?b=}Vj8irRR>CP>(I&=>pC}< z_~-Cl&N&u76`^fR?*^~mf5!&+)cB6q 
znZo<~%y$R@22Gk(6LK6oStEdnUBmz-s|NCcIJf~IB8aUGCKBW{lue>&GywC{8&ndY zHI+9lLklPt1b7XRG;s18!h-~$9jGC5L#ZO!@6hZ#A)2?{Lbx=s5fYxzP|rC_v&_lS zyYeU#qsk~$uC2x>(srOBma-^WQv|W7h$@=jDTJIlc<-)(X5ofc22b&1Pr#G9kde}~ zA<6Q(_cnfP_lbvY zRD-!nSv3nGnSxg9hUabrM_{*30QZ^qRM4|q2iWRa1F$J;V$IW{DfMX>g0-@_b_-~q z#4@lIhmcOWVm|^3+_CQf5)1DwdHc%)r_nFTOf|KZ_5sg?48aGaeA1cdu=v!VvtChh zK>sjuotj5!O}lrttaLW!LtOhMGaG5^%K2F665R~AaAG6T=39UQOwJPKyt%>G9xj&v2&`d)dDUrKEQ8qT_ zF>g}-@uXVBFPhNqAbub*vJXeaXHERxPnKnBm4H&%JoK4_H}<%QGm}zy3DelX$U5rQ zpi3o~QEwbJMUCx3mML4}#9)A=WSoZtoC;a*~rR0lI6z_ z6umz`)ul!SitYS8T!-dpSI*tg8{03HKhGpC2uIC7->RjS-|l&raeqHjU{YH6-Vl1L zC+I3YAFZB{OZE3Uq(Z*^>-!4h4YO!Ly3vg|#Ng%}9bt^%;zeas_-3y!!oZX!N`P`|?7Hj`BCx{UeMG z1NLMt(I}<4U|VGqmfSr7esL$%(BHYp#{~$w1jY1kgc1l3r@k^dGEpTmHbmm4mJ|Dc z{G>^#+HMH$t7;e+({7Z~@g}H+B%BSDtjm;VYq92A2TzP`3T(0q?c%fpGpA~p8bv?n zfQ{cTw{t%osTrPjZ1!<^=O4a~?3G>}4b1?cxurSBf)eY|Y)A4Xm*M?Wd*;9YiaZ@S z<$jIz%R)W`;Xl{|7@x&{YbPwa@w8^CTN*n1iJi~rtfzOZT>MRGrxTpMX4%h5&k|(X zPvIV;*v^hE;#m=Z+B&GCakU5a)sYHrkCB`G@oI9*8ER^*!TM1hgI?RF?)}Tklu=11 zx}pNtxJIozUFc*?u~Qeamz+13;IPrOYEfwAkU zPQKPcUeRu}vgdV84{Mu00E)vm4Cw76N0v8h&(2Fkf1m?aR><~T2)+ea?4IupJxl$_ ztZ5gXp+W3Svi`Zp%bS#Lpy$ZbFmYqhBmQqnC+JK2&p|M7_9d~I+%2f4u5$DbXKPlJ zWhRT#!jGk~wdfgLL)}l_XohEO|7w?wKV5qnYJPzo+5Djoy^`y^M_WpreOv?HJ9gWI z25-&KMx9p{84WOxl0rj=p>PJqnR+!`@x3wyUdw6gwfF;ff2 z*khAn734q-qDv0KK~ba*yCFhyD35kuk(${m&eXP*gRs$@`rAvM2vgz~vV%*HUOShL zX+1SB{!fN=TdBACy{0ZJ-mAW6)o0y}eoPa7BZHLXlus3ueXBSj=PQAQi@J`LYpT&P zBR)2BRHQ17DP41S?6HQjmV&?QtgV#>E%i|2T333b2+L$?Lod1$Jh;cJJ?vdr= zZW9YO8!ILw&3(I^d>;+fE>Rs;i1Lk2!o0NV|E;!J;t(8 zGX==Lp(Ze(#hflc)&#o5acB~#s2*r>B-N3=h+@Z6QRU|0CM<$7a1G1B$v*xp)cf>k zF{AcFk=P>QNXaQwR6X2CZWCeV#J>m}y zC_dtU2dlxwNK1eCJW^=jw0G=aqxr&`ql-m~3@~~{|4|&RkSsx6gJfF1rkjDgatdLl z?jy4j=RdXXqKUr+(s`YqAWE23#wgxEH6@ps@Cnp;w~)Qnv|IrRYCu_W z4hHjoC@v-veZjmlJ}W=`%7?V8{HN zX?fmY5?U1&vom1E2FlAQhE*@h&^pDiEFN+<_$+nGRL^1-f}fNoKTH*MzTVHC#7+A? 
z(_M#c_heodxhqAQ67C(y!G!R2Mj{Mmp`i?@ytG2<4opPYDPXl` zSj#3Nnpl30dv;vah-hbWVNFj*f9jKwEkF1`7C=d&{*oW%D_6l<5}YgGo-ychKwy^~ z9^T0wwZsz%`GDvY0ur{CZ5RcTi_nlcsy~MbY{CIK0l%dzyy=kKIu3tDKDz1Y^WQBy z@>QIMX5fL3W*{y`j~`nY9DfX{`!wz0WeYpC=06tzx7x!L8DXaq{J}2KQd6n; z)WI1U2%nISfYV8I!R>vrLx!$nKym(9ONlLj<=2%Ln7+x|!Q35yWq>KP?u>9RQ_i0a z|K(_QJtfgHsd>mBs~$TwO@659Dc^3B3z{|b0gMmj_)c7JK;{_5^zEuHkb{a;MC|Bl z47MC&0Gqg06fGGCia5ZxW645 zbif~+{6FG^8iG5&%_1J}z%owA@psr0CHmS=-$r$oqBS%2FYctCyp~AJ*H41Wj4vJ1 z=jtP?=jzw@=Ii617H1=mDDtIAdr!-gi>uO4O zcoKaTG4F06vt7APVLC|p9K(7O$dU%+YVG_UsW(;8D}U5{_!_7FnnOsF=@c9D>zB1D1oo05ztR2-s1OL`XN2uB@ZS(ZnZHeF{F>dL z5CqHc>tPPZsrY{2mzDO2EDaq@cnx0D|jY)URW7v1g~+oh&zWGms`q1+ln{3P4upmOB9CmWKn1hs(B zaugR55fI`gJ(AAF!0H-lEBma!|ekMGQi;VcyyJ^2SO_p`CN zJx)!ZzGN&OTy^E%&;%40>xB0gxAm3R{}2*l*)+&?vi`*WVGL;#rjS3-<*&Zerdxo(nC)hX(uzL^-+N3~1JlR8>w zkqjgzGT+Wg7Y&i8huF8&eF4g%U7miVRtkv^W4@33X@3eTiSfSmFtimov6JX4iMhOm zXolt5AmqK7NhVPbic4MSl~x*GLcBM550f)d33$cC+=g`Zlyz~crLcCYM{YJ<1y3^MX{#1$gDxjq7Yl)CP+9&uH7pw;reZJk+S;FOlQ5=57P?|Uw78uc6sZ- z==zPTVA2jNtdA1KDTjYPSnY1NUWIj5qD1snou=TN)F^8VVVPlXAq8U`f9?0`Lyl1e zY6DD}V0Flrk1wm$#YLFRwN_uOBF?|o#!9Shv=Qr6%ta6~p5&!Imc)!IuJ%9HWc1t- z+=lqMS5z7~d$gKBsEdZZPv&i{5m3M8eZ;|D4J!(rC<)j7VZO1?%H-JL zWYvpWxHPm`3VTVEOFI<9c`6Mr7a^lysa}Z7Y}fr|WkAIl7oqQ|MUGY|LZr~J@{f?{ zPn7u(${%C;hOzyL^Y)#IuSo$LD!ri?i@~PLFy{QX0FAT0#*Kh~Uic!h&K~LQ4mj_$ zF0?**{G#V-^px;bG`pt1*zSVRPnWxzl;5w$y-W@T&V<%D2EXH89VX!tuZ|*>gPIPw zJPM?Gl2=9S>~q`Phg67Dt1W^44k_%R*Y&B3u2G5LUMg#w4NL^7qFtUrSg4KU(ai@Y zu92>d!^LOP&|`yKHkG9FT)cH!MembvxP;S*;!};aO3p{4GbvRtM;b}>SbLjG4x-a! 
z@cppZ)3?{=ykwP2MA*@YC)@-$+SGL|s4ve^Nr z#IkJ;<1Yi9U)yc?Wwu59Y`Rr3GC!k;Ho%$DpN*H4uV!daG`k!?K%CD!6QO7GS!tsY zu!-b-z@w*r6sx8kp@>6@j86I-19>SyeI<1UpgzqXiQ<@5hEr3(RGx^)@0zVJaWY1I zKmsBfjtcGcZiAooXtb?2N!2#@|MgpKkoyf`Rd)U=HRDBL>6c<{mjN&`Ic|Ph5Hqh^ zlC!u75uU#<9Zr@y`6@%{w(wYB;JM&DPYo;g>-q!H2wdK8^UlO2IWE>gtBSSs^~^0H z>sf{Uy4XZ~e`iQfZR-`Ldbx6kjd}OOeAV-XUu)oM`V+x_)+#@}rrN{}Pyvtf7AQ}I z_ih$XDk@U|@65W2#$pw`TFP#$C08dX@Xer-JojHUw(!ToRP?vLb^iW^v{7I88ZN(x zm4_u8aeaIe3lqIUv~4H}c+;}{)-0^g0iF9}h%hoKEkhy*WlURv6@%(7xFlPWe%CUj z4*w4>Ff)WPKF0dTUrIh{Kf>R!IZQ2tVFK4`z#qu^%xkRilU~LGtJ#o1LUDV*D*f*U zScDNXYzFzzpNv&Od$EUv_SKU6qXOw>fnCm*q;C40j>_|-QMq*;L-mGf*ID=y?Mo!a zXL);Cmo4J!Rw6^@u>56*{q8D**&VyT?e%Hqy@!^t&}joj`dx>Gz8|#x-`C1;|-DBUSo;4Hf^X3 z2np(978&d}C(Pe{DZ!X=f8I+Qdh!jTf@%7$QKJ>fVRvQUnkdZ;dvFmsT4#`OFAr0{ z`IqC~ZnXjur%p$m;n$MlFP7(z<@``9{}#_85$fW~bZOd2?Z&_9y? z*Cj{gRmDE@%><4|-y3fJAyrh^6oNei?C6d^$)g9q@%?bwzS`GrwS&j-2;SxJ^}t39 zC&WdlsX8;t%ioAiqgZ%MkA@Vl>IZnm-=CGnA{niW{2|-AlAIkercuH% zT6hZD?}3o#gtfOFZ=k&%4L*J(~w;zRthS4EDV_SMD2uFnB$wL3pY-BJ8X z-Ff6K)>TQ|P?BE?u7q&IlKzF$2GC2o&PMQWoPS!@H9s@_ zYSEQwqi-_?2A=CRNVXegxV{7O{D+!RSJheJ-3 zFV;j-nBec_zKk}@B!)b^NO!9Kat)1WxA8fi?u!6n8!K8jTfX1S;3r!k3;1drAAqFt z9zi)sna?Z(TbLQVJq~r?J13c_YKzc9$5rad`o5QB3*#mK#|%yxFIIWut>Rp8?6u?3prix)~hCu~DlK7!qMw*>dgP6Tvr)D80A;Y&Ez(KrHH9V>( z!b9As=4w8`Ks4c(XmmoUyVs%TPfJ{=7ByHkd{zT3DPF29zAsiU9=N+~T{xzdUq;MooR9?n zJ9h(pw!9<>?$Ere{H&g)cQKgwc z0CRaQ1bsG_h?xCzOkVG3))b+;DBqu}8sySs<=A;SoPdY5;xlQDR#j<9wPLoBK4s-tWs9n6T0qWuagZaU?q???<$OXI{I6OP z^x6Fn{GeA7l&f(PqN`Gc(Bso3WBW@E?;9}uxe2!^!h(g?NH)+GFT0viv%K5i6J&p zzY$Xys$-yaW26$&(Gt|Fbo#55ElE(TIQ(JY?6bnKB7?LT=0hUjs;oGWC|^=VOHgNJ zZBr3+Q3m;-EuhK#aL)=0B>M3gqx7C{zQi~+0|iTN9x=z^3rSZiBv{)gU+rdwvr7ug z0$={;r1*_G$x|vyzXJHjyp|LNHw6w}QnaB0cxQf&gYs_+ivD-oS_4~ft;J< zs3qsqM;aZ1FBI8gcYR&$ObY+}b7<#%F(ua5(c!daEeQiHLB-0`{AQ0mBY#`YzJp$k zi3sfgt$ef@@wIfc6xhqFVqvjUxY}}O0Mwht_7I#NY*d7obqG!l_UC%{@*OV+g}3pe zRy+KIFGZrE_b46!*1&SLn^VL!0|d?ZQpIt(U5>C6tdD{kZ_Swqr>W48eu4zFZya(Q 
z#zm^B(5ezj>+EQHtf{s{aV==jUT@9usPXuJ$}anhd=+=0#^ZT>d3EY5u9d5t5O`Bh zNE?b+e`9XR`{qwRLX%ops3yX;+*lF1Dl=y7gk9|xq4l<0moL5eUhj(|eYpkzpbYdO z5Mcqe54^t1VXQV`(7jz@^baL)U;z(}0|WV}gvmQJq7_-d&flfQQ246QYxDW^bt{Jw z*|hjkW^Trkg>)#Jr znn*yK^3d8l=F`VBTrp8)` zL9=^%!N2FuzqiXTErs z9(c7Z;5zQnYCP**hfca2^P^!N4Ml(OSf}#$!R?bo8^}TWdN>U01~jT3^d#qFb#wzA zxwp=HUp%;2V$h!n<*3)7465!4ZA=b(JA&c&h?pg87*%iZXN9dEoW6Rd23c zwvoe=yqr4%V-mu;d&@Kl*|CE@@WC$mc)4OQCav$iq*rIY%I}|LUHUOvkhEgTKMLyB zxn|Hk@N9)X$H$;KK0olcf@$0;Zo5vmD*lD#?zBL|Lt~V$tq1!zic7LeqYGW<=5)1X zQ-mCjOR7tPC6yZaS`BJ%m~b!KG6h2J=fxiJJDna4=vob}?{~F(x{oGi z&JLe4-g|qGOdM|oJHkW#&ED5B+CA%D{zqS~q2N!4hYWa9TEZN&`g}x3>?f{ssg2ci z4LB4FM1tiV(z3Os(dj}CO4>`QRDXvHm*XtgE}Da^EyW z1$mmsKimfReBEA5)7@Tb2zx&qjAeIsACDbxolCY{_XK#KZ$0^^yX|boZP!b&+q=z( z)qXv6QI7W}cBT|CA{I2-D3;!0-Z7E2p%jSanm3V^p%h?bb>fZb10E=T*&+RxzSa-C zqk>{>{yxMLZO#i#D#(jzh@Y8OtUv8@YqTWh7v3eS$g{lo;r7Dp>n3@Ae7w~MIfl1D zQ1R!~w4vO|;b*btJgMY@yzhpcv-w;Wo{ct65vbowQ9n@%kP!=#Z4`rw;r2W#&VGP= zgW$1@#ad0B*d<`x)la#jLyVt37Q*jB)TXwA?bF>t z$_ab#Z`ZnwofBrJKe@th%ScCd_x%#0W$nHQ*wm>?Q({HuSuW%X-^x zqc-t2ik4by4Z-xd+RO>gA}t)sy1i2;e=Q=*O51L|wkH|EJ`)^E!-xR|Eq&(Jtxi5gDRR4&%nRT)oa#<~U|XNSz6_>~@)ct3!npurAaP(h+Kv%sRPAM~&l?$Yu?r z-$Z))lblVSG7W@6AFT6OEy`@ynM`)}B3TNc9fSjf`pXlXEM2rCC}4A5FjH)ke#<#N z*SiY4^;`89{s~WfLgXm9N zKu=m-t!9JoKKMys|FISmihLGO` zeujGS=9@w_!9RJ~5;d8U-1)&@S??P)fAaj|zLJZ;H5%3Zag8ZzMUFLM9!(8QeEhST zC#FVzx7{SB@xShSAHEn4{!y|!KV7}pZkoN|wcGL={3G(e?%_D|=N^WP<~qyH_QYwk zk>@PIdBRz|RCBw`-mRQzqRt%EJ7v{idr~e>G-l#gRk1~a&|-k5ei%iMqM~YkU9_i@ zQE9Z`d}_SvhVBwj8HcoXk3w*(KKc<^$Ofh3N;WO2D{j-aswb!D$HuUv*~+lwlNQz^ z@co#ii=-&-#6Bclq>CQ+PeMe$XYtDLTdt`UnHrJ~$31+mz7pbGMIQRQ!`DSXc?8&p z*8nZS$D z8b!FIfS4i~$%|SW%R~nx;*V19c`z(d>~S(Yji9?zz9F>DwaUj>$%6zFZaKyip$QM# z0EE?{5g1cfysR|IaWT>UUCcBzDKWzPbko=jQ@n=dgd_~f();pR#&P~v=c&DPKN7@b zLz3~Y_BTs1ZPfd+*DP!Y!5dlUooe6W?#rQSQ+FeUarYztY;9KBNY~x%AJd*`8pnQ^ zk;1PYuJOOWbkbx#EZVH|)Y;5B_Y0>dyv~Iut^R{-$>vs!g`Aj~xyc)|5W6XfsDJLtF%;2|kALY$F@3I*{P-HkfuvuqyF{U!>8=HXD!y+p$>%0+rB_b}TS* 
zY<=C+BP+yQ)Y)VGZ!jI5DZ%AHUF(RCCc$M|179!?#H8zQwrikP@WJ{43xvcgMTTQ! z!J%qVyE=4SI0}^@?C@?QjoGq%ziJ6loC)Ne{cGgRy*j&lQ^QKo%3HU6Eyr({h70#O z34T=|RY#}>MQ|?at9pcG6pQ;euFh9INiFl#4h;|TFn^H4 zR3xEuPC zG+Fzag|hy2dG$6X?*YvJ9F>x0{3CAZ^%T$Y2CZQ1G>?{$ninA&Cv3gcp;WES24M+1 ztW{n+LG32QE@!$I*{t@6esmh7ZF%E=E4c6`P=XYi;}g;TK#=1 z;)I?rfZRxdh=(1n`Sq1wVBt_l_fU&Wgy;}2iemTnUkNm;1iG%Jg%eQ-wtC^Mep?f4 z*DJ6789Tx|2J{Su=UVi*!b z7PK>tMz5P(OmpKo8a)1R3RJYKvP#z}e4G`MaI<yp7Vci3=`)&KK3AVCQ~8Ubm75)Z$5Q+)Bg<`;frWQCtlCYR z6i)LWqOQvEiHwyBDdUsX_sfTx_s|_Ot)wW6Pjv2_ddX3`a_pAtJL-!a%^ufA{+^sq zKm8;`@G6hO8SsP-L7C$I^LMYL|CQmd+(^ZXyXU2s8m^R6mJohNaulITWX??}vC875 zExE-I-QOW3E`LVE z&PBjt4qNp5x`@0c_(rk_;WTc}nVvHnJ>x%$rd5uMa%@~$SJ@VoV}ZFL?tsYWy!3wZMTUfuYdH)bL+97D;YK3O%S ztqWOV^XR{P*k*%X6CLZL-$wz<-)eA0I{())4VBwpQP3`@OULnL8wNN0|poj}`(p83!?%LOn^ zfJD*5O3{}VY5gcT-v$I9m<=-@2|zU>>MB8H180|&;a@}(b8r(X!MP_Ma90XVj%GK> z%e+ZF#;-EM3H7Gnjp9rg+_yL-ht6U92cTBl%V>W4X*j^PC}7R3R)CS5<579}aFPRk zQPY|Q?6lH`l8D%YYUop%f>SC^9*jLEIsF|;f`9eW@KM@NMGLm>wXJ(xxcHCXy=MIf znk*j|LF4qMb;qrTniol-{mDk+nGNsv{wPNJ_i)(ezoC$H`hB?trlM5kjMr&CDu3NI z^S9;hqnmL1HW?yC`r^**L-^rqQ^w>WibzBXkzMD6-Rf)v;Sor%^!&?TbIX*Oh^j+S z%=JemuiJ_4D8aHLS-77e`A(=UoF7(u@39^1UyCC?^lmz4_DFUgk*pnoopi_%3zCTf zbbiy00u8-j`YHeao3(wML&kfPP8}x_5=V83@7`??Qj+ zMi03*Pcg?HNhq;?Z`MWTW{LfJ^ZfeDM%s{!26DqR=Zv9difA`*=A1c{%mm#bfUVCm zm|-u*?;XPjDbm8GOxWwNf%RURv=TjVMxChcWmf>L*|rWJUvuDq&=#Tr6HMhv+8}a0 zt_bx>$Ui&isvUHXPss!qyg7ulHBw4xO0peLkS*WI=gZlXB|l!;+Y6VUWozB*6ioeR z=^9g|L3-y#KC{US(_(P7Y?)G!_p7wrZ~Ec~XHSC7&(a{9bhm+4cuF_iEjU8*^(IYl zb20Wo^Rpq@c5MDMe=7$s+3osaH+(F=}Zl^ zOHpw+3q^(T(jWu4)cpr9+5Z1Z3KnGV2=e>hG-8$6&P$ZQhn#@N-L-q}t2E9J)7R#E zN#OL){=kXleL6b)S>|)Sqj7Hk_59fUy3}@_>k=NQ`Y|(oM+MC5HS+m1S^6l=5@H>i zu#F}59~XUi7yM-5b9GNzn0EKG5q>2T6UK{AllPNnv7f&kO_sllX5D^rf)fjc*O%08 zIp+Rvu$i^yW48}h!hX%iqw(^FZKaJZP3{9L>7spSl%}UqshKJ_{@b{pN5o@(_HG%N ze*e7nqvGz~>rEAt!zvZxDN!9RUr=6cKK_8-cq0<*wtUPg>McmK3nwp829jB#hY z*7R^U<@EdQGT(8;R88((c)>J?q}0Z}-pzXjqcG|ZK{2!G;0YPVl9h-P9FR!NE;FLb 
zD%*fChxF+Atn@eI&a~!3?YV$bk%A>GD6+<+cH_4gv7LHfUq)sH}0;Qn0rM zdoyhrWAt+DA!!F^ryU^9|JBTZ1~J)+(3-*%SX#wr->6anN8eD&UtOtWvczXL`3?!8 zi64fo%EKM_%t6l~JX|FsqzL)bopF^+xb)Ut8A=rNn4(ytt<8m(zoO{!!rAIY{TI70 zuaa4nkSS$>WZC#eo%n{NCg}4R=aC_(oZZL*rM(oU#x&U~Kbco+w2+nl!HzKmEwhhQ|xUX>X z@2423K`k2_32K}2dZi#ULDF@GE5+dUG`l>9byFltC&)fUm#&^Zee078PM;Q1(Fy|n zu3Jr-U9ZCjFh+kt>SZKTX{}ezffGwtfk!5x+uIS2bWSn3$&?R!jx2p=k?-X6oozF@ zxRty?N~BW+H9>XhoU5^!DW1~ZSQ=iSj8gWhslU(MYkL4gGoT@nADgiShsnwkpqb=h zb(rxR${JT|UkgccW7LC*h$Yb-tC2wgrx@|Yp-ERONnt}&irfN3_xEE)XamZ!$?*V* zm;BC-E-{{$n7V!Nj)_qVN$u+JNomXC!Dwe=)P=M4k%_-QnNHQAt%}@i)8q}6jGYM!;==UTAUw0Jil1g zMudFfeQ#p3Z}-K+mU?L!6yQqBJWzrAr#5161r#$+K=^8g7P=(e893&M^#1B8khL+U zHc*5Mp5c7D_YmmcN3-Jq4}_lW{R~uc38SMgxO^cXBo!K(LInyAHUC$w{7s`%*dqnx z-XShd$}y~og4T_iikSc>hK`xr2<`hSDE77?;Lr)N%9C+|g9+w8UU z%jfgO&+hmiHU0mq=Kn54eltg|gzIg%F#iv1q44QbnrYvw8}IM)S(pDaZJs|Go(uQ; cy_MqF#n!a0alvO^mjC>g)7s8{JHp5S09G-)3IG5A diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md index b163ff2fd42..4ed8fbc0927 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md @@ -172,7 +172,6 @@ Generated by [AVA](https://avajs.dev). 
}, dataStore: { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.snap index 8aef7a5b6b6d3005e97f0686387437cd8b59ac79..cf9dc89a5f94eceefea6dd89376389abefda9f2e 100644 GIT binary patch literal 2673 zcmV-%3Xb(bRzVIKRT=-C`@TEReY8NOv0P%*y1Siyv^!lz*)6M)7Ldn= zM{U+KGiPRRJ9F=F&z*L+O+kSe!$YGHBoHxx1{(YWO$ZV&3W|Rii~&vf1C1u42?nbK z5cz||-aC(-IkS7)Eifk3-DKw6`+es-=ls6kIp25Anfr^mvYS7^zxn|4ZNJP8l$d`J z^(kjQx4A>Be9`s!SMDzqs86rmFeO7r@J9?{Tm;}^09OH60I(lG?S$~l0B!>?NVI;U z&UhYOCZt(PrBWb$AUa5lkh|)}VuZXFe3P)ahX6zhiw^}wIV?&NK$Nh!FDR;E@h3r1 z3%ZjUl!0?(U|I&K4BRaP&&$AZ8AvF=r~+KB00$M|`wH+!1yEIBPHl8myDYb#<$WTx zis^JZG`&{^s0!Sq0>@O~Wfj=00n-|Atp*&{fbVL+GaB%Q28`&yybj!{3#PR$(~s!D zk96P#9e7&@Mhu{605==J0|xMm6VHD$fS3vFFag&DzGDI}nu6)RF4O-of!9r7s1NvL zA8@D-c(f1rO&{=TACQOvnHVr11HKjmo{j;p#DI^-1=Gnc)1Qe0b8(;;2kwXiPsM>Z zuV_a%X6lfdyL@X-`-Q3}YX zfLl_)Ln+|76!2;a_(&SKBn{k=7EEvKGW}p0IF<%}od#Y@1LyYxU+f2#`+>XrfhSHp z|Dhis1Hje+AU^=yHUJzO5KM3CG@SrRf+Pv~*#IyU@Kx<6sk~cuy*+f9d6&`(;~*&v z5~(TH^B$uttteWx`Apuo57zr3r3G=H&F2cXPjlsl zLP}7G+kD>fST*QzVRe;cnR0)gv%>81JZE0`T7jUXIP1Y;z&+&HPU$kQL>>Du^=;Q_ zwbuS};hNBgFSYq_BWI-wbNo=grYkk>Q@_TWkLO+LZYjG?iFsS-L28!+Jhr&bmS6?t zzU#5ku${Ns+EIL3;%z({9PAB`Hq^1(8DPhj#wNEfWTuwJ&s;8@5+3X1)3(9h@M-tp z+I%YYh)>D6r7H96V2BHu%-ESrq-Y@4$)KBi#Gvm*FzCr1F=%7nb&77DmPabJvTrXO za9qw^v57N?Nv8(LIvMq1j~MlO1fxdAvtliTjNEBZd08$^U{hqQg&zG$QfVPnswg z*2$jVy-W5WwDO;BZWtZMQgX+!RHsadkf{+eJwj$g$jk^?EJ7BKkR>8ysR&s*Le?K4 z8;p<*MaVWp$j*w8ogE=NCqnk2P8pGgHUOJ8be>tB8)0Cxc%Df>;sG67e2cfUwsE?8 zeTn(?X}k6rtNBLT8LKo5VEklfsr6&iPc)?)?6 z^PXL8tx0m?1G?%PHvd8`SGM!3ulVm(P|(Y6$({Gg&F6EsQ67+en_ZzL#JPmd+iSPoblfB_m!sn}Hm#&cq$M(B9KIa4z zWQE^JLgw09_;8?eWvyDD1+&XD&D`^9*E`VNUtymW?9>-sE>cpit;~iEi@sl--nOl! 
zJH0cL$%Lyt)qs-Lx!P-jujT7C;E)D$_Xhj;V zdE$U;j_tPw>`=$8FrRrr*YwRgaJ~*)r~?%pxLXIF)`7o^_h7n6Nkun+gaMpy0MiB{ zvY148;TN2E8Y_K`<=mQ6u$|JZ+jSR`r#>sWUTem7L}rmhtjq%0t}^AVorqSm9e+YR z2Tqr%S7O}1vTm)h)GN}X(K&Kw<4HnpXdi~hxLd9TG9G$K?no;Xov34DqvMl}w}hXCa}fq^g^HL1w|OAT3}UwvC*^`y0k@R zmlpjMX_Anw=E}%*->vRpMRDS&hwMVT%@%R5HXGTFa*GMvY6AC|z#~GRO>v=o$^@P< zffr2RFM@JRoFW4FWL$uK7H@&a-Qrc?&Za1|c9=kSn%R;G=oB zBGfXAw#WVUOiioTTu-W{+kB=_v7I#niOpvm*IBN(jU8uKqLj9I!;zOzhn5~FBC4BR^HE+87ZNrPc5#u8 z>aNgn&0pIhhnfZOX#gbv z-vsdF$s&j%o^mv|wYhORmCvw=Tqct(&>bw_8S+BSo!RVoZel7^$YyuuvXhzd3@a8U zik-1=ZOt7mO}VAHV^z(`VgnK9I_h~_xbHDqv7OQi8aFipDfvZJJUbBU} z3~o*DJz^bX9=Bb`nzk~dlh&{mIKY~=+S52|*rH1f^@cca4O{+lH7qVt$6;mb=x|HA zv?AqInrGIq6=c=cwAIR|+truXr+)ZU)b|Zrjoov--&N`_TGQ5;HEiWw*DKf#_1RS( zb$HSBD&a8eicQM_Le_BOdwTEKaF~HMKF72Bj@JJhw0p7x=O1F`{rG?S{-1~I;^EN`0u%oW)(Vzht}6fl$M_qY literal 2689 zcmV-{3V!uLRzVHnMH&CqboX@ky!Ua(9YQ?Xs1auOW*>LEw+gb$okZ>c z;n)bA=UOveGuwAF-D6kJ-R@mZl!q|_CYXT4iyBT15RJiCkbo}?F)=VwZ^*zOWk6AY9ZI9C(JS-&SivKa zRxy*w1g7^W09Ak+72tja__G3>8wF;fz;#jJP!#w|6nHcW{4)xSsKC4m+^Gttqg|%I zr2^kofu~jAbrl%VfT9N6qyfh?;1?&JU(kTK4s6u{M+d&71JCG!=~$QPzv;j$Ixrjq z-X8-F#()Q6z!Ndxr5KQm1GzXb9|t}c2Ywz0UW^0pO9-Y@U8X;n0CpsRVgk550X&ib z{*?d*lfYOKxH<{clE9afz|WGv>q+3e6hKpg>A^132UEb26!6s)@K_2so&w&R2DYbx zLK?U^4cwas9!~==rGa;6fXg$$?HR%J`YzL7&j9ylfL~>Rmovb-27r$Z0Lufw-2=cw zC!YT>0FXi8{6U~F2;4dd+&?IoKD*O&5+ntZBIKuoz_8C(Wq_m$PT6sH(PiddK`V@d zq%lMyO|e>V8TFVcJ_~9|({fqCbKGU$Nb~K$cy4TbB)4^Be0t#$$Mx8fP)#d3Ws|ud zZkgY(Wh$57R-9y$`TX?cw3(Zp-oog%u_>Wq#4_88qE(B}7Ch@fy&sW`AdXplhiQ2< zUv4O*1%-sg=WUl&{T`dWRZ?Zjy?M^ex#fAz+~BngK}ojPg2kY7(6;Q-m2QdJ)*@n~qMKRjAj$8MXC9a|ck+_I3HS{grdxpYc+tdUQfhx)^( zokOehsnjPvrFJY;nQQq&T*&3d&Rime1F=R1-P9)reJzAR5BG^d>kE!ubPBXQQmK_a zYhl0baPEjroIy-FH9*$LsAu}bs8>Q5l^*U7qxACR0;iQ~nVq>LS`~ve^5%l!{_y6~ z;njIl=o4=wx0F9~8Pj9*Yvjreed5Z|5UxDfC$1zs#=Q}uaV*s-lS5=mh)fNUX(2K_L>3Q`B|>D$5Lr4zmI;v! 
zgvf?MWWyn{bs@5|LS!34WE(?d@930~$nZMg>~)=Imgj^RI9EK+Bp?Z&jxD|=+F9E; zUHxQR7Jx*6^9k@F0#E|nM1UU> z;5Y%|5#am?urmVeivYJrfbT_s;}JlUfH4W!Cjqxfz(W%7q!fNqngm%N%)3{mm3fz- z<+5{-nb$B{;em8eZkyZf(o1TUTABLrobuwFB4o79T+D}Xw^MTqERgMOlj(uX4;+H^ zxXF0IwW_T(NlLy=SAEUmAF1WbR-yNb|7HaRwd|CfdAHnrK4&xKKG`>06=Ok`@D znLZ6^iOns0jJGg6?a)NzU(3MnWZ)k%kQVPV6!9QjeUAcU72rw*a1`JU1$a~e{vuWt zPySCcNF;+xoD?A&qrzMb!np{MI0sUMT+~)k;RWjNWRk(nPbY(e<%G{y%MM*77uGGl zyWmo5wl)2^EW`6on$KBsO9$_!$@8%aB zF49)2t<0Epi=J1V*}S=>JF_j9%LNhPmMCy*v@;?^1wKf3MuDSI;QLX*p@}eurh5mL z-$#SMk_;~8QiKevD@a6()L$3H0oQEHYYo_@wo_pqa|7w42NImd@q`K1Otp#gu@08IzZ6R#IF(HoJAbYQ#Q>4li+ z1z8xUVzR2Y*l1b|Z_!0&l@`4fX^N05^p%mTo>Se$isHmk4q9fr%@%PF>y3;^zE20f zqXR$DfhUDN>*8YioDRI81F!0U8be4T-VHt(7hqp`(FA;B&w50xN5p!hkwkhcSM;-m z0^@v_#l7x4GMa45Bc&XGtpF}QSuR<>ci-8%m;XZJUS1>mQvjX;kUWt>ZXv+c1gH|= ziv)Ox0MC(=U8L);5#T=rn1}#-BEV7vxGMtq*Z6;m08uf+RJ$@vP4qsiP^-0YZm4nQ zF3eu+hX{W`u2{CoM+;6xi~>c=7p>P^OFE`neAcX3cJDwFxA?5>*vl2Cu`BHg zH5rRH9CI%1SF-mg* zN|*IONh7i&0W}G@K>|K6AuRtQkvk>esGy|vSPtTZd{b&ppWjNYpS4JG{bwz$BbjN-}V>4BRIRQ$X&DgOAI=(=zaPU-hKfo;;-h7b?K@3h*gWz3Ue#@-YRtPXQiN zfEPt|y=y*7YvDrD;{HxA)R7&bQ1zX)aP8Qe)09Z9UQJE(Z&Z_g1h5f2+*;M7Ui0`N z*mZY>j%%N9iyX={fQtZ>0NerK;gdxWSv(18ZhiCPbgGbJ6Zu?j+N4`qp)=$Kn%kzQ z$MX|YIdgh?TYh>nH=bifbE4Q83s=|N+R~I-np=BnP8J)8*w<6n-NZeY(TZi4R?xVy z5lG1=T5u>Bh+IVu8^KjvunXbF%$~!>0p@bcv5grcH#%u#4c`IAjM1LP8CioaHPjp8 zf{``6KjgW9|(;U)Ug1H6YVOD4mXW4D&W}*f>O8ccBiUF*A`hY{z5^4Gc@J vQ>*&q@jTbc*F2v>hYiPfVkhsm7}+jf{-1|y;^C3E0~7xX!YO!{(klP}G$$3~ diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.md index dda2f46cd21..a83293e5cc4 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.md @@ -11,7 +11,6 @@ Generated by 
[AVA](https://avajs.dev). [ { allowsUpload: true, - isScratch: false, jobsEnabled: false, jobsSupportedByAvailableWorkers: [], name: 'localhost', diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.snap index 4f5c016384c9313c7f9527d5f88c78fd90440926..c8194ca722b2dcf0bb7a4120db64803b72650eee 100644 GIT binary patch literal 1228 zcmV;-1T*_VRzVyLoW9jw}}{-q(~Kba+)_8hTRQXmV3rY zJ>sWxC?AUm00000000BMRy%AQMHv2OAGQp`fBk!DBaf=e@ag z5E5dme`mgVecwOdKQ~*=SOp#Z_Qzy|i75`b;o7K_^ka_;EqPbBmC=7UwR#GGa3g87SVj>oaOrZ074#lyVOsaTNNtE#EjC2z(wKrDbA zVC)nFhL{I3%*9@}t5QSZTEDiMfKc6D_XY-Wr#x4_y^2_6`+O_wyKWlH$B3rZZk{&(;ufKLGY1mHG+hZ*oZ z172gm8w~h}0pBp-7Y58Zz%vf8X`}Xn4tUObB4nsn0+r-KX?ItsS=#ARDo6%oi?XjJQf?`-$r5QKpR{7KA>T-7GR^x#SV0TIae`?{{hA&OzMyT=yFV-H%AMO>wrp^-%(B$dfhho*y!am%8b2;aQt`kr0dUeLE zc{-D;vGvq{vy^?A%a(fj|2x(3L50u}nN!3~FQvMQqPVVZVf8$ zOANTqfKM6lEdzdMz+neibAT%j@SOwv;Q&v%z?FY43^rR!d8Ix{F&P?ope{e`;gDLq z7n`~|LlF=9YeI%XMtfLlB@{a7X)TmoQ#>6J{b*Kk^@(b=rP5xqA`%|aiVmuVN7ZZ8 z>`0~cgnis+O?H7^?}?O(rf;u*5A9^HhPr6A&hnl0FrqUgBh%j8xpOxiJZOzLG&W*W z#l3{i?!rC$fNc@dIlkTG=@}L6wT+6Cnno^*Wgk)Gkgq>rDG(`zXQu?kxHt&hnF6H^>=!?jT<=|>(FTJpATDx=@L8D@zW**h*K zk^R^dbB+Tz0bqem%rn1|ro78oKPr_`SWas$FbVh>+M+aF#qV3EXV<&F9#_%L31}h zXXU3iRb8)2UXLjpAPzCM$$&oQz6^7@({8KOP`J{qY;h51Bd)5nMX9!?^dwJcuzsup z9ygUX7Rv3UaX&uXG^TyJTpnOgpITmC2AdjT)*WKDuNDuC*rIh!X^~p~ts%7zU;ig~ z1HdN$eg<$0z=I5Uh5@fI;57z($bfGc@EZeW9N;Mj*l>V%9pEzu_`w1Ga)1*q@Um+O z#>NC&v<^KPq{K~+>7oA?%%_Bl0XxS!n$lY3Ssn14^+d=}F9j;eh0^YxP}8*2rc{s& z$QI>5L!{bV7*b|pcUlYQh;S&hl2bxga>9#iZBpn;mUIZMolq(;};W zO0zO6M$E)qo17Y9x`iKYCbQg#8rg?2H4bH8t6aCL-8QZ59>kr^b-yvB`!T7uD9*OG zIw*iO*-&;7%&4IQ57roFp|)WSnbj0iU?{9MX>Ye4?--SDd4 z=l8PE$=CeS1HSTrpE8_p9l#&k;zK_0m=B!wftC%Q7=yp>10VRn=RWYG4WAsd5BE-* zb5`go@#MCxGh)q?nOwDvC;pqI>}D=n>dF7_R402kgpSBOPuz4;s;Vf8i?4O}=Fmkk zzX_lX;0Azi0UTz)3kU(rz_owB7;Lr{@|*k& 
z#bl`Gfx7sxM?-4xPHd{`97R0nt_T?l8SP`KrBLXgqqR_SMe%e%^uyVWt52894V8A1 zC6VxmmUK`yJSuNfy(N{_WA<^6HQ6P4wIfm*Hhp{jyJ#nSHB?2Tah~t2h7p}38JXsl zo!#DaaKAO;$jFE*D()n7eh=>12W*OvF7T~7PtU1nzipI+zS>f0v?jITGN3zn3ymNa zRh#5m_?(iG0#op8dV9zY52&_TlT}`~d0cFF>ahsUP@;NTukXEu&i0krlVM4vQ90~Y zMY;bBsO9f~a(pr*si~GIV|a~BjmNc+5@pkxQlOHAWLQ9iU1LWyMF~k$kzLaiGP*4% zcN?x-mndVVk`*z9Yw|J$N`|`UzxP@r=C{-i#a$zUF}6ove9n-`{|%nRRSc;M007tE BRHjRc1;F$DD$1d+I>S6-W-L~~Z zAlNoAtpW$!I=>L2vb$RcUAq~?|t9z_x-)Tp)Z!o#0PR0eb&k+^C@dxzm;EY<_2;{ zzcHChn?t#^nSAbo8xjdKZ=TuOsGw8$PckI66IcYC2%HKuNLpBO^=7l?h$M~v6$}QE zJV*+XSCZaUD(sb{9fki(#XpahK#~hpMlO@!N(r_}@Qc!Ij*#Ij8U9IzZ^-bV46*`8 zC~%enpHSdN1@2bh83o*`-4*TB*g7kom*lZOL!nTynOQ0vslwY-7*yd36>d@C_bR-> z1-&j<z z>F{SAjyB*O23%;sEe8D9fZrJ4cf&ll&`e`xGk&BnQji8oZY;cHS6K4KGQ;UaGTpx< zv$1$j@e2~cGKtfzcz5x>E=uX~tYzk{MDbeH7Vmt(dxj*)_=+;gO#0+>U#5Cv?t<#% zto+G|(L2$u1#Pi~eVtZkEY{Q6lW6Pd>9VW^^XH2Lu}hv}4qBu4!tHJI+vm1*&u#Bn zvow>a`86v_4+MEF7tbb#@?)PNAW8R?byUmc&HQjqD5T~sb8zgF z7cSPE;rI~wtQk+H`&TFJj;IuC|DUqj)%?hibxm zyQ}MC&mQQbzg!>N>nr3A&%P+6Sn+rY`Ml?EsE}XYaovO|&_uqmE&(h`{L)xI6-P zM_^Y3d=0R;0X8(ibqzult(8@jP`|G*MHrh8#Y;&NuH~`oeJbQH?jI^=c_K4tCex#r ziW!Rw60Y@TYS=2TLlNsRjAmHh4A(cqj%N6EGx%DBqB9)MF0J@IPG~9Hs+g%% zW`mVjo=I5Z`)!D2&2)V2icB({&-Iq%iqGU=v7U8h^#lja{*_iPpH0RKpI@xXGiYV| zt!$x{QVk)otz2f%T$)P82a@T2v8@Wp8S68t;X&)hVOelQX#=`}#q|Osaxn(-_hW&PZ`y$oLyELYFiV173Hl|tRD!J% z?3Cbe85Y}-kt<}lS%&9iI81>>3anIMy#m)Lb)z9$6}Ux#pDOT*0<%?Es=|3Hd{c#o z?8wL*7c6zbc`mrg1^2n&*DlaBIJ!QoP)5HsRw>Ftx zxLw-8SVi?%MVDSnA7kRv{f(VnrcV=nyhEQ%ef*7H%`1GY0qq9#8t@NB9j~xyK)(U& z4fu#r$18l50b2~X%c$!We$;>`4ESFI4in?gPOtFsZdl@mO>X!XH~i8K|LuljJrMW6 zrJlN8;cXtc(*qB9;4x1fuki0Yu**|eEg|f!;}xFcg}?K{$=M3q3WTjI#feaj|{<^LU2+D)`a%U-LfGB=Y`;!5PT!F@9vhV@6oWipqU}d z8aTz;=p=-mZ~HuqnPonY@H$`!_#|*E@bjYY1C~oLAi*UPY?t7d5=3Qqy9|~L=gDw` z40p@IB$XVKRMeOH`?YE39{9OjYdfYX6YL)7P^QUt>?~zcwqthfA6Kev#~!lXV*jPU zpOwlfFN^yUmkM4LW~gwCT6^!NEK*^y3NewbtUBz@`&GD5g&S1ZA&NV8Z|)~lcv^+o zE?DJ)3tX_p1rNDE*I=ILs?!ecOQ!w4tenUv-!p~5;9K2{%Wd|8F9D%`Kav-QD- z#|0r5yu*$Qe$55V*+AocdkQvgoFK6A 
z$W#U!FY54$4lM?>O+B!&q2J2)rqlLHl{+bu9S?fUSXc%<&A#1?DNROo4YRkX7Jv z1-_}k!wT$t^*U51snw4(qo_H@sbwpyIrbXfX^vRg(vp96nS?K%N$0c4*l<3R6&0%p zC06ZG1bI_gl|}5bDxb+Z*7N;{WjidOXlb1Yq_fRNAXW*1bc@++!DQ~8nfQQ}STb_z zhP0KfK6{M8Od{Er9FJP9rgy&`@tH4^?KjiO_nGCEJ8){*%{6BTMf{okS}U7dZjLNV zB=ciE3P{q;Wv|$kDfG8^`^EB)nXRxA0ZDqJtG=Oj70}Jt2iAIv%UQ8>qO5dLs#NI( zu5$2V_LuJ7Sa~-$P`n!`TPa)xt6UO3rkvwqTjgphQ`K2hA)(|)hQ^v*J=*MP9 zT;W|+lnSp;S{te_ZkzR`@k#?m47gql^1H<OThmfLap3w-cJAKWXZ#5Gp!sonfxB91Pk%01-+ zj~LpYIc}=l20vWqhll;}ydNR~SQvn_#aPxX$KqI8Yc>RmYmFbtCEAxFz{s68l**Xn ztmxi#nOJUF+Ki>jmhe|6U?D^{Dl@Afvub!~s9?_%OGbLvo5@t6#6M=T1D0c1D7Sc$ zJ5|_MuFd4cEtN8yO^yEDv^Jj~S~PFonCzm3ZEbB;v98M+G?QZmqlKxvrgw2JZWcQ< zm`o>fZ;odM#g(Nmnax$KQ(CEHeLS47@o>V%mSWO}p3LkuoelODXK$nJZ|N3)m!lw zB~Hw2NY~JqqRdIfeM>Eg(}ohG%SJ(B3<19om>2{+d&Nw|WZL}pd5H2O6&kVyGg`{zhDBKx_MRq!RS`ZG^+QK_>xzvw@;bUR= zbQo@|m)ZX9Fzg7!Lt%KLUS|9AVR$hNQ888Bnc3bMfkhEGBLW-iXSQD*fomi1vj}(^ zU||ESYJiIx;I0OEx=v=hA*qo@h&ICEjWD;dawM}WMp9*|u9$=n*%eOM(4otrm;D&smVU9t{#d0 z#wPei6Fk)fhL{rTI4e#)wi()+!D@y}L~)-({HA8Oxfz~r29KCE>v!zj-`oP7EwHi$ zHnqUDEpS&0?5vY;Ye;Hq6lO(XrJZb8AwI_;zBvllN8u+?_=_lhxI_GqR+!TYt6O1X zt04XchmTX7c2zXHo|gENxz(0ASaUhAu+O`-s9p!Eo+Hp9LXv775Mf8P4s`5e2fFv9 z1AFoYI?J|sTUsY(oA<-ksk+TOsBM39Z8IeGnO1nN6^t1$XU5dYOSWT8so|mdhfXse9Auf^tTeIG8`qt*)m)%!%Z@LUxwev;1yGPo&1)_ zL4Hdq(lDnczvYyP=C_<%R-rLy<#J}fRqDv~(}nqR|D-ZsUaHD`Il<02Us+Cw9gq2v zRbitF7pZV{9p;PtB^7QGgdC}n+f=wyg~!Dpt0UJ@eYr3W7tD0QTU>SHH1Bmm+y&R# z{M)~}pjCtA8eFKs*F<`ZQO9ge+P-Kk-cUHKczV!f8m7rM>8}SUT}ED{!(uU5>DUSB z8+CZU4qJ4%M~7#0Fzh_%T0u%bP7TNn?~ZR2##~r=XWKXXhEu7L*#$FhCCZ>`+?W`s zdbu%Gp{gONTkWI6A2eX6QPcZZb0<1UIBDa|5^%5+iDsMW#Oy>tlg5QkhI_osqT7MT zfMy9+NRXD`V-kEtg8L^3OJ(cm$zt>DIlJ*`C$;m{q=$j`6 z-8BiwVahb*S0>4sVSG}YnMJ@fJLF=9*6&1)|y(+}( z^L#(3!bep2oC@Eq&-4A23eVVQ02Of0pbIW?!M9zo%LOy`n&(@v?G>P8EUG$+N(%o} zDW{6%Tw*zG0^m!bdy{(%r!sl)r8p_~9;lpjfC3f$d<`zr;0qc&sKN6ZgmpM>PkFtm z3F7sBYAW-33uLl8btsUJ|EO1*nkm5R9n4zi3|qNz6Ny?o%MimM1*C8da0l?ntH>cD 
z!Cd;aSRF#qYxXVdK2uRY)Ww-Z=k?VjxpZCFiVl{_3zR=6QqJN47epus&O@7$yhwXlN(hnYyo;%aA#+BS=-_rAbXg{;+xxZ~oW$zS1zbMu0=)`?Cm$&fCQvSL$UH7pOXpcZD0+&SKwg@~N0j&W}XaGy( z-8!hd>J<%ebpzbc0AFjU9LcQx;N5O(aBi(Cr-g%eyZcU!T~9S~?SC|k5)0Mc-!jQZT$W24X>g;S%DtHZsmP408*@T3tL_pGT2p)5|^ zsLH9ZS4SsiqxHQh`>Qnvb!9(w#gNorM`33alvX&jwGy|?sD=CvncU#Y{{tY12Il~P F006$jj-&tp literal 5467 zcmV-h6{PAxRzVDeC)p&ZPKWEsLoWP=yRS(41$Pyp_!Q~9H7FRRTxs?5*2P$;dvF_;DKHb zto6W$J#ecBp7+2^4UW)Yl?IzMxKb0E@w=M&f(Ey1@Q4O~*WmR!EY;y%I&9V9M>_mj zhl31wivi~waH|0i8}Mrbf?k;G6`E!OsN$XaIJN*1nFZP7E|_OF%%nLt4%l}Rs4_hqU#=CxHPXXTeA zNAI-GU(g<3*w0*=Jhuc?-<; z_8xQoy!oAl3a=@vFp{>cq?KH{Y1qo9taQQ>$3L9S*q>`zW!>}ReaT|o9Shq#dO8yA z^B0KYcS@FzKd{Qm<=3t#J`m)!Tq2tq&X+zzNRl2X>!_B?oB5HPP)N;N=1}RA+ZStB zyFNrdYbH|Z{#7ZbBPxa3|5{eNn%_KZ?bSy|ACK_W(#MqeboceKt5D%(zCF^%?&|v3 zvlIFlEZ4`*`U<(tw>t_cRy>wM{@(X0D&%LjT{CV9xzJje=!+-gX7{{KGtp_rd*Yq* z;`4SwA)6DKbYCW64$c`G8O*2F4x}@=Tqaj|J@fpvwDL6Z=}v0p{6xpXqULlZ+S^Ap zCnQN9^iL5h`DOn^TFIyVE~PXi_22-s2jJ)coDir)Pf7)PG9-0v0IUGy18}||tO~-) zM*?tV0PYS5(7_`j6!nn+{4@YB2jF!A!ka=655fCF zLKV8J#oI%0cL=tJ;IUBUNCM)ppAEquLO>WI;Y#74IP6(rI4}&yg@sOpT%AaUVSN}b z{8y1ri{fF|u)h(8?P2glpeF+T5jZabw?*K$5onAGRYY7ZE{ejjQFwb4tZ3y(n#5s` zMB&URye|q@3c}6eus<7xFGS&cVkcT$op?S9e~Lmh1|2a$ywx@A6)`v|1{cTRz8Jg^ zgFpi;YJiOma7}|yMO$SRMbsa#rwFC_P`H#N;aV(Iux-EGm%eia6}~0vKxJ=bSk%Q)ci~T5Lbs`tFVMPk9~zE43`DBismSl zDfP8^$vP`BFe|?n7wl!-dGDAaF zI%y@BWYT@9{*kO%YT2=c+Z%RRDF;~3U=Xr6b&*E1y>|*7ny`J&iJZ$ag{(v=Tyx>=OGtmL`sgP4{Y| z+h_`^=$#@)v3GY>aZ(#aL(wT+m6AU%M`?mMJv24RJBj7s$N$I^V{z@lg+Qo^k>rMVCAA^8?H=zFm?KfRk?cEma%Uz z)xR!36>4;XBa#0p3yvu5KsT_cUSLEn#6bSN6d1Y135*z0tWAQM67)*YFTwjIxLJZ{ zBzV0Hi=4>FB{Fxit4e7`FbsVjEhhAGiusOuGe)PN@p_Ojknq9# zeRaLUclh8dKKQl|9{1Jp3jfvzFZk@$5~BV(Ug6n(c%vVd`RjUxEk6wS;UYiWAd0)a z!uR{(yM8bNaCiXLJHGiZ2jF)BXc7JCk?HIneT_H*mISAxBVc-|;%miIf6lbjMMKiJ z2H~qtcu^0*K_OTcf}9v$bUUT~I|QE%!Tlk4!4?lgR~XiaQNHLHo~13}@$o_29iFT{ zh|*9W9jBq{k;yt#LsAcjz?&j)Yy{Ru_Q>6`F#=~t;ED)*F|zyamdWqYaJZnEVapmg z-rD3Qgr4X4JdFLyd>+yDz%cM(;CA501>XlOmta7GizK*Pf}cvzD#LLySTdX~!*w#; 
zCkvBQa!pcEU+VAIrks1=$8xRhn5K-gdtjb2MYdyWlnL36IkkUKskR+^$Z?B3rNEz+ z$|*04`x1`|eide@aIji?@1`84!Xg#oB3oH?*`0G#IA4Y9RM;kpyLNBhCsg>A3bQ=0 z(gW}Iz*Qc2$OF0tb46F3ws&7L<@Y70Xwwu6UZ%lS8r-SDFE#M!aG(w=^x95^63RJG zA0L!+vp$JY&bM{=o(|9Hpb4HJqZTyrPAoKWfJDkkDya$~jw^!@u{4keE(7ico&XdH zj+9`dlN|CT34SGkA;V%BvNCLu;d&XiJ7JJIFk){k!T}qK2q!AQM*Db!jZ>!z*m&>6 zfDOm@dq-8U@sa|l5LRKPS_f>*Rbjpg$EdKX9@t2#uwI1^)CU`%QQ>A4wyW@LeX!y4 zK*R%YapHnEdf=NLc*z4D8l1crY)n6}@t@NbY}~8CHyxl6(qXOxG|t#ru<^NZ0vnG^ zX0Y+14lnD_VnF-k0~;Irt$c4f?W|O}V>8*YpvR0;%b*;1mV23S6wf zEed>3foJ}89jTMl8bq2=(4527vK7`Wy@t1%o2_hd$v>@3B9O?W^Vw8r#DJAty!nKU zX)9ZO_83E%WU4PU7PVSU?|3`nvp^=>Z>CdcnB|r`bVAw9wX209!AyRgmCY?TH!n@5 z@}(ZxkfH1qdop%^3%6e?|CredD-n{UIiC84+GC@e(g)Uh3(Hxd^t!Ur1*u}C7kJ9S zi`iei`}xYdxq-snSlLS9v8{4Z_|S5Wi({3msZ3R8O@)M#-#lDucGYOJCzo?QMsoJM zswfrRkg_&bU*0Nh>xNV=6;BPO@}+gmC|JjWc`_u~-kE+vtBJlUo@oLy({I$^77c!& z!Jjo~(cw59(sfoGL-Jmu!>4q3Ob5k)ZUa_1Y=El`__`SA_li|`9y8$Q1}I*5qZgKm z!G2eeTp#el>0a34g{@wA)C+&}!U1BCUksR6K8=YLD<}Ejojy262>&+-uPl0n4?gaL zdwuXrALxGQ^}_}~e8dm8_~9u(=mF>r!07?_Pyjw1fCmEbTQM!Jv1(79=Jyq`v>XH@ z2#1Q{{ry~DXl)RzAbczc_XXiaJ75=r?hu?Bg7d|A)=bv|SzK>E7AmYaMVL`(V+tT6 z_x9nzj5)@t?oH)ZCbDKev97v>^RLgub4$}^e6Vbp4@uH@LpvtXOj~B<$ndaj^OK7= z_iiv#gLa8`WU>R6Yw0MrijsHG-g~agEKT>F+@-0 zdXq!WKEn0-{i$4UI+NZ!lo`oY{{WFx?wI1zxZF&e{i7?9SdlSk3Inij*aHYj(r-d; zLmRZ$E1@$5b?HqctX!^oE5U-qF`13&u{Ek#%dv&MO)ZJH4<|>L7F(i(l3T_p@OaFm zjj#8ZrN^RJYrpwnspct*YZ4V5uffU>##f?KdC)WPlqQm%+(xmd7i2` zM)un8SK5A`9C6!kL-Je@fe$!zgZm>85|g||2VSksxTJPQp(hH9qp(~^Ecc1RtD~?c z3Y(+wVL>=jYn$)N_fki5)V~^q2kT|O|11hmIhpEetWwyO{oWXZ88J9QOlNmzzpswL zni!lFgRAOizuy&uZ87*;3=VC8Q=P2$&o#i44d836o&9b|YIh?X(Fn&j!rK}vM>4Bo zBvqE`iaBVkEG-NnNqT3adt+8PS)AFw!+`DIzMwDJ9#1Zq)3?Cvp3~Kx?3iPkX3w1Y zJ?6Zgj-=U@oZnt!Z&lWT(qUd_G>+!1+|XD$%s{hUx53gj*whABiQ;c?`9Z~LS4FeyX-P1ZTVUE&% zSp)M#NK&l>BJ8NvfqA>xf$p8@z|OpZuCi_3y=~*O&3mqGvTpPCYTK)>ZHA=!XFzlY z95Mrz&X_#8%T7FMaHQY{87m0or7Q^5kOejYR|59}zXWtA1^7$}KI$Bf`Wp#U84i@; zG#M_I;U*crA;Yg_@QZ1`ZVpUrF9)U=jhJ1N19SX%b70OatI!y-ayhf#Dt6@Bsltxg 
zKB4TGm#VU3j&!opPb??Rj>V42s<26g3stzh4m(D^UWJ6k#Jg}b!j`Gxv{(f{abARv9;VU}W1m!>Im8NDAaDRug*6AZwZp>t( zw$){bVILbQTmjq(Jn}E(5R+hzbJp7XB)DFJ??~{X1Th(w%8-)bTp2zt!3-a+9#_?4pY4r!3nY9g$PXC9Hld$tDe(csfg^2jq9wCQlPxNx{n&KlNXgAO<9 z@O{Ukh7IU2V6_475_i9@15Z5V$9g?!fGSdW-GROX?X$VO@D?xRy>O`)Zui0y_1M34 zXdX(`2Q5B0N}%uxhhs?5)jn9`gL8dwy$>Gnf$E2&{P0dcoZ*Md{jk*!fAqut4wd)h z0Gu6w8$}AQc%W%T^RE*j@Ye!xe*m5rtJ&A}g=PlffFPU_g!csD+92E$geRP%X$}=B zy!*M8nBE-Sk?g@VZx$$w2n{$W$WhZVQbMtbIN-Sz&E8vhJ8zX(AcBQ^Y?M zsL9-2xa-W_QU6figm82x!asD$#E^8W`G@WeyZl4bma#iI&9zdUo7*0N?g*R`foue> zjtEddsRc@O(TTS$0^f?j!x4DA_CaNm`g8=Ii+~Y@*#e<9Tx%Qd{$;9#Q8+pZCq&^i zp{ho)-P5D6F$xz(;fgv$Ugd@;d_D>fi=^ddS0_e)nJQN8m#JJ--ilwQx+w;a$Kb^n z9NYkA16(BjGL?(otKQZCcQwE_8sMRZ%8|_618(kP4esq%<+N~db9daSu`{Yh!2N5( zDEaW99R%EWyn+PWY2cr&$HT4Tpp_?`gZ6@H`BOC|o!?d>o&SD^&d;n)xOGP~j&Fih zO<EMZ&c-U*}GcDXQTDZr2X}qy}Gi8x?)JG)&_w#cw-wZYOBO8Gio8f ReI_;l^H1N*ZtO^a007?6pPT>y diff --git a/tools/migration-unified-annotation-versioning/.gitignore b/tools/migration-unified-annotation-versioning/.gitignore index f18dc3a725d..e7f2901fb60 100644 --- a/tools/migration-unified-annotation-versioning/.gitignore +++ b/tools/migration-unified-annotation-versioning/.gitignore @@ -3,3 +3,4 @@ counts.py *.csv logs/ *.dat +result.json From cbe6c258e4d7174b5120bba630a5d220657c5ead Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 9 Jan 2025 13:51:31 +0100 Subject: [PATCH 354/361] WIP: make migration faster using multi-get/put --- .../fossildbapi_pb2.py | 140 ++++++++++-------- .../fossildbapi_pb2_grpc.py | 66 +++++++++ .../migration.py | 129 +++++++++++++++- .../utils.py | 7 +- 4 files changed, 270 insertions(+), 72 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py index 6267f9144b6..9bcd8683e60 100644 --- a/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py +++ b/tools/migration-unified-annotation-versioning/fossildbapi_pb2.py @@ -13,73 +13,87 @@ 
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x66ossildbapi.proto\x12 com.scalableminds.fossildb.proto\"\x0f\n\rHealthRequest\"4\n\x0bHealthReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"R\n\nGetRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\x12\n\nmayBeEmpty\x18\x04 \x01(\x08\"W\n\x08GetReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x02(\x0c\x12\x15\n\ractualVersion\x18\x04 \x02(\x04\"M\n\nPutRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\r\n\x05value\x18\x04 \x02(\x0c\"1\n\x08PutReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"_\n\x1aPutMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\x12\x0e\n\x06values\x18\x04 \x03(\x0c\"A\n\x18PutMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"A\n\rDeleteRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x02(\x04\"4\n\x0b\x44\x65leteReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\">\n\x18\x44\x65leteAllByPrefixRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0e\n\x06prefix\x18\x02 \x02(\t\"?\n\x16\x44\x65leteAllByPrefixReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"k\n\x1aGetMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"c\n\x18GetMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 
\x03(\x0c\x12\x10\n\x08versions\x18\x04 \x03(\x04\"s\n\x16GetMultipleKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x15\n\rstartAfterKey\x18\x02 \x01(\t\x12\x0e\n\x06prefix\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\x04\x12\r\n\x05limit\x18\x05 \x01(\r\"s\n\x14GetMultipleKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\x12\x0e\n\x06values\x18\x04 \x03(\x0c\x12\x16\n\x0e\x61\x63tualVersions\x18\x05 \x03(\x04\"n\n\x1d\x44\x65leteMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"D\n\x1b\x44\x65leteMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"K\n\x0fListKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\r\n\x05limit\x18\x02 \x01(\r\x12\x15\n\rstartAfterKey\x18\x03 \x01(\t\"D\n\rListKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\"U\n\x13ListVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\r\n\x05limit\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"L\n\x11ListVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\"\x0f\n\rBackupRequest\"a\n\x0b\x42\x61\x63kupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x02(\r\x12\x11\n\ttimestamp\x18\x04 \x02(\x04\x12\x0c\n\x04size\x18\x05 \x02(\x04\"\x1a\n\x18RestoreFromBackupRequest\"?\n\x16RestoreFromBackupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"\x17\n\x15\x43ompactAllDataRequest\"<\n\x13\x43ompactAllDataReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 
\x01(\t\":\n\x0f\x45xportDBRequest\x12\x12\n\nnewDataDir\x18\x01 \x02(\t\x12\x13\n\x0boptionsFile\x18\x02 \x01(\t\"6\n\rExportDBReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t2\xe4\x0e\n\x08\x46ossilDB\x12j\n\x06Health\x12/.com.scalableminds.fossildb.proto.HealthRequest\x1a-.com.scalableminds.fossildb.proto.HealthReply\"\x00\x12\x61\n\x03Get\x12,.com.scalableminds.fossildb.proto.GetRequest\x1a*.com.scalableminds.fossildb.proto.GetReply\"\x00\x12\x91\x01\n\x13GetMultipleVersions\x12<.com.scalableminds.fossildb.proto.GetMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.GetMultipleVersionsReply\"\x00\x12\x85\x01\n\x0fGetMultipleKeys\x12\x38.com.scalableminds.fossildb.proto.GetMultipleKeysRequest\x1a\x36.com.scalableminds.fossildb.proto.GetMultipleKeysReply\"\x00\x12\x61\n\x03Put\x12,.com.scalableminds.fossildb.proto.PutRequest\x1a*.com.scalableminds.fossildb.proto.PutReply\"\x00\x12\x91\x01\n\x13PutMultipleVersions\x12<.com.scalableminds.fossildb.proto.PutMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.PutMultipleVersionsReply\"\x00\x12j\n\x06\x44\x65lete\x12/.com.scalableminds.fossildb.proto.DeleteRequest\x1a-.com.scalableminds.fossildb.proto.DeleteReply\"\x00\x12\x9a\x01\n\x16\x44\x65leteMultipleVersions\x12?.com.scalableminds.fossildb.proto.DeleteMultipleVersionsRequest\x1a=.com.scalableminds.fossildb.proto.DeleteMultipleVersionsReply\"\x00\x12\x8b\x01\n\x11\x44\x65leteAllByPrefix\x12:.com.scalableminds.fossildb.proto.DeleteAllByPrefixRequest\x1a\x38.com.scalableminds.fossildb.proto.DeleteAllByPrefixReply\"\x00\x12p\n\x08ListKeys\x12\x31.com.scalableminds.fossildb.proto.ListKeysRequest\x1a/.com.scalableminds.fossildb.proto.ListKeysReply\"\x00\x12|\n\x0cListVersions\x12\x35.com.scalableminds.fossildb.proto.ListVersionsRequest\x1a\x33.com.scalableminds.fossildb.proto.ListVersionsReply\"\x00\x12j\n\x06\x42\x61\x63kup\x12/.com.scalableminds.fossildb.proto.BackupRequest\x1a-.com.scalableminds.fossildb.pr
oto.BackupReply\"\x00\x12\x8b\x01\n\x11RestoreFromBackup\x12:.com.scalableminds.fossildb.proto.RestoreFromBackupRequest\x1a\x38.com.scalableminds.fossildb.proto.RestoreFromBackupReply\"\x00\x12\x82\x01\n\x0e\x43ompactAllData\x12\x37.com.scalableminds.fossildb.proto.CompactAllDataRequest\x1a\x35.com.scalableminds.fossildb.proto.CompactAllDataReply\"\x00\x12p\n\x08\x45xportDB\x12\x31.com.scalableminds.fossildb.proto.ExportDBRequest\x1a/.com.scalableminds.fossildb.proto.ExportDBReply\"\x00') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11\x66ossildbapi.proto\x12 com.scalableminds.fossildb.proto\"I\n\x1aVersionedKeyValuePairProto\x12\x0b\n\x03key\x18\x01 \x02(\t\x12\x0f\n\x07version\x18\x02 \x02(\x04\x12\r\n\x05value\x18\x03 \x02(\x0c\"}\n\x1aKeyVersionsValuesPairProto\x12\x0b\n\x03key\x18\x01 \x02(\t\x12R\n\x11versionValuePairs\x18\x02 \x03(\x0b\x32\x37.com.scalableminds.fossildb.proto.VersionValuePairProto\"=\n\x15VersionValuePairProto\x12\x15\n\ractualVersion\x18\x01 \x02(\x04\x12\r\n\x05value\x18\x02 \x02(\x0c\"\x0f\n\rHealthRequest\"4\n\x0bHealthReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"R\n\nGetRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\x12\n\nmayBeEmpty\x18\x04 \x01(\x08\"W\n\x08GetReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x02(\x0c\x12\x15\n\ractualVersion\x18\x04 \x02(\x04\"M\n\nPutRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x01(\x04\x12\r\n\x05value\x18\x04 \x02(\x0c\"1\n\x08PutReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"_\n\x1aPutMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\x12\x0e\n\x06values\x18\x04 
\x03(\x0c\"A\n\x18PutMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"\x9e\x01\n*PutMultipleKeysWithMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\\\n\x16versionedKeyValuePairs\x18\x02 \x03(\x0b\x32<.com.scalableminds.fossildb.proto.VersionedKeyValuePairProto\"Q\n(PutMultipleKeysWithMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"A\n\rDeleteRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x0f\n\x07version\x18\x03 \x02(\x04\"4\n\x0b\x44\x65leteReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\">\n\x18\x44\x65leteAllByPrefixRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0e\n\x06prefix\x18\x02 \x02(\t\"?\n\x16\x44\x65leteAllByPrefixReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"k\n\x1aGetMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"c\n\x18GetMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\x0c\x12\x10\n\x08versions\x18\x04 \x03(\x04\"s\n\x16GetMultipleKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x15\n\rstartAfterKey\x18\x02 \x01(\t\x12\x0e\n\x06prefix\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\x04\x12\r\n\x05limit\x18\x05 \x01(\r\"s\n\x14GetMultipleKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\x12\x0e\n\x06values\x18\x04 \x03(\x0c\x12\x16\n\x0e\x61\x63tualVersions\x18\x05 \x03(\x04\"\x82\x01\n0GetMultipleKeysByListWithMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0c\n\x04keys\x18\x02 \x03(\t\x12\x15\n\rnewestVersion\x18\x03 \x01(\x04\x12\x15\n\roldestVersion\x18\x04 
\x01(\x04\"\xb5\x01\n.GetMultipleKeysByListWithMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\\\n\x16keyVersionsValuesPairs\x18\x03 \x03(\x0b\x32<.com.scalableminds.fossildb.proto.KeyVersionsValuesPairProto\"n\n\x1d\x44\x65leteMultipleVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\x15\n\rnewestVersion\x18\x04 \x01(\x04\x12\x15\n\roldestVersion\x18\x03 \x01(\x04\"D\n\x1b\x44\x65leteMultipleVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"[\n\x0fListKeysRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\r\n\x05limit\x18\x02 \x01(\r\x12\x15\n\rstartAfterKey\x18\x03 \x01(\t\x12\x0e\n\x06prefix\x18\x04 \x01(\t\"D\n\rListKeysReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x0c\n\x04keys\x18\x03 \x03(\t\"U\n\x13ListVersionsRequest\x12\x12\n\ncollection\x18\x01 \x02(\t\x12\x0b\n\x03key\x18\x02 \x02(\t\x12\r\n\x05limit\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\"L\n\x11ListVersionsReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\x10\n\x08versions\x18\x03 \x03(\x04\"\x0f\n\rBackupRequest\"a\n\x0b\x42\x61\x63kupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x02(\r\x12\x11\n\ttimestamp\x18\x04 \x02(\x04\x12\x0c\n\x04size\x18\x05 \x02(\x04\"\x1a\n\x18RestoreFromBackupRequest\"?\n\x16RestoreFromBackupReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\"\x17\n\x15\x43ompactAllDataRequest\"<\n\x13\x43ompactAllDataReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 \x01(\t\":\n\x0f\x45xportDBRequest\x12\x12\n\nnewDataDir\x18\x01 \x02(\t\x12\x13\n\x0boptionsFile\x18\x02 \x01(\t\"6\n\rExportDBReply\x12\x0f\n\x07success\x18\x01 \x02(\x08\x12\x14\n\x0c\x65rrorMessage\x18\x02 
\x01(\t2\xfe\x11\n\x08\x46ossilDB\x12j\n\x06Health\x12/.com.scalableminds.fossildb.proto.HealthRequest\x1a-.com.scalableminds.fossildb.proto.HealthReply\"\x00\x12\x61\n\x03Get\x12,.com.scalableminds.fossildb.proto.GetRequest\x1a*.com.scalableminds.fossildb.proto.GetReply\"\x00\x12\x91\x01\n\x13GetMultipleVersions\x12<.com.scalableminds.fossildb.proto.GetMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.GetMultipleVersionsReply\"\x00\x12\x85\x01\n\x0fGetMultipleKeys\x12\x38.com.scalableminds.fossildb.proto.GetMultipleKeysRequest\x1a\x36.com.scalableminds.fossildb.proto.GetMultipleKeysReply\"\x00\x12\xd3\x01\n)GetMultipleKeysByListWithMultipleVersions\x12R.com.scalableminds.fossildb.proto.GetMultipleKeysByListWithMultipleVersionsRequest\x1aP.com.scalableminds.fossildb.proto.GetMultipleKeysByListWithMultipleVersionsReply\"\x00\x12\x61\n\x03Put\x12,.com.scalableminds.fossildb.proto.PutRequest\x1a*.com.scalableminds.fossildb.proto.PutReply\"\x00\x12\x91\x01\n\x13PutMultipleVersions\x12<.com.scalableminds.fossildb.proto.PutMultipleVersionsRequest\x1a:.com.scalableminds.fossildb.proto.PutMultipleVersionsReply\"\x00\x12\xc1\x01\n#PutMultipleKeysWithMultipleVersions\x12L.com.scalableminds.fossildb.proto.PutMultipleKeysWithMultipleVersionsRequest\x1aJ.com.scalableminds.fossildb.proto.PutMultipleKeysWithMultipleVersionsReply\"\x00\x12j\n\x06\x44\x65lete\x12/.com.scalableminds.fossildb.proto.DeleteRequest\x1a-.com.scalableminds.fossildb.proto.DeleteReply\"\x00\x12\x9a\x01\n\x16\x44\x65leteMultipleVersions\x12?.com.scalableminds.fossildb.proto.DeleteMultipleVersionsRequest\x1a=.com.scalableminds.fossildb.proto.DeleteMultipleVersionsReply\"\x00\x12\x8b\x01\n\x11\x44\x65leteAllByPrefix\x12:.com.scalableminds.fossildb.proto.DeleteAllByPrefixRequest\x1a\x38.com.scalableminds.fossildb.proto.DeleteAllByPrefixReply\"\x00\x12p\n\x08ListKeys\x12\x31.com.scalableminds.fossildb.proto.ListKeysRequest\x1a/.com.scalableminds.fossildb.proto.ListKeysReply\"\x00\x12|\n\x0cListVersion
s\x12\x35.com.scalableminds.fossildb.proto.ListVersionsRequest\x1a\x33.com.scalableminds.fossildb.proto.ListVersionsReply\"\x00\x12j\n\x06\x42\x61\x63kup\x12/.com.scalableminds.fossildb.proto.BackupRequest\x1a-.com.scalableminds.fossildb.proto.BackupReply\"\x00\x12\x8b\x01\n\x11RestoreFromBackup\x12:.com.scalableminds.fossildb.proto.RestoreFromBackupRequest\x1a\x38.com.scalableminds.fossildb.proto.RestoreFromBackupReply\"\x00\x12\x82\x01\n\x0e\x43ompactAllData\x12\x37.com.scalableminds.fossildb.proto.CompactAllDataRequest\x1a\x35.com.scalableminds.fossildb.proto.CompactAllDataReply\"\x00\x12p\n\x08\x45xportDB\x12\x31.com.scalableminds.fossildb.proto.ExportDBRequest\x1a/.com.scalableminds.fossildb.proto.ExportDBReply\"\x00') _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'fossildbapi_pb2', globals()) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - _HEALTHREQUEST._serialized_start=55 - _HEALTHREQUEST._serialized_end=70 - _HEALTHREPLY._serialized_start=72 - _HEALTHREPLY._serialized_end=124 - _GETREQUEST._serialized_start=126 - _GETREQUEST._serialized_end=208 - _GETREPLY._serialized_start=210 - _GETREPLY._serialized_end=297 - _PUTREQUEST._serialized_start=299 - _PUTREQUEST._serialized_end=376 - _PUTREPLY._serialized_start=378 - _PUTREPLY._serialized_end=427 - _PUTMULTIPLEVERSIONSREQUEST._serialized_start=429 - _PUTMULTIPLEVERSIONSREQUEST._serialized_end=524 - _PUTMULTIPLEVERSIONSREPLY._serialized_start=526 - _PUTMULTIPLEVERSIONSREPLY._serialized_end=591 - _DELETEREQUEST._serialized_start=593 - _DELETEREQUEST._serialized_end=658 - _DELETEREPLY._serialized_start=660 - _DELETEREPLY._serialized_end=712 - _DELETEALLBYPREFIXREQUEST._serialized_start=714 - _DELETEALLBYPREFIXREQUEST._serialized_end=776 - _DELETEALLBYPREFIXREPLY._serialized_start=778 - _DELETEALLBYPREFIXREPLY._serialized_end=841 - _GETMULTIPLEVERSIONSREQUEST._serialized_start=843 - 
_GETMULTIPLEVERSIONSREQUEST._serialized_end=950 - _GETMULTIPLEVERSIONSREPLY._serialized_start=952 - _GETMULTIPLEVERSIONSREPLY._serialized_end=1051 - _GETMULTIPLEKEYSREQUEST._serialized_start=1053 - _GETMULTIPLEKEYSREQUEST._serialized_end=1168 - _GETMULTIPLEKEYSREPLY._serialized_start=1170 - _GETMULTIPLEKEYSREPLY._serialized_end=1285 - _DELETEMULTIPLEVERSIONSREQUEST._serialized_start=1287 - _DELETEMULTIPLEVERSIONSREQUEST._serialized_end=1397 - _DELETEMULTIPLEVERSIONSREPLY._serialized_start=1399 - _DELETEMULTIPLEVERSIONSREPLY._serialized_end=1467 - _LISTKEYSREQUEST._serialized_start=1469 - _LISTKEYSREQUEST._serialized_end=1544 - _LISTKEYSREPLY._serialized_start=1546 - _LISTKEYSREPLY._serialized_end=1614 - _LISTVERSIONSREQUEST._serialized_start=1616 - _LISTVERSIONSREQUEST._serialized_end=1701 - _LISTVERSIONSREPLY._serialized_start=1703 - _LISTVERSIONSREPLY._serialized_end=1779 - _BACKUPREQUEST._serialized_start=1781 - _BACKUPREQUEST._serialized_end=1796 - _BACKUPREPLY._serialized_start=1798 - _BACKUPREPLY._serialized_end=1895 - _RESTOREFROMBACKUPREQUEST._serialized_start=1897 - _RESTOREFROMBACKUPREQUEST._serialized_end=1923 - _RESTOREFROMBACKUPREPLY._serialized_start=1925 - _RESTOREFROMBACKUPREPLY._serialized_end=1988 - _COMPACTALLDATAREQUEST._serialized_start=1990 - _COMPACTALLDATAREQUEST._serialized_end=2013 - _COMPACTALLDATAREPLY._serialized_start=2015 - _COMPACTALLDATAREPLY._serialized_end=2075 - _EXPORTDBREQUEST._serialized_start=2077 - _EXPORTDBREQUEST._serialized_end=2135 - _EXPORTDBREPLY._serialized_start=2137 - _EXPORTDBREPLY._serialized_end=2191 - _FOSSILDB._serialized_start=2194 - _FOSSILDB._serialized_end=4086 + _VERSIONEDKEYVALUEPAIRPROTO._serialized_start=55 + _VERSIONEDKEYVALUEPAIRPROTO._serialized_end=128 + _KEYVERSIONSVALUESPAIRPROTO._serialized_start=130 + _KEYVERSIONSVALUESPAIRPROTO._serialized_end=255 + _VERSIONVALUEPAIRPROTO._serialized_start=257 + _VERSIONVALUEPAIRPROTO._serialized_end=318 + _HEALTHREQUEST._serialized_start=320 + 
_HEALTHREQUEST._serialized_end=335 + _HEALTHREPLY._serialized_start=337 + _HEALTHREPLY._serialized_end=389 + _GETREQUEST._serialized_start=391 + _GETREQUEST._serialized_end=473 + _GETREPLY._serialized_start=475 + _GETREPLY._serialized_end=562 + _PUTREQUEST._serialized_start=564 + _PUTREQUEST._serialized_end=641 + _PUTREPLY._serialized_start=643 + _PUTREPLY._serialized_end=692 + _PUTMULTIPLEVERSIONSREQUEST._serialized_start=694 + _PUTMULTIPLEVERSIONSREQUEST._serialized_end=789 + _PUTMULTIPLEVERSIONSREPLY._serialized_start=791 + _PUTMULTIPLEVERSIONSREPLY._serialized_end=856 + _PUTMULTIPLEKEYSWITHMULTIPLEVERSIONSREQUEST._serialized_start=859 + _PUTMULTIPLEKEYSWITHMULTIPLEVERSIONSREQUEST._serialized_end=1017 + _PUTMULTIPLEKEYSWITHMULTIPLEVERSIONSREPLY._serialized_start=1019 + _PUTMULTIPLEKEYSWITHMULTIPLEVERSIONSREPLY._serialized_end=1100 + _DELETEREQUEST._serialized_start=1102 + _DELETEREQUEST._serialized_end=1167 + _DELETEREPLY._serialized_start=1169 + _DELETEREPLY._serialized_end=1221 + _DELETEALLBYPREFIXREQUEST._serialized_start=1223 + _DELETEALLBYPREFIXREQUEST._serialized_end=1285 + _DELETEALLBYPREFIXREPLY._serialized_start=1287 + _DELETEALLBYPREFIXREPLY._serialized_end=1350 + _GETMULTIPLEVERSIONSREQUEST._serialized_start=1352 + _GETMULTIPLEVERSIONSREQUEST._serialized_end=1459 + _GETMULTIPLEVERSIONSREPLY._serialized_start=1461 + _GETMULTIPLEVERSIONSREPLY._serialized_end=1560 + _GETMULTIPLEKEYSREQUEST._serialized_start=1562 + _GETMULTIPLEKEYSREQUEST._serialized_end=1677 + _GETMULTIPLEKEYSREPLY._serialized_start=1679 + _GETMULTIPLEKEYSREPLY._serialized_end=1794 + _GETMULTIPLEKEYSBYLISTWITHMULTIPLEVERSIONSREQUEST._serialized_start=1797 + _GETMULTIPLEKEYSBYLISTWITHMULTIPLEVERSIONSREQUEST._serialized_end=1927 + _GETMULTIPLEKEYSBYLISTWITHMULTIPLEVERSIONSREPLY._serialized_start=1930 + _GETMULTIPLEKEYSBYLISTWITHMULTIPLEVERSIONSREPLY._serialized_end=2111 + _DELETEMULTIPLEVERSIONSREQUEST._serialized_start=2113 + _DELETEMULTIPLEVERSIONSREQUEST._serialized_end=2223 + 
_DELETEMULTIPLEVERSIONSREPLY._serialized_start=2225 + _DELETEMULTIPLEVERSIONSREPLY._serialized_end=2293 + _LISTKEYSREQUEST._serialized_start=2295 + _LISTKEYSREQUEST._serialized_end=2386 + _LISTKEYSREPLY._serialized_start=2388 + _LISTKEYSREPLY._serialized_end=2456 + _LISTVERSIONSREQUEST._serialized_start=2458 + _LISTVERSIONSREQUEST._serialized_end=2543 + _LISTVERSIONSREPLY._serialized_start=2545 + _LISTVERSIONSREPLY._serialized_end=2621 + _BACKUPREQUEST._serialized_start=2623 + _BACKUPREQUEST._serialized_end=2638 + _BACKUPREPLY._serialized_start=2640 + _BACKUPREPLY._serialized_end=2737 + _RESTOREFROMBACKUPREQUEST._serialized_start=2739 + _RESTOREFROMBACKUPREQUEST._serialized_end=2765 + _RESTOREFROMBACKUPREPLY._serialized_start=2767 + _RESTOREFROMBACKUPREPLY._serialized_end=2830 + _COMPACTALLDATAREQUEST._serialized_start=2832 + _COMPACTALLDATAREQUEST._serialized_end=2855 + _COMPACTALLDATAREPLY._serialized_start=2857 + _COMPACTALLDATAREPLY._serialized_end=2917 + _EXPORTDBREQUEST._serialized_start=2919 + _EXPORTDBREQUEST._serialized_end=2977 + _EXPORTDBREPLY._serialized_start=2979 + _EXPORTDBREPLY._serialized_end=3033 + _FOSSILDB._serialized_start=3036 + _FOSSILDB._serialized_end=5338 # @@protoc_insertion_point(module_scope) diff --git a/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py index afb3aac4a4a..192102160db 100644 --- a/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py +++ b/tools/migration-unified-annotation-versioning/fossildbapi_pb2_grpc.py @@ -34,6 +34,11 @@ def __init__(self, channel): request_serializer=fossildbapi__pb2.GetMultipleKeysRequest.SerializeToString, response_deserializer=fossildbapi__pb2.GetMultipleKeysReply.FromString, ) + self.GetMultipleKeysByListWithMultipleVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleKeysByListWithMultipleVersions', + 
request_serializer=fossildbapi__pb2.GetMultipleKeysByListWithMultipleVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.GetMultipleKeysByListWithMultipleVersionsReply.FromString, + ) self.Put = channel.unary_unary( '/com.scalableminds.fossildb.proto.FossilDB/Put', request_serializer=fossildbapi__pb2.PutRequest.SerializeToString, @@ -44,6 +49,11 @@ def __init__(self, channel): request_serializer=fossildbapi__pb2.PutMultipleVersionsRequest.SerializeToString, response_deserializer=fossildbapi__pb2.PutMultipleVersionsReply.FromString, ) + self.PutMultipleKeysWithMultipleVersions = channel.unary_unary( + '/com.scalableminds.fossildb.proto.FossilDB/PutMultipleKeysWithMultipleVersions', + request_serializer=fossildbapi__pb2.PutMultipleKeysWithMultipleVersionsRequest.SerializeToString, + response_deserializer=fossildbapi__pb2.PutMultipleKeysWithMultipleVersionsReply.FromString, + ) self.Delete = channel.unary_unary( '/com.scalableminds.fossildb.proto.FossilDB/Delete', request_serializer=fossildbapi__pb2.DeleteRequest.SerializeToString, @@ -118,6 +128,12 @@ def GetMultipleKeys(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def GetMultipleKeysByListWithMultipleVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Put(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -130,6 +146,12 @@ def PutMultipleVersions(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def PutMultipleKeysWithMultipleVersions(self, request, context): + """Missing associated documentation comment in .proto file.""" + 
context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def Delete(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -207,6 +229,11 @@ def add_FossilDBServicer_to_server(servicer, server): request_deserializer=fossildbapi__pb2.GetMultipleKeysRequest.FromString, response_serializer=fossildbapi__pb2.GetMultipleKeysReply.SerializeToString, ), + 'GetMultipleKeysByListWithMultipleVersions': grpc.unary_unary_rpc_method_handler( + servicer.GetMultipleKeysByListWithMultipleVersions, + request_deserializer=fossildbapi__pb2.GetMultipleKeysByListWithMultipleVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.GetMultipleKeysByListWithMultipleVersionsReply.SerializeToString, + ), 'Put': grpc.unary_unary_rpc_method_handler( servicer.Put, request_deserializer=fossildbapi__pb2.PutRequest.FromString, @@ -217,6 +244,11 @@ def add_FossilDBServicer_to_server(servicer, server): request_deserializer=fossildbapi__pb2.PutMultipleVersionsRequest.FromString, response_serializer=fossildbapi__pb2.PutMultipleVersionsReply.SerializeToString, ), + 'PutMultipleKeysWithMultipleVersions': grpc.unary_unary_rpc_method_handler( + servicer.PutMultipleKeysWithMultipleVersions, + request_deserializer=fossildbapi__pb2.PutMultipleKeysWithMultipleVersionsRequest.FromString, + response_serializer=fossildbapi__pb2.PutMultipleKeysWithMultipleVersionsReply.SerializeToString, + ), 'Delete': grpc.unary_unary_rpc_method_handler( servicer.Delete, request_deserializer=fossildbapi__pb2.DeleteRequest.FromString, @@ -340,6 +372,23 @@ def GetMultipleKeys(request, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod + def GetMultipleKeysByListWithMultipleVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + 
insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/GetMultipleKeysByListWithMultipleVersions', + fossildbapi__pb2.GetMultipleKeysByListWithMultipleVersionsRequest.SerializeToString, + fossildbapi__pb2.GetMultipleKeysByListWithMultipleVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod def Put(request, target, @@ -374,6 +423,23 @@ def PutMultipleVersions(request, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod + def PutMultipleKeysWithMultipleVersions(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/com.scalableminds.fossildb.proto.FossilDB/PutMultipleKeysWithMultipleVersions', + fossildbapi__pb2.PutMultipleKeysWithMultipleVersionsRequest.SerializeToString, + fossildbapi__pb2.PutMultipleKeysWithMultipleVersionsReply.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + @staticmethod def Delete(request, target, diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 24d5c2c6f82..456a1780f77 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -14,12 +14,13 @@ from functools import partial import heapq import sys +from viztracer import VizTracer import fossildbapi_pb2 as proto import VolumeTracing_pb2 as Volume import SkeletonTracing_pb2 as Skeleton import Annotation_pb2 as AnnotationProto -from utils import log_since, batch_range, 
format_duration, time_str +from utils import log_since, batch_range, batch_list, format_duration, time_str from connections import connect_to_fossildb, connect_to_postgres, assert_grpc_success logger = logging.getLogger("migration-logs") @@ -73,16 +74,20 @@ def migrate_annotation(self, annotation): if versions > 1: logger.info(f"{versions} versions for {annotation['_id']}{self.get_progress()}") else: + #tracer = VizTracer(tracer_entries=10000000) + #tracer.start() if self.args.verbose: logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") mapping_id_map = self.build_mapping_id_map(annotation) - if self.includes_revert(annotation) and self.args.previous_start is not None: + if self.args.previous_start is not None and self.includes_revert(annotation): self.clean_up_previously_migrated(annotation, mapping_id_map) layer_version_mapping = self.migrate_updates(annotation, mapping_id_map) materialized_versions = self.migrate_materialized_layers(annotation, layer_version_mapping, mapping_id_map) if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") self.create_and_save_annotation_proto(annotation, materialized_versions, mapping_id_map) + #tracer.stop() + #tracer.save() if time.time() - before > 1 or self.args.verbose: log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) checkpoint_logger.info(annotation['_id']) @@ -109,10 +114,10 @@ def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: updates_for_layer = [] included_revert = False next_version = newest_version - for batch_start, batch_end in reversed(list(batch_range(newest_version, batch_size))): + for batch_start, batch_end in reversed(list(batch_range(newest_version + 1, batch_size))): # TODO check overlaps? 
if batch_start > next_version: continue - update_groups = self.get_update_batch(tracing_or_mapping_id, collection, batch_start, batch_end) + update_groups = self.get_update_batch(tracing_or_mapping_id, collection, batch_start, batch_end - 1) for version, update_group in reversed(update_groups): if version > next_version: continue @@ -128,6 +133,7 @@ def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: return updates_for_layer, included_revert def includes_revert(self, annotation) -> bool: + logger.info("checking if includes revert") json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() layers = list(annotation["layers"].items()) @@ -139,6 +145,9 @@ def includes_revert(self, annotation) -> bool: return False def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: + put_updates_buffer_size = 500 + logger.info("migrating updates...") + before = time.time() all_update_groups = [] json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() @@ -154,12 +163,15 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers layer_updates, _ = self.fetch_updates(mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) all_update_groups.append(layer_updates) tracing_ids_and_mapping_ids.append(mapping_id) + log_since(before, "fetch updates") unified_version = 0 version_mapping = {} for tracing_or_mapping_id in tracing_ids_and_mapping_ids: version_mapping[tracing_or_mapping_id] = {0: 0} # We always want to keep the initial version 0 of all layers, even if there are no updates at all. + buffered_versions_to_put = [] + buffered_updates_to_put = [] # We use a priority queue to efficiently select which tracing each next update should come from. 
# This effectively implements a merge sort queue = [] @@ -174,12 +186,23 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers unified_version += 1 version_mapping[tracing_or_mapping_id][version] = unified_version - self.save_update_group(annotation['_id'], unified_version, update_group) + buffered_versions_to_put.append(unified_version) + buffered_updates_to_put.append(update_group) + if len(buffered_versions_to_put) >= put_updates_buffer_size: + # flush + self.save_update_groups(annotation['_id'], buffered_versions_to_put, buffered_updates_to_put) + buffered_versions_to_put = [] + buffered_updates_to_put = [] if element_index + 1 < len(all_update_groups[layer_index]): next_element = all_update_groups[layer_index][element_index + 1] heapq.heappush(queue, (next_element, layer_index, element_index + 1)) + if len(buffered_versions_to_put) > 0: + # flush + self.save_update_groups(annotation['_id'], buffered_versions_to_put, buffered_updates_to_put) + + log_since(before, "updates total") return version_mapping def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> Optional[str]: @@ -246,8 +269,8 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra return json_encoder.encode(update_group_parsed), action_timestamp, revert_source_version - def save_update_group(self, annotation_id: str, version: int, update_group_raw: bytes) -> None: - self.save_bytes(collection="annotationUpdates", key=annotation_id, version=version, value=update_group_raw) + def save_update_groups(self, annotation_id: str, versions: List[int], update_groups_raw: List[bytes]) -> None: + self.save_multiple_versions(collection="annotationUpdates", key=annotation_id, versions=versions, values=update_groups_raw) def get_newest_version(self, tracing_id: str, collection: str) -> int: getReply = self.src_stub.Get( @@ -280,6 +303,7 @@ def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_map return 
materialized_versions def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: + logger.info(f"migrating materialized {layer_type} layer {tracing_id}...") if layer_type == "Skeleton": return self.migrate_skeleton_proto(tracing_id, layer_version_mapping) if layer_type == "Volume": @@ -308,9 +332,44 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe return materialized_versions_unified def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): + volume_proto_page_size = 500 + logger.info("migrating volume protos...") + collection = "volumes" + materialized_versions_unified = [] + before = time.time() + newest_tracing_version = max(layer_version_mapping[tracing_id].keys()) + for version_range_start, version_range_end in batch_range(newest_tracing_version + 1, volume_proto_page_size): + reply = self.src_stub.GetMultipleVersions( + proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, oldestVersion=version_range_start, newestVersion=version_range_end - 1) + ) + versions_to_put = [] + values_to_put = [] + for (materialized_version, value_bytes) in zip(reply.versions, reply.values): + if materialized_version not in layer_version_mapping[tracing_id]: + continue + new_version = layer_version_mapping[tracing_id][materialized_version] + if materialized_version != new_version or tracing_id in mapping_id_map: + volume = Volume.VolumeTracing() + volume.ParseFromString(value_bytes) + volume.version = new_version + if tracing_id in mapping_id_map: + volume.mappingName = tracing_id + value_bytes = volume.SerializeToString() + materialized_versions_unified.append(new_version) + versions_to_put.append(new_version) + values_to_put.append(value_bytes) + self.put_multiple_versions(collection, tracing_id, versions_to_put, values_to_put) + log_since(before, "volume proto 
total") + return materialized_versions_unified + + def migrate_volume_proto_LEGACY(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): + logger.info("migrating volume protos...") collection = "volumes" materialized_versions_unified = [] + before = time.time() materialized_versions = self.list_versions(collection, tracing_id) + print(materialized_versions) + log_since(before, "list versions volume proto") for materialized_version in materialized_versions: if materialized_version not in layer_version_mapping[tracing_id]: continue @@ -325,6 +384,7 @@ def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVers value_bytes = volume.SerializeToString() materialized_versions_unified.append(new_version) self.save_bytes(collection, tracing_id, new_version, value_bytes) + log_since(before, "volume proto total") return materialized_versions_unified def list_versions(self, collection, key) -> List[int]: @@ -342,15 +402,30 @@ def save_bytes(self, collection: str, key: str, version: int, value: bytes) -> N reply = self.dst_stub.Put(proto.PutRequest(collection=collection, key=key, version=version, value=value)) assert_grpc_success(reply) + def put_multiple_versions(self, collection: str, key: str, versions: List[int], values: List[bytes]) -> None: + if self.dst_stub is not None: + reply = self.dst_stub.PutMultipleVersions(proto.PutMultipleVersionsRequest(collection=collection, key=key, versions=versions, values=values)) + assert_grpc_success(reply) + + def put_multiple_keys_versions(self, collection: str, to_put) -> None: + if self.dst_stub is not None: + reply = self.dst_stub.PutMultipleKeysWithMultipleVersions(proto.PutMultipleKeysWithMultipleVersionsRequest(collection=collection, versionedKeyValuePairs = to_put)) + assert_grpc_success(reply) + + # TODO remove if multi-put is faster def save_multiple_versions(self, collection: str, key: str, versions: List[int], values: List[bytes]) -> None: if self.dst_stub is 
not None: reply = self.dst_stub.PutMultipleVersions(proto.PutMultipleVersionsRequest(collection=collection, key=key, versions=versions, values=values)) assert_grpc_success(reply) def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): + logger.info("migrating volume buckets...") + before = time.time() self.migrate_all_versions_and_keys_with_prefix("volumeData", tracing_id, layer_version_mapping, transform_key=self.remove_morton_index) + log_since(before, "migrating volume buckets") - def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_or_mapping_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): + + def migrate_all_versions_and_keys_with_prefix_LEGACY(self, collection: str, tracing_or_mapping_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): list_keys_page_size = 5000 versions_page_size = 500 current_start_after_key = tracing_or_mapping_id + "." # . is lexicographically before / @@ -383,12 +458,49 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_or_ # We iterated past the elements of the current tracing return + + def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_or_mapping_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): + list_keys_page_size = 10000 + get_keys_page_size = 500 + current_start_after_key = tracing_or_mapping_id + "." # . 
is lexicographically before / + while True: + list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key, prefix=tracing_or_mapping_id)) + assert_grpc_success(list_keys_reply) + if len(list_keys_reply.keys) == 0: + # We iterated towards the very end of the collection + return + for key_batch in batch_list(list_keys_reply.keys, get_keys_page_size): + get_keys_with_versions_reply = self.src_stub.GetMultipleKeysByListWithMultipleVersions(proto.GetMultipleKeysByListWithMultipleVersionsRequest(collection=collection, keys=key_batch)) + assert_grpc_success(get_keys_with_versions_reply) + to_put = [] + for keyVersionsValuesPair in get_keys_with_versions_reply.keyVersionsValuesPairs: + key = keyVersionsValuesPair.key + if not key.startswith(tracing_or_mapping_id): + raise Exception(f"key does not stat with tracing/mapping id: {key}") + new_key = key + if transform_key is not None: + new_key = transform_key(key) + for version_value_pair in keyVersionsValuesPair.versionValuePairs: + if version_value_pair.actualVersion not in layer_version_mapping[tracing_or_mapping_id]: + continue + new_version = layer_version_mapping[tracing_or_mapping_id][version_value_pair.actualVersion] + versioned_key_value_pair = proto.VersionedKeyValuePairProto() + versioned_key_value_pair.key = key + versioned_key_value_pair.version = new_version + versioned_key_value_pair.value = version_value_pair.value + to_put.append(versioned_key_value_pair) + + self.put_multiple_keys_versions(collection, to_put) + current_start_after_key = list_keys_reply.keys[-1] + def migrate_segment_index(self, tracing_id, layer_version_mapping): + logger.info("migrating volume segment index...") self.migrate_all_versions_and_keys_with_prefix("volumeSegmentIndex", tracing_id, layer_version_mapping, transform_key=None) def migrate_editable_mapping(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: 
MappingIdMap) -> List[int]: if tracing_id not in mapping_id_map: return [] + logger.info(f"migrating editable mapping of tracing {tracing_id}...") mapping_id = mapping_id_map[tracing_id] materialized_versions = self.migrate_editable_mapping_info(tracing_id, mapping_id, layer_version_mapping) self.migrate_editable_mapping_agglomerate_to_graph(tracing_id, mapping_id, layer_version_mapping) @@ -425,6 +537,7 @@ def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mappi ) def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int], mapping_id_map: MappingIdMap): + logger.info("writing annotationProtos...") skeleton_may_have_pending_updates = self.skeleton_may_have_pending_updates(annotation) editable_mapping_may_have_pending_updates = bool(mapping_id_map) # same problem as with skeletons, see comment there earliest_accessible_version = 0 diff --git a/tools/migration-unified-annotation-versioning/utils.py b/tools/migration-unified-annotation-versioning/utils.py index 91def43fcba..dcb2f1b6829 100644 --- a/tools/migration-unified-annotation-versioning/utils.py +++ b/tools/migration-unified-annotation-versioning/utils.py @@ -1,6 +1,6 @@ import logging import time -from typing import Iterator, Tuple +from typing import Iterator, Tuple, List import sys from math import floor, ceil from datetime import datetime @@ -48,6 +48,11 @@ def batch_range( return +def batch_list(lst: List, batch_size: int) -> Iterator[List]: + for start, end in batch_range(len(lst), batch_size): + yield lst[start:end] + + def format_duration(seconds: float) -> str: def pluralize(string: str, amount: int) -> str: return string if amount == 1 else string + "s" From 061d4095eff97a20477dc57d415add8005d05ef1 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 9 Jan 2025 14:52:17 +0100 Subject: [PATCH 355/361] prepare merge for import sorting pr --- biome.json | 17 +- frontend/javascripts/admin/admin_rest_api.ts | 102 ++++---- 
frontend/javascripts/admin/api/folders.ts | 2 +- frontend/javascripts/admin/api/jobs.ts | 6 +- frontend/javascripts/admin/api/tasks.ts | 18 +- frontend/javascripts/admin/api/token.ts | 2 +- .../admin/auth/accept_invite_view.tsx | 14 +- .../admin/auth/auth_token_view.tsx | 6 +- .../admin/auth/authentication_modal.tsx | 14 +- .../admin/auth/change_password_view.tsx | 6 +- .../admin/auth/finish_reset_password_view.tsx | 6 +- .../javascripts/admin/auth/login_form.tsx | 12 +- .../javascripts/admin/auth/login_view.tsx | 4 +- .../admin/auth/registration_form_generic.tsx | 18 +- .../admin/auth/registration_form_wkorg.tsx | 14 +- .../admin/auth/registration_view.tsx | 14 +- .../admin/auth/start_reset_password_view.tsx | 6 +- .../admin/auth/verify_email_view.tsx | 8 +- .../composition_wizard/02_upload_files.tsx | 8 +- .../composition_wizard/03_select_datasets.tsx | 4 +- .../04_configure_new_dataset.tsx | 10 +- .../dataset/composition_wizard/common.ts | 2 +- .../admin/dataset/dataset_add_remote_view.tsx | 48 ++-- .../admin/dataset/dataset_add_view.tsx | 22 +- .../admin/dataset/dataset_components.tsx | 10 +- .../admin/dataset/dataset_upload_view.tsx | 92 +++---- .../admin/dataset/dataset_url_import.tsx | 2 +- .../admin/datastore_health_check.ts | 4 +- frontend/javascripts/admin/job/job_hooks.ts | 4 +- .../javascripts/admin/job/job_list_view.tsx | 30 +-- frontend/javascripts/admin/onboarding.tsx | 38 +-- .../admin/organization/organization_cards.tsx | 4 +- .../organization/organization_edit_view.tsx | 22 +- .../admin/organization/upgrade_plan_modal.tsx | 18 +- .../admin/project/project_create_view.tsx | 16 +- .../admin/project/project_list_view.tsx | 50 ++-- .../project/transfer_all_tasks_modal.tsx | 14 +- .../admin/scripts/script_create_view.tsx | 20 +- .../admin/scripts/script_list_view.tsx | 16 +- .../statistic/available_tasks_report_view.tsx | 8 +- .../project_and_annotation_type_dropdown.tsx | 10 +- .../project_progress_report_view.tsx | 8 +- 
.../admin/statistic/team_selection_form.tsx | 4 +- .../statistic/time_tracking_detail_view.tsx | 8 +- .../statistic/time_tracking_overview.tsx | 28 +- .../admin/task/task_annotation_view.tsx | 38 +-- .../admin/task/task_create_bulk_view.tsx | 14 +- .../admin/task/task_create_form_view.tsx | 76 +++--- .../admin/task/task_create_view.tsx | 2 +- .../javascripts/admin/task/task_list_view.tsx | 34 +-- .../admin/task/task_search_form.tsx | 10 +- .../recommended_configuration_view.tsx | 12 +- .../admin/tasktype/task_type_create_view.tsx | 34 +-- .../admin/tasktype/task_type_list_view.tsx | 34 +-- .../admin/team/create_team_modal_view.tsx | 2 +- .../admin/team/edit_team_modal_view.tsx | 4 +- .../javascripts/admin/team/team_list_view.tsx | 20 +- .../admin/user/experience_modal_view.tsx | 10 +- .../user/permissions_and_teams_modal_view.tsx | 12 +- .../javascripts/admin/user/user_list_view.tsx | 48 ++-- .../admin/user/user_selection_component.tsx | 6 +- .../admin/voxelytics/ai_model_list_view.tsx | 22 +- .../voxelytics/artifacts_disk_usage_list.tsx | 8 +- .../admin/voxelytics/artifacts_view.tsx | 8 +- .../javascripts/admin/voxelytics/dag_view.tsx | 12 +- .../javascripts/admin/voxelytics/log_tab.tsx | 14 +- .../admin/voxelytics/statistics_tab.tsx | 14 +- .../admin/voxelytics/task_list_view.tsx | 72 ++--- .../admin/voxelytics/task_view.tsx | 4 +- .../admin/voxelytics/workflow_list_view.tsx | 22 +- .../admin/voxelytics/workflow_view.tsx | 16 +- frontend/javascripts/admin/welcome_ui.tsx | 8 +- frontend/javascripts/banners.tsx | 2 +- .../components/async_clickables.tsx | 2 +- .../javascripts/components/brain_spinner.tsx | 10 +- .../javascripts/components/color_picker.tsx | 6 +- .../components/disable_generic_dnd.ts | 2 +- frontend/javascripts/components/legal.tsx | 4 +- frontend/javascripts/components/loop.ts | 2 +- .../components/permission_enforcer.tsx | 2 +- .../components/pricing_enforcers.tsx | 20 +- frontend/javascripts/components/redirect.tsx | 4 +- 
.../javascripts/components/secured_route.tsx | 14 +- .../components/select_experience_domain.tsx | 2 +- .../components/terms_of_services_check.tsx | 12 +- .../components/text_with_description.tsx | 2 +- .../create_explorative_modal.tsx | 18 +- .../dataset_access_list_view.tsx | 6 +- .../advanced_dataset/dataset_action_view.tsx | 20 +- .../advanced_dataset/dataset_table.tsx | 60 ++--- .../dashboard/dashboard_task_list_view.tsx | 24 +- .../javascripts/dashboard/dashboard_view.tsx | 38 +-- .../color_layer_ordering_component.tsx | 8 +- .../dataset/dataset_collection_context.tsx | 24 +- .../dataset/dataset_settings_data_tab.tsx | 42 +-- .../dataset/dataset_settings_delete_tab.tsx | 8 +- .../dataset/dataset_settings_metadata_tab.tsx | 2 +- .../dataset/dataset_settings_sharing_tab.tsx | 26 +- .../dataset/dataset_settings_view.tsx | 62 ++--- .../dataset_settings_viewconfig_tab.tsx | 30 +-- .../dashboard/dataset/helper_components.tsx | 10 +- .../javascripts/dashboard/dataset/queries.tsx | 8 +- .../dataset/team_selection_component.tsx | 4 +- .../dashboard/dataset_folder_view.tsx | 12 +- .../javascripts/dashboard/dataset_view.tsx | 54 ++-- .../explorative_annotations_view.tsx | 90 +++---- .../dashboard/folders/details_sidebar.tsx | 16 +- .../dashboard/folders/folder_selection.tsx | 4 +- .../dashboard/folders/folder_tree.tsx | 10 +- .../dashboard/folders/metadata_table.tsx | 12 +- .../dashboard/publication_card.tsx | 18 +- .../dashboard/publication_details_view.tsx | 6 +- .../dashboard/publication_view.tsx | 12 +- .../dashboard/transfer_task_modal.tsx | 8 +- frontend/javascripts/libs/DRACOLoader.ts | 2 +- .../libs/async/debounced_abortable_saga.ts | 4 +- frontend/javascripts/libs/async/task_pool.ts | 2 +- .../libs/cuckoo/abstract_cuckoo_table.ts | 2 +- .../libs/cuckoo/cuckoo_table_uint32.ts | 2 +- .../libs/cuckoo/cuckoo_table_uint64.ts | 2 +- frontend/javascripts/libs/draco.ts | 2 +- frontend/javascripts/libs/error_handling.ts | 6 +- frontend/javascripts/libs/format_utils.ts | 
20 +- frontend/javascripts/libs/gist.ts | 2 +- frontend/javascripts/libs/input.ts | 6 +- frontend/javascripts/libs/persistence.ts | 2 +- frontend/javascripts/libs/react_helpers.tsx | 6 +- frontend/javascripts/libs/react_hooks.ts | 2 +- .../javascripts/libs/render_independently.tsx | 4 +- frontend/javascripts/libs/request.ts | 10 +- .../javascripts/libs/shortcut_component.ts | 2 +- frontend/javascripts/libs/toast.tsx | 2 +- .../javascripts/libs/trackball_controls.ts | 2 +- frontend/javascripts/libs/utils.ts | 18 +- frontend/javascripts/libs/vector_input.tsx | 8 +- frontend/javascripts/libs/window.ts | 2 +- frontend/javascripts/main.tsx | 36 +-- frontend/javascripts/messages.tsx | 2 +- frontend/javascripts/navbar.tsx | 84 +++--- frontend/javascripts/oxalis/api/api_latest.ts | 246 +++++++++--------- frontend/javascripts/oxalis/api/api_loader.ts | 2 +- .../oxalis/api/cross_origin_api.ts | 2 +- .../javascripts/oxalis/api/internal_api.ts | 2 +- frontend/javascripts/oxalis/api/wk_dev.ts | 10 +- frontend/javascripts/oxalis/controller.tsx | 50 ++-- .../oxalis/controller/camera_controller.ts | 20 +- .../combinations/bounding_box_handlers.ts | 18 +- .../controller/combinations/move_handlers.ts | 14 +- .../combinations/segmentation_handlers.ts | 16 +- .../combinations/skeleton_handlers.ts | 54 ++-- .../controller/combinations/tool_controls.ts | 72 ++--- .../combinations/volume_handlers.ts | 14 +- .../oxalis/controller/custom_lod.ts | 4 +- .../controller/merger_mode_controller.tsx | 6 +- .../javascripts/oxalis/controller/renderer.ts | 2 +- .../oxalis/controller/scene_controller.ts | 2 +- .../controller/segment_mesh_controller.ts | 8 +- .../oxalis/controller/td_controller.tsx | 40 +-- .../oxalis/controller/url_manager.ts | 30 +-- .../viewmodes/arbitrary_controller.tsx | 58 ++--- .../controller/viewmodes/plane_controller.tsx | 102 ++++---- frontend/javascripts/oxalis/default_state.ts | 8 +- .../oxalis/geometries/arbitrary_plane.ts | 8 +- .../oxalis/geometries/crosshair.ts | 2 +- 
.../javascripts/oxalis/geometries/cube.ts | 8 +- .../oxalis/geometries/helper_geometries.ts | 8 +- .../geometries/materials/edge_shader.ts | 12 +- .../geometries/materials/node_shader.ts | 16 +- .../materials/plane_material_factory.ts | 60 ++--- .../plane_material_factory_helpers.ts | 2 +- .../javascripts/oxalis/geometries/plane.ts | 10 +- .../javascripts/oxalis/geometries/skeleton.ts | 14 +- frontend/javascripts/oxalis/merger_mode.ts | 24 +- frontend/javascripts/oxalis/model.ts | 14 +- .../model/accessors/dataset_accessor.ts | 50 ++-- .../oxalis/model/accessors/flycam_accessor.ts | 42 +-- .../model/accessors/organization_accessors.ts | 2 +- .../accessors/skeletontracing_accessor.ts | 32 +-- .../oxalis/model/accessors/tool_accessor.ts | 24 +- .../model/accessors/tracing_accessor.ts | 4 +- .../oxalis/model/accessors/user_accessor.ts | 4 +- .../model/accessors/view_mode_accessor.ts | 18 +- .../model/accessors/volumetracing_accessor.ts | 70 ++--- .../oxalis/model/actions/actions.ts | 8 +- .../model/actions/annotation_actions.ts | 22 +- .../oxalis/model/actions/flycam_actions.ts | 2 +- .../oxalis/model/actions/proofread_actions.ts | 2 +- .../oxalis/model/actions/save_actions.ts | 10 +- .../oxalis/model/actions/settings_actions.ts | 8 +- .../model/actions/skeletontracing_actions.tsx | 2 +- .../oxalis/model/actions/ui_actions.ts | 2 +- .../oxalis/model/actions/view_mode_actions.ts | 4 +- .../model/actions/volumetracing_actions.ts | 10 +- .../bucket_data_handling/bounding_box.ts | 4 +- .../model/bucket_data_handling/bucket.ts | 24 +- .../flight_bucket_picker.ts | 8 +- .../oblique_bucket_picker.ts | 10 +- .../bucket_data_handling/bucket_traversals.ts | 4 +- .../model/bucket_data_handling/data_cube.ts | 44 ++-- .../data_rendering_logic.tsx | 6 +- .../layer_rendering_manager.ts | 38 +-- .../model/bucket_data_handling/mappings.ts | 20 +- .../prefetch_strategy_arbitrary.ts | 8 +- .../prefetch_strategy_plane.ts | 12 +- .../model/bucket_data_handling/pullqueue.ts | 6 +- 
.../model/bucket_data_handling/pushqueue.ts | 10 +- .../texture_bucket_manager.ts | 22 +- .../bucket_data_handling/wkstore_adapter.ts | 30 +-- .../oxalis/model/data_connection_info.ts | 2 +- .../javascripts/oxalis/model/data_layer.ts | 2 +- .../oxalis/model/edge_collection.ts | 4 +- .../model/helpers/action_logger_middleware.ts | 4 +- .../model/helpers/bucket_compression.ts | 2 +- .../compaction/compact_toggle_actions.ts | 4 +- .../compaction/compact_update_actions.ts | 6 +- .../oxalis/model/helpers/deep_update_test.ts | 2 +- .../oxalis/model/helpers/nml_helpers.ts | 50 ++-- .../helpers/overwrite_action_middleware.ts | 2 +- .../model/helpers/position_converter.ts | 2 +- .../oxalis/model/helpers/proto_helpers.ts | 12 +- .../oxalis/model/helpers/reduce_reducers.ts | 2 +- .../model/reducers/annotation_reducer.ts | 14 +- .../model/reducers/connectome_reducer.ts | 8 +- .../oxalis/model/reducers/dataset_reducer.ts | 10 +- .../oxalis/model/reducers/flycam_reducer.ts | 12 +- .../oxalis/model/reducers/reducer_helpers.ts | 32 +-- .../oxalis/model/reducers/save_reducer.ts | 10 +- .../oxalis/model/reducers/settings_reducer.ts | 12 +- .../model/reducers/skeletontracing_reducer.ts | 56 ++-- .../skeletontracing_reducer_helpers.ts | 66 ++--- .../oxalis/model/reducers/ui_reducer.ts | 4 +- .../model/reducers/view_mode_reducer.ts | 4 +- .../model/reducers/volumetracing_reducer.ts | 74 +++--- .../reducers/volumetracing_reducer_helpers.ts | 20 +- .../oxalis/model/sagas/annotation_saga.tsx | 60 ++--- .../model/sagas/annotation_tool_saga.ts | 14 +- .../oxalis/model/sagas/clip_histogram_saga.ts | 12 +- .../oxalis/model/sagas/dataset_saga.ts | 16 +- .../oxalis/model/sagas/effect-generators.ts | 4 +- .../model/sagas/load_histogram_data_saga.ts | 12 +- .../oxalis/model/sagas/mapping_saga.ts | 104 ++++---- .../oxalis/model/sagas/mesh_saga.ts | 106 ++++---- .../oxalis/model/sagas/min_cut_saga.ts | 28 +- .../oxalis/model/sagas/prefetch_saga.ts | 26 +- .../oxalis/model/sagas/proofread_saga.ts | 
106 ++++---- .../sagas/quick_select_heuristic_saga.ts | 38 +-- .../model/sagas/quick_select_ml_saga.ts | 24 +- .../oxalis/model/sagas/quick_select_saga.ts | 14 +- .../oxalis/model/sagas/root_saga.ts | 28 +- .../oxalis/model/sagas/saga_helpers.ts | 8 +- .../oxalis/model/sagas/saga_selectors.ts | 10 +- .../oxalis/model/sagas/save_saga.ts | 4 +- .../oxalis/model/sagas/settings_saga.ts | 14 +- .../model/sagas/skeletontracing_saga.ts | 114 ++++---- .../oxalis/model/sagas/task_saga.tsx | 30 +-- .../oxalis/model/sagas/undo_saga.ts | 8 +- .../oxalis/model/sagas/update_actions.ts | 14 +- .../oxalis/model/sagas/user_saga.ts | 2 +- .../model/sagas/volume/floodfill_saga.tsx | 6 +- .../oxalis/model/sagas/volume/helpers.ts | 16 +- .../sagas/volume/volume_interpolation_saga.ts | 2 +- .../oxalis/model/sagas/volumetracing_saga.tsx | 8 +- .../volume_annotation_sampling.ts | 8 +- .../oxalis/model/volumetracing/volumelayer.ts | 20 +- .../oxalis/model_initialization.ts | 142 +++++----- .../javascripts/oxalis/shaders/coords.glsl.ts | 2 +- .../oxalis/shaders/main_data_shaders.glsl.ts | 44 ++-- .../oxalis/shaders/segmentation.glsl.ts | 14 +- .../oxalis/shaders/texture_access.glsl.ts | 2 +- frontend/javascripts/oxalis/store.ts | 98 +++---- .../javascripts/oxalis/throttled_store.ts | 6 +- .../action-bar/create_animation_modal.tsx | 36 +-- .../view/action-bar/dataset_position_view.tsx | 26 +- .../view/action-bar/download_modal_view.tsx | 84 +++--- .../view/action-bar/merge_modal_view.tsx | 34 +-- .../view/action-bar/private_links_view.tsx | 42 +-- .../view/action-bar/quick_select_settings.tsx | 20 +- .../oxalis/view/action-bar/save_button.tsx | 20 +- .../view/action-bar/share_modal_view.tsx | 68 ++--- .../share_view_dataset_modal_view.tsx | 6 +- .../view/action-bar/starting_job_modals.tsx | 82 +++--- .../oxalis/view/action-bar/toolbar_view.tsx | 114 ++++---- .../view/action-bar/tracing_actions_view.tsx | 78 +++--- .../action-bar/user_scripts_modal_view.tsx | 12 +- 
.../action-bar/view_dataset_actions_view.tsx | 22 +- .../view/action-bar/view_modes_view.tsx | 16 +- .../oxalis/view/action_bar_view.tsx | 56 ++-- .../javascripts/oxalis/view/arbitrary_view.ts | 28 +- .../view/components/border_toggle_button.tsx | 8 +- .../view/components/button_component.tsx | 4 +- .../view/components/categorization_label.tsx | 4 +- .../view/components/checkbox_component.tsx | 2 +- .../view/components/editable_text_label.tsx | 8 +- .../view/components/input_component.tsx | 4 +- .../oxalis/view/components/markdown_modal.tsx | 2 +- .../view/components/setting_input_views.tsx | 38 +-- .../javascripts/oxalis/view/context_menu.tsx | 200 +++++++------- .../view/distance_measurement_tooltip.tsx | 32 +-- .../javascripts/oxalis/view/help_modal.tsx | 2 +- .../javascripts/oxalis/view/input_catcher.tsx | 12 +- .../oxalis/view/jobs/train_ai_model.tsx | 62 ++--- .../oxalis/view/largest_segment_id_modal.tsx | 18 +- .../view/layouting/default_layout_configs.ts | 16 +- .../view/layouting/flex_layout_helper.ts | 2 +- .../view/layouting/flex_layout_wrapper.tsx | 46 ++-- .../view/layouting/layout_canvas_adapter.ts | 2 +- .../view/layouting/layout_persistence.ts | 10 +- .../oxalis/view/layouting/portal_utils.tsx | 2 +- .../view/layouting/tracing_layout_view.tsx | 4 +- .../controls_and_rendering_settings_tab.tsx | 46 ++-- .../view/left-border-tabs/histogram_view.tsx | 18 +- .../left-border-tabs/layer_settings_tab.tsx | 170 ++++++------ .../mapping_settings_view.tsx | 32 +-- .../modals/add_volume_layer_modal.tsx | 28 +- .../oxalis/view/merger_mode_modal_view.tsx | 2 +- .../view/new_task_description_modal.tsx | 2 +- .../oxalis/view/nml_upload_zone_container.tsx | 14 +- .../01-present-modern-controls.tsx | 8 +- .../novel_user_experiences/welcome_toast.tsx | 4 +- .../javascripts/oxalis/view/plane_view.ts | 30 +-- .../view/recommended_configuration_modal.tsx | 6 +- .../oxalis/view/recording_switch.tsx | 4 +- .../oxalis/view/remove_tree_modal.tsx | 8 +- 
.../oxalis/view/rendering_utils.ts | 8 +- .../abstract_tree_renderer.ts | 4 +- .../right-border-tabs/abstract_tree_tab.tsx | 10 +- .../advanced_search_popover.tsx | 8 +- .../right-border-tabs/bounding_box_tab.tsx | 30 +-- .../right-border-tabs/comment_tab/comment.tsx | 4 +- .../comment_tab/comment_tab_view.tsx | 38 +-- .../connectome_tab/connectome_filters.tsx | 8 +- .../connectome_tab/connectome_settings.tsx | 22 +- .../connectome_tab/connectome_view.tsx | 72 ++--- .../connectome_tab/synapse_tree.tsx | 12 +- .../dataset_info_tab_view.tsx | 40 +-- .../delete_group_modal_view.tsx | 2 +- .../view/right-border-tabs/metadata_table.tsx | 8 +- .../scrollable_virtualized_tree.tsx | 2 +- .../segments_tab/segment_list_item.tsx | 52 ++-- .../segments_tab/segment_statistics_modal.tsx | 24 +- .../segments_tab/segments_view.tsx | 20 +- .../segments_tab/segments_view_helper.tsx | 14 +- .../tree_hierarchy_view_helpers.ts | 2 +- .../trees_tab/skeleton_tab_view.tsx | 120 ++++----- .../trees_tab/tree_hierarchy_renderers.tsx | 12 +- .../trees_tab/tree_hierarchy_view.tsx | 28 +- frontend/javascripts/oxalis/view/scalebar.tsx | 12 +- .../javascripts/oxalis/view/statusbar.tsx | 46 ++-- .../oxalis/view/td_view_controls.tsx | 32 +-- .../javascripts/oxalis/view/tracing_view.tsx | 6 +- .../javascripts/oxalis/view/version_entry.tsx | 56 ++-- .../oxalis/view/version_entry_group.tsx | 8 +- .../javascripts/oxalis/view/version_list.tsx | 32 +-- .../javascripts/oxalis/view/version_view.tsx | 8 +- .../oxalis/view/viewport_status_indicator.tsx | 8 +- .../workers/async_bucket_picker.worker.ts | 2 +- .../byte_array_lz4_compression.worker.ts | 2 +- .../slow_byte_array_lz4_compression.worker.ts | 2 +- frontend/javascripts/router.tsx | 22 +- .../test/reducers/save_reducer.spec.ts | 8 +- .../test/sagas/saga_integration.spec.ts | 8 +- .../javascripts/test/sagas/save_saga.spec.ts | 16 +- frontend/javascripts/theme.tsx | 10 +- frontend/javascripts/types/api_flow_types.ts | 38 +-- 
.../dataset_view_configuration.schema.ts | 2 +- .../dataset_view_configuration_defaults.ts | 6 +- .../types/schemas/user_settings.schema.ts | 6 +- frontend/javascripts/types/validation.ts | 4 +- package.json | 6 +- 369 files changed, 3927 insertions(+), 3918 deletions(-) diff --git a/biome.json b/biome.json index d12455867a9..6a10b4cb84d 100644 --- a/biome.json +++ b/biome.json @@ -1,6 +1,12 @@ { "$schema": "./node_modules/@biomejs/biome/configuration_schema.json", - "organizeImports": { "enabled": false }, + "organizeImports": { + "enabled": true, + "ignore": [ + "frontend/javascripts/test/*", + "frontend/javascripts/oxalis/model/helpers/shader_editor.ts" + ] + }, "linter": { "enabled": true, "ignore": [ @@ -12,7 +18,8 @@ "libs/fps_meter.ts", "libs/parse_stl_buffer.ts", "libs/trackball_controls.ts", - "*.d.ts" + "*.d.ts", + "frontend/javascripts/test/snapshots/*" ], "rules": { "recommended": true, @@ -49,7 +56,8 @@ "useValidAnchor": "off", "useKeyWithClickEvents": "off", "useIframeTitle": "off", - "noAriaHiddenOnFocusable": "off" + "noAriaHiddenOnFocusable": "off", + "noLabelWithoutControl": "off" }, "performance": { "noDelete": "off" @@ -57,8 +65,7 @@ "correctness": { "noUnusedVariables": "error", "noRenderReturnValue": "off", - "useHookAtTopLevel": "error", - "noUnusedImports": "error" + "useHookAtTopLevel": "error" }, "suspicious": { "noExplicitAny": "off", diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 6f46037c19c..831ac8d7dc2 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1,32 +1,63 @@ -import ResumableJS from "resumablejs"; -import _ from "lodash"; import dayjs from "dayjs"; +import { V3 } from "libs/mjs"; +import type { RequestOptions } from "libs/request"; +import Request from "libs/request"; +import type { Message } from "libs/toast"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import window, { 
location } from "libs/window"; +import _ from "lodash"; +import messages from "messages"; +import type { AnnotationTypeFilterEnum, LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; +import Constants, { ControlModeEnum, AnnotationStateFilterEnum } from "oxalis/constants"; +import type BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; +import { + parseProtoAnnotation, + parseProtoListOfLong, + parseProtoTracing, + serializeProtoListOfLong, +} from "oxalis/model/helpers/proto_helpers"; +import type { + DatasetConfiguration, + Mapping, + MappingType, + NumberLike, + PartialDatasetConfiguration, + TraceOrViewCommand, + Tracing, + UserConfiguration, + VolumeTracing, +} from "oxalis/store"; +import ResumableJS from "resumablejs"; import { type APIAnnotation, type APIAnnotationInfo, type APIAnnotationType, type APIAnnotationVisibility, + type APIAvailableTasksReport, type APIBuildInfo, + type APICompoundType, type APIConnectomeFile, type APIDataSource, + type APIDataSourceId, type APIDataStore, type APIDataset, - type APIDataSourceId, + type APIDatasetCompact, type APIFeatureToggles, type APIHistogramData, + type APIMagRestrictions, type APIMapping, type APIMaybeUnimportedDataset, type APIMeshFile, - type APIAvailableTasksReport, type APIOrganization, type APIOrganizationCompact, + type APIPricingPlanStatus, type APIProject, type APIProjectCreator, type APIProjectProgressReport, type APIProjectUpdater, type APIProjectWithStatus, type APIPublication, - type APIMagRestrictions, type APIScript, type APIScriptCreator, type APIScriptUpdater, @@ -34,70 +65,39 @@ import { type APITeam, type APITimeInterval, type APITimeTrackingPerAnnotation, + type APITimeTrackingPerUser, type APITimeTrackingSpan, type APITracingStore, + type APITracingStoreAnnotation, type APIUpdateActionBatch, type APIUser, + type APIUserCompact, type APIUserLoggedTime, type APIUserTheme, + type AdditionalCoordinate, type AnnotationLayerDescriptor, + AnnotationLayerEnum, type 
AnnotationViewConfiguration, type ExperienceDomainList, + type LayerLink, + type MaintenanceInfo, + type ServerEditableMapping, type ServerTracing, + type ShortLink, type TracingType, - type ServerEditableMapping, - type APICompoundType, - type ZarrPrivateLink, - type VoxelyticsWorkflowReport, + type VoxelSize, type VoxelyticsChunkStatistics, - type ShortLink, - type VoxelyticsWorkflowListing, - type APIPricingPlanStatus, type VoxelyticsLogLine, - type APIUserCompact, - type APIDatasetCompact, - type MaintenanceInfo, - type AdditionalCoordinate, - type LayerLink, - type VoxelSize, - type APITimeTrackingPerUser, - AnnotationLayerEnum, - type APITracingStoreAnnotation, + type VoxelyticsWorkflowListing, + type VoxelyticsWorkflowReport, + type ZarrPrivateLink, } from "types/api_flow_types"; -import type { AnnotationTypeFilterEnum, LOG_LEVELS, Vector2, Vector3 } from "oxalis/constants"; -import Constants, { ControlModeEnum, AnnotationStateFilterEnum } from "oxalis/constants"; -import type { - DatasetConfiguration, - PartialDatasetConfiguration, - Tracing, - TraceOrViewCommand, - MappingType, - VolumeTracing, - UserConfiguration, - Mapping, - NumberLike, -} from "oxalis/store"; -import { V3 } from "libs/mjs"; +import type { ArbitraryObject } from "types/globals"; import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_view_configuration_defaults"; -import { - parseProtoAnnotation, - parseProtoListOfLong, - parseProtoTracing, - serializeProtoListOfLong, -} from "oxalis/model/helpers/proto_helpers"; -import type { RequestOptions } from "libs/request"; -import Request from "libs/request"; -import type { Message } from "libs/toast"; -import Toast from "libs/toast"; -import * as Utils from "libs/utils"; -import messages from "messages"; -import window, { location } from "libs/window"; import type { DatasourceConfiguration } from "types/schemas/datasource.types"; -import { doWithToken } from "./api/token"; -import type BoundingBox from 
"oxalis/model/bucket_data_handling/bounding_box"; -import type { ArbitraryObject } from "types/globals"; import { assertResponseLimit } from "./api/api_utils"; import { getDatasetIdFromNameAndOrganization } from "./api/disambiguate_legacy_routes"; +import { doWithToken } from "./api/token"; export * from "./api/token"; export * from "./api/jobs"; diff --git a/frontend/javascripts/admin/api/folders.ts b/frontend/javascripts/admin/api/folders.ts index 26fe25bbd00..f57bf4dea47 100644 --- a/frontend/javascripts/admin/api/folders.ts +++ b/frontend/javascripts/admin/api/folders.ts @@ -1,5 +1,5 @@ import Request from "libs/request"; -import type { Folder, FlatFolderTreeItem, FolderUpdater } from "types/api_flow_types"; +import type { FlatFolderTreeItem, Folder, FolderUpdater } from "types/api_flow_types"; export function getFolder(folderId: string): Promise { return Request.receiveJSON(`/api/folders/${folderId}`); diff --git a/frontend/javascripts/admin/api/jobs.ts b/frontend/javascripts/admin/api/jobs.ts index 3327d61884f..afb434a1699 100644 --- a/frontend/javascripts/admin/api/jobs.ts +++ b/frontend/javascripts/admin/api/jobs.ts @@ -3,13 +3,13 @@ import { location } from "libs/window"; import type { UnitLong, Vector3, Vector6 } from "oxalis/constants"; import type { APIAnnotationType, + APIEffectiveJobState, APIJob, - APIJobState, APIJobManualState, - APIEffectiveJobState, + APIJobState, + AdditionalCoordinate, AiModel, RenderAnimationOptions, - AdditionalCoordinate, } from "types/api_flow_types"; import { assertResponseLimit } from "./api_utils"; diff --git a/frontend/javascripts/admin/api/tasks.ts b/frontend/javascripts/admin/api/tasks.ts index a50ce4fd4d2..991859f063c 100644 --- a/frontend/javascripts/admin/api/tasks.ts +++ b/frontend/javascripts/admin/api/tasks.ts @@ -1,21 +1,21 @@ +import { finishAnnotation } from "admin/admin_rest_api"; import type { - APIActiveUser, - APIAnnotation, - APIAnnotationWithTask, - APITask, -} from "types/api_flow_types"; -import { 
APIAnnotationTypeEnum } from "types/api_flow_types"; -import type { - NewTask, NewNmlTask, + NewTask, TaskCreationResponseContainer, } from "admin/task/task_create_bulk_view"; import type { QueryObject } from "admin/task/task_search_form"; import type { RequestOptions } from "libs/request"; import Request from "libs/request"; import * as Utils from "libs/utils"; +import type { + APIActiveUser, + APIAnnotation, + APIAnnotationWithTask, + APITask, +} from "types/api_flow_types"; +import { APIAnnotationTypeEnum } from "types/api_flow_types"; import { assertResponseLimit } from "./api_utils"; -import { finishAnnotation } from "admin/admin_rest_api"; export function peekNextTasks(): Promise { return Request.receiveJSON("/api/user/tasks/peek"); diff --git a/frontend/javascripts/admin/api/token.ts b/frontend/javascripts/admin/api/token.ts index 3a430d55756..43db8c33abc 100644 --- a/frontend/javascripts/admin/api/token.ts +++ b/frontend/javascripts/admin/api/token.ts @@ -1,6 +1,6 @@ -import { location } from "libs/window"; import Request from "libs/request"; import * as Utils from "libs/utils"; +import { location } from "libs/window"; const MAX_TOKEN_RETRY_ATTEMPTS = 3; diff --git a/frontend/javascripts/admin/auth/accept_invite_view.tsx b/frontend/javascripts/admin/auth/accept_invite_view.tsx index b99bd3acf45..73eff347252 100644 --- a/frontend/javascripts/admin/auth/accept_invite_view.tsx +++ b/frontend/javascripts/admin/auth/accept_invite_view.tsx @@ -1,18 +1,18 @@ -import { Button, Result, Layout, Spin } from "antd"; import { GiftTwoTone } from "@ant-design/icons"; -import { useHistory } from "react-router-dom"; -import { AsyncButton } from "components/async_clickables"; -import { useState } from "react"; -import AuthenticationModal from "admin/auth/authentication_modal"; -import { useFetch } from "libs/react_helpers"; import { getOrganizationByInvite, joinOrganization, switchToOrganization, } from "admin/admin_rest_api"; -import type { APIUser } from 
"types/api_flow_types"; +import AuthenticationModal from "admin/auth/authentication_modal"; +import { Button, Layout, Result, Spin } from "antd"; +import { AsyncButton } from "components/async_clickables"; +import { useFetch } from "libs/react_helpers"; import Toast from "libs/toast"; import { location } from "libs/window"; +import { useState } from "react"; +import { useHistory } from "react-router-dom"; +import type { APIUser } from "types/api_flow_types"; const { Content } = Layout; diff --git a/frontend/javascripts/admin/auth/auth_token_view.tsx b/frontend/javascripts/admin/auth/auth_token_view.tsx index 821fb40b11e..fed30de538c 100644 --- a/frontend/javascripts/admin/auth/auth_token_view.tsx +++ b/frontend/javascripts/admin/auth/auth_token_view.tsx @@ -1,9 +1,9 @@ -import { useState, useEffect } from "react"; import { CopyOutlined, SwapOutlined } from "@ant-design/icons"; -import { Input, Button, Col, Row, Spin, Form, Space } from "antd"; import { getAuthToken, revokeAuthToken } from "admin/admin_rest_api"; -import type { OxalisState } from "oxalis/store"; +import { Button, Col, Form, Input, Row, Space, Spin } from "antd"; import Toast from "libs/toast"; +import type { OxalisState } from "oxalis/store"; +import { useEffect, useState } from "react"; import { useSelector } from "react-redux"; const FormItem = Form.Item; diff --git a/frontend/javascripts/admin/auth/authentication_modal.tsx b/frontend/javascripts/admin/auth/authentication_modal.tsx index d2aecfd824b..d217ce0ffb4 100644 --- a/frontend/javascripts/admin/auth/authentication_modal.tsx +++ b/frontend/javascripts/admin/auth/authentication_modal.tsx @@ -1,13 +1,13 @@ -import { Modal, Alert } from "antd"; -import type { ComponentType } from "react"; -import React, { useState } from "react"; -import Toast from "libs/toast"; -import messages from "messages"; -import features from "features"; import RegistrationFormWKOrg from "admin/auth/registration_form_wkorg"; +import { Alert, Modal } from "antd"; import 
LinkButton from "components/link_button"; -import RegistrationFormGeneric from "./registration_form_generic"; +import features from "features"; +import Toast from "libs/toast"; +import messages from "messages"; +import type { ComponentType } from "react"; +import React, { useState } from "react"; import LoginForm from "./login_form"; +import RegistrationFormGeneric from "./registration_form_generic"; type Props = { onLoggedIn: (userJustRegistered: boolean) => unknown; onCancel: () => void; diff --git a/frontend/javascripts/admin/auth/change_password_view.tsx b/frontend/javascripts/admin/auth/change_password_view.tsx index 6495f45886b..e41b5d1261f 100644 --- a/frontend/javascripts/admin/auth/change_password_view.tsx +++ b/frontend/javascripts/admin/auth/change_password_view.tsx @@ -1,11 +1,11 @@ -import { type RouteComponentProps, withRouter } from "react-router-dom"; -import { Form, Input, Button, Col, Row, Alert } from "antd"; import { LockOutlined } from "@ant-design/icons"; +import { Alert, Button, Col, Form, Input, Row } from "antd"; import Request from "libs/request"; -import messages from "messages"; import Toast from "libs/toast"; +import messages from "messages"; import { logoutUserAction } from "oxalis/model/actions/user_actions"; import Store from "oxalis/store"; +import { type RouteComponentProps, withRouter } from "react-router-dom"; const FormItem = Form.Item; const { Password } = Input; diff --git a/frontend/javascripts/admin/auth/finish_reset_password_view.tsx b/frontend/javascripts/admin/auth/finish_reset_password_view.tsx index eace6fb4bfe..9b7e44d2cb9 100644 --- a/frontend/javascripts/admin/auth/finish_reset_password_view.tsx +++ b/frontend/javascripts/admin/auth/finish_reset_password_view.tsx @@ -1,9 +1,9 @@ -import { type RouteComponentProps, withRouter } from "react-router-dom"; -import { Form, Input, Button, Col, Row, Card } from "antd"; import { LockOutlined } from "@ant-design/icons"; +import { Button, Card, Col, Form, Input, Row } from 
"antd"; import Request from "libs/request"; -import messages from "messages"; import Toast from "libs/toast"; +import messages from "messages"; +import { type RouteComponentProps, withRouter } from "react-router-dom"; const FormItem = Form.Item; const { Password } = Input; type Props = { diff --git a/frontend/javascripts/admin/auth/login_form.tsx b/frontend/javascripts/admin/auth/login_form.tsx index 589a215f541..a1cbef92cb9 100644 --- a/frontend/javascripts/admin/auth/login_form.tsx +++ b/frontend/javascripts/admin/auth/login_form.tsx @@ -1,13 +1,13 @@ -import { Alert, Button, Form, Input } from "antd"; import { LockOutlined, MailOutlined } from "@ant-design/icons"; -import { Link } from "react-router-dom"; -import { getIsInIframe } from "libs/utils"; import { loginUser, requestSingleSignOnLogin } from "admin/admin_rest_api"; -import { setActiveUserAction } from "oxalis/model/actions/user_actions"; -import Store from "oxalis/store"; -import messages from "messages"; +import { Alert, Button, Form, Input } from "antd"; import features from "features"; +import { getIsInIframe } from "libs/utils"; +import messages from "messages"; import { setActiveOrganizationAction } from "oxalis/model/actions/organization_actions"; +import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import Store from "oxalis/store"; +import { Link } from "react-router-dom"; const FormItem = Form.Item; const { Password } = Input; diff --git a/frontend/javascripts/admin/auth/login_view.tsx b/frontend/javascripts/admin/auth/login_view.tsx index 1336a9e059a..5d69f1d4a94 100644 --- a/frontend/javascripts/admin/auth/login_view.tsx +++ b/frontend/javascripts/admin/auth/login_view.tsx @@ -1,8 +1,8 @@ import { Card, Col, Row } from "antd"; -import type { RouteComponentProps } from "react-router-dom"; -import { withRouter } from "react-router-dom"; import * as Utils from "libs/utils"; import window from "libs/window"; +import type { RouteComponentProps } from "react-router-dom"; 
+import { withRouter } from "react-router-dom"; import LoginForm from "./login_form"; type Props = { diff --git a/frontend/javascripts/admin/auth/registration_form_generic.tsx b/frontend/javascripts/admin/auth/registration_form_generic.tsx index dbb6c877a55..e995c6e15dd 100644 --- a/frontend/javascripts/admin/auth/registration_form_generic.tsx +++ b/frontend/javascripts/admin/auth/registration_form_generic.tsx @@ -1,16 +1,16 @@ -import { Form, Input, Button, Row, Col, Checkbox } from "antd"; -import { LockOutlined, UserOutlined, MailOutlined } from "@ant-design/icons"; -import React from "react"; -import type { APIOrganization } from "types/api_flow_types"; +import { LockOutlined, MailOutlined, UserOutlined } from "@ant-design/icons"; import { loginUser } from "admin/admin_rest_api"; -import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import { getTermsOfService } from "admin/api/terms_of_service"; +import { Button, Checkbox, Col, Form, Input, Row } from "antd"; +import { useFetch } from "libs/react_helpers"; import Request from "libs/request"; -import Store from "oxalis/throttled_store"; import messages from "messages"; -import { setHasOrganizationsAction } from "oxalis/model/actions/ui_actions"; import { setActiveOrganizationAction } from "oxalis/model/actions/organization_actions"; -import { useFetch } from "libs/react_helpers"; -import { getTermsOfService } from "admin/api/terms_of_service"; +import { setHasOrganizationsAction } from "oxalis/model/actions/ui_actions"; +import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import Store from "oxalis/throttled_store"; +import React from "react"; +import type { APIOrganization } from "types/api_flow_types"; import { TOSCheckFormItem } from "./tos_check_form_item"; const FormItem = Form.Item; diff --git a/frontend/javascripts/admin/auth/registration_form_wkorg.tsx b/frontend/javascripts/admin/auth/registration_form_wkorg.tsx index 633e7613d0d..db6b041ce91 100644 --- 
a/frontend/javascripts/admin/auth/registration_form_wkorg.tsx +++ b/frontend/javascripts/admin/auth/registration_form_wkorg.tsx @@ -1,14 +1,14 @@ -import { Form, Input, Button, Row, Col, Checkbox } from "antd"; -import { UserOutlined, LockOutlined, MailOutlined } from "@ant-design/icons"; -import { useRef, memo } from "react"; +import { LockOutlined, MailOutlined, UserOutlined } from "@ant-design/icons"; import { loginUser } from "admin/admin_rest_api"; -import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import { getTermsOfService } from "admin/api/terms_of_service"; +import { Button, Checkbox, Col, Form, Input, Row } from "antd"; +import { useFetch } from "libs/react_helpers"; import Request from "libs/request"; -import Store from "oxalis/throttled_store"; import messages from "messages"; import { setActiveOrganizationAction } from "oxalis/model/actions/organization_actions"; -import { useFetch } from "libs/react_helpers"; -import { getTermsOfService } from "admin/api/terms_of_service"; +import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import Store from "oxalis/throttled_store"; +import { memo, useRef } from "react"; import { TOSCheckFormItem } from "./tos_check_form_item"; const FormItem = Form.Item; diff --git a/frontend/javascripts/admin/auth/registration_view.tsx b/frontend/javascripts/admin/auth/registration_view.tsx index 25c32f23839..9a4476b0947 100644 --- a/frontend/javascripts/admin/auth/registration_view.tsx +++ b/frontend/javascripts/admin/auth/registration_view.tsx @@ -1,12 +1,12 @@ -import { useEffect, useState } from "react"; -import { Link, useHistory } from "react-router-dom"; -import { Spin, Row, Col, Card } from "antd"; -import messages from "messages"; -import Toast from "libs/toast"; import { getDefaultOrganization } from "admin/admin_rest_api"; -import features from "features"; -import RegistrationFormWKOrg from "admin/auth/registration_form_wkorg"; import RegistrationFormGeneric from 
"admin/auth/registration_form_generic"; +import RegistrationFormWKOrg from "admin/auth/registration_form_wkorg"; +import { Card, Col, Row, Spin } from "antd"; +import features from "features"; +import Toast from "libs/toast"; +import messages from "messages"; +import { useEffect, useState } from "react"; +import { Link, useHistory } from "react-router-dom"; import type { APIOrganization } from "types/api_flow_types"; function RegistrationViewGeneric() { diff --git a/frontend/javascripts/admin/auth/start_reset_password_view.tsx b/frontend/javascripts/admin/auth/start_reset_password_view.tsx index f9d38f07161..2dec3b70684 100644 --- a/frontend/javascripts/admin/auth/start_reset_password_view.tsx +++ b/frontend/javascripts/admin/auth/start_reset_password_view.tsx @@ -1,9 +1,9 @@ -import { Link, type RouteComponentProps, withRouter } from "react-router-dom"; -import { Form, Input, Button, Col, Row, Card } from "antd"; import { MailOutlined } from "@ant-design/icons"; +import { Button, Card, Col, Form, Input, Row } from "antd"; import Request from "libs/request"; -import messages from "messages"; import Toast from "libs/toast"; +import messages from "messages"; +import { Link, type RouteComponentProps, withRouter } from "react-router-dom"; const FormItem = Form.Item; type Props = { history: RouteComponentProps["history"]; diff --git a/frontend/javascripts/admin/auth/verify_email_view.tsx b/frontend/javascripts/admin/auth/verify_email_view.tsx index 6ea36e27bc9..c55833649c2 100644 --- a/frontend/javascripts/admin/auth/verify_email_view.tsx +++ b/frontend/javascripts/admin/auth/verify_email_view.tsx @@ -1,11 +1,11 @@ +import { requestVerificationMail, verifyEmail } from "admin/admin_rest_api"; import { Spin } from "antd"; -import { useEffect } from "react"; import { useFetch } from "libs/react_helpers"; -import { requestVerificationMail, verifyEmail } from "admin/admin_rest_api"; -import Toast from "libs/toast"; import type { ServerErrorMessage } from "libs/request"; 
-import { useHistory } from "react-router-dom"; +import Toast from "libs/toast"; import { Store } from "oxalis/singletons"; +import { useEffect } from "react"; +import { useHistory } from "react-router-dom"; export const VERIFICATION_ERROR_TOAST_KEY = "verificationError"; diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx index 22c252d411e..c55db92e291 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx +++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx @@ -2,20 +2,20 @@ import { FileExcelOutlined } from "@ant-design/icons"; import { Button, Upload } from "antd"; import type { UploadChangeParam, UploadFile } from "antd/lib/upload"; import { AsyncButton } from "components/async_clickables"; +import ErrorHandling from "libs/error_handling"; import { readFileAsText } from "libs/read_file"; import Toast from "libs/toast"; import { SoftError } from "libs/utils"; +import * as Utils from "libs/utils"; import _ from "lodash"; import type { Vector3 } from "oxalis/constants"; import { parseNml } from "oxalis/model/helpers/nml_helpers"; import { - tryToFetchDatasetsByNameOrId, + type FileList, type WizardComponentProps, type WizardContext, - type FileList, + tryToFetchDatasetsByNameOrId, } from "./common"; -import ErrorHandling from "libs/error_handling"; -import * as Utils from "libs/utils"; const EXPECTED_VALUE_COUNT_PER_CSV_LINE = 8; diff --git a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx index ed656a2cc33..2f3fbac698a 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx +++ b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx @@ -3,9 +3,9 @@ import { AsyncButton } from "components/async_clickables"; import 
DatasetSelectionComponent, { type DatasetSelectionValue, } from "dashboard/dataset/dataset_selection_component"; -import { useState } from "react"; -import { tryToFetchDatasetsByNameOrId, type WizardComponentProps } from "./common"; import { useEffectOnlyOnce } from "libs/react_hooks"; +import { useState } from "react"; +import { type WizardComponentProps, tryToFetchDatasetsByNameOrId } from "./common"; export default function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) { const [datasetValues, setDatasetValues] = useState([]); diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx index 0263630a89d..75a75f551b8 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx +++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx @@ -20,22 +20,22 @@ import { import { FormItemWithInfo } from "dashboard/dataset/helper_components"; import FolderSelection from "dashboard/folders/folder_selection"; import { estimateAffineMatrix4x4 } from "libs/estimate_affine"; +import { formatNumber } from "libs/format_utils"; +import { useEffectOnlyOnce } from "libs/react_hooks"; import Toast, { guardedWithErrorToast } from "libs/toast"; import * as Utils from "libs/utils"; import _ from "lodash"; import messages from "messages"; +import { WkDevFlags } from "oxalis/api/wk_dev"; +import type { Vector3 } from "oxalis/constants"; import { flatToNestedMatrix, getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; +import { checkLandmarksForThinPlateSpline } from "oxalis/model/helpers/transformation_helpers"; import type { OxalisState } from "oxalis/store"; import React, { useState } from "react"; import { useSelector } from "react-redux"; import type { APIDataLayer, APIDataset, APITeam, LayerLink } from "types/api_flow_types"; import { syncValidator } from 
"types/validation"; import type { WizardComponentProps } from "./common"; -import { useEffectOnlyOnce } from "libs/react_hooks"; -import { formatNumber } from "libs/format_utils"; -import { checkLandmarksForThinPlateSpline } from "oxalis/model/helpers/transformation_helpers"; -import type { Vector3 } from "oxalis/constants"; -import { WkDevFlags } from "oxalis/api/wk_dev"; const FormItem = Form.Item; diff --git a/frontend/javascripts/admin/dataset/composition_wizard/common.ts b/frontend/javascripts/admin/dataset/composition_wizard/common.ts index 745757b0642..16634cceec6 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/common.ts +++ b/frontend/javascripts/admin/dataset/composition_wizard/common.ts @@ -3,7 +3,7 @@ import type { UploadFile } from "antd"; import Toast from "libs/toast"; import type { Vector3 } from "oxalis/constants"; import { Store } from "oxalis/singletons"; -import type { APIDataset, APIDataStore } from "types/api_flow_types"; +import type { APIDataStore, APIDataset } from "types/api_flow_types"; export type FileList = UploadFile[]; diff --git a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx index 847e7202820..22ec94aa253 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx @@ -1,45 +1,45 @@ +import { UnlockOutlined } from "@ant-design/icons"; +import { exploreRemoteDataset, isDatasetNameValid, storeRemoteDataset } from "admin/admin_rest_api"; +import { CardContainer, DatastoreFormItem } from "admin/dataset/dataset_components"; import { - Form, - Input, Button, Col, - Radio, - Row, Collapse, - type FormInstance, - Modal, Divider, + Form, + type FormInstance, + Input, List, + Modal, + Radio, + Row, Upload, } from "antd"; -import { connect } from "react-redux"; -import React, { useEffect, useState } from "react"; -import type { APIDataStore, APIUser } from 
"types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; -import { exploreRemoteDataset, isDatasetNameValid, storeRemoteDataset } from "admin/admin_rest_api"; -import messages from "messages"; -import { jsonStringify } from "libs/utils"; -import { CardContainer, DatastoreFormItem } from "admin/dataset/dataset_components"; +import type { RcFile, UploadChangeParam, UploadFile } from "antd/lib/upload"; import { AsyncButton } from "components/async_clickables"; -import Toast from "libs/toast"; -import _ from "lodash"; -import { Hint } from "oxalis/view/action-bar/download_modal_view"; -import { formatScale } from "libs/format_utils"; -import type { DataLayer, DatasourceConfiguration } from "types/schemas/datasource.types"; +import BrainSpinner from "components/brain_spinner"; import DatasetSettingsDataTab, { // Sync simple with advanced and get newest datasourceJson syncDataSourceFields, } from "dashboard/dataset/dataset_settings_data_tab"; import { FormItemWithInfo, Hideable } from "dashboard/dataset/helper_components"; import FolderSelection from "dashboard/folders/folder_selection"; -import type { RcFile, UploadChangeParam, UploadFile } from "antd/lib/upload"; -import { UnlockOutlined } from "@ant-design/icons"; -import { Unicode } from "oxalis/constants"; +import { formatScale } from "libs/format_utils"; import { readFileAsText } from "libs/read_file"; +import Toast from "libs/toast"; +import { jsonStringify } from "libs/utils"; import * as Utils from "libs/utils"; -import type { ArbitraryObject } from "types/globals"; -import BrainSpinner from "components/brain_spinner"; +import _ from "lodash"; +import messages from "messages"; +import { Unicode } from "oxalis/constants"; +import type { OxalisState } from "oxalis/store"; +import { Hint } from "oxalis/view/action-bar/download_modal_view"; +import React, { useEffect, useState } from "react"; +import { connect } from "react-redux"; import { useHistory } from "react-router-dom"; +import type { 
APIDataStore, APIUser } from "types/api_flow_types"; +import type { ArbitraryObject } from "types/globals"; +import type { DataLayer, DatasourceConfiguration } from "types/schemas/datasource.types"; const FormItem = Form.Item; const RadioGroup = Radio.Group; diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx index be14179c25a..78195989173 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx @@ -1,20 +1,20 @@ -import type { RouteComponentProps } from "react-router-dom"; -import { withRouter } from "react-router-dom"; -import { Tabs, Modal, Button, Layout, type TabsProps } from "antd"; import { CopyOutlined, DatabaseOutlined, UploadOutlined } from "@ant-design/icons"; -import React, { useState } from "react"; -import { connect, useSelector } from "react-redux"; -import type { APIDataStore } from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; -import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import { getDatastores } from "admin/admin_rest_api"; import DatasetAddRemoteView from "admin/dataset/dataset_add_remote_view"; import DatasetUploadView from "admin/dataset/dataset_upload_view"; +import { Button, Layout, Modal, Tabs, type TabsProps } from "antd"; import features from "features"; -import { getDatastores } from "admin/admin_rest_api"; -import { useFetch } from "libs/react_helpers"; -import DatasetAddComposeView from "./dataset_add_compose_view"; import type { History } from "history"; +import { useFetch } from "libs/react_helpers"; import { getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; +import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import type { OxalisState } from "oxalis/store"; +import React, { useState } from "react"; +import { connect, useSelector } from "react-redux"; +import type { RouteComponentProps } 
from "react-router-dom"; +import { withRouter } from "react-router-dom"; +import type { APIDataStore } from "types/api_flow_types"; +import DatasetAddComposeView from "./dataset_add_compose_view"; const { Content, Sider } = Layout; diff --git a/frontend/javascripts/admin/dataset/dataset_components.tsx b/frontend/javascripts/admin/dataset/dataset_components.tsx index d6348b46942..2ba8da0a4b1 100644 --- a/frontend/javascripts/admin/dataset/dataset_components.tsx +++ b/frontend/javascripts/admin/dataset/dataset_components.tsx @@ -1,11 +1,11 @@ -import type * as React from "react"; -import { Form, Input, Select, Card, type FormInstance } from "antd"; -import messages from "messages"; -import type { APIDataStore, APITeam, APIUser } from "types/api_flow_types"; -import { syncValidator } from "types/validation"; +import { Card, Form, type FormInstance, Input, Select } from "antd"; import { FormItemWithInfo } from "dashboard/dataset/helper_components"; import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; import features from "features"; +import messages from "messages"; +import type * as React from "react"; +import type { APIDataStore, APITeam, APIUser } from "types/api_flow_types"; +import { syncValidator } from "types/validation"; const FormItem = Form.Item; export function CardContainer({ diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index 4339cdc7472..63f8c83c9f3 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -1,75 +1,75 @@ import { - Popover, + FileOutlined, + FolderOutlined, + HourglassOutlined, + InboxOutlined, + InfoCircleOutlined, + LoadingOutlined, +} from "@ant-design/icons"; +import { + Alert, Avatar, - Form, Button, Col, - Row, + Form, + List, Modal, + Popover, Progress, - Alert, - List, - Spin, + Row, Select, + Spin, Tooltip, } from "antd"; -import type { 
Location as HistoryLocation, Action as HistoryAction } from "history"; -import { - InfoCircleOutlined, - FileOutlined, - FolderOutlined, - InboxOutlined, - HourglassOutlined, - LoadingOutlined, -} from "@ant-design/icons"; -import { connect } from "react-redux"; -import React from "react"; import dayjs from "dayjs"; +import type { Action as HistoryAction, Location as HistoryLocation } from "history"; +import React from "react"; +import { connect } from "react-redux"; -import classnames from "classnames"; -import _ from "lodash"; -import { useDropzone, type FileWithPath } from "react-dropzone"; -import ErrorHandling from "libs/error_handling"; -import { Link, type RouteComponentProps } from "react-router-dom"; -import { withRouter } from "react-router-dom"; -import { - type APITeam, - type APIDataStore, - type APIUser, - type APIOrganization, - APIJobType, -} from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; import { - reserveDatasetUpload, - finishDatasetUpload, + type UnfinishedUpload, cancelDatasetUpload, createResumableUpload, - startConvertToWkwJob, + finishDatasetUpload, + getUnfinishedUploads, + reserveDatasetUpload, sendAnalyticsEvent, sendFailedRequestAnalyticsEvent, - getUnfinishedUploads, - type UnfinishedUpload, + startConvertToWkwJob, } from "admin/admin_rest_api"; -import Toast from "libs/toast"; -import * as Utils from "libs/utils"; -import messages from "messages"; -import Zip from "libs/zipjs_wrapper"; import { AllowedTeamsFormItem, CardContainer, DatasetNameFormItem, DatastoreFormItem, } from "admin/dataset/dataset_components"; -import { Vector3Input } from "libs/vector_input"; -import features from "features"; -import { syncValidator } from "types/validation"; +import { hasPricingPlanExceededStorage } from "admin/organization/pricing_plan_utils"; import type { FormInstance } from "antd/lib/form"; -import { AllUnits, UnitLong, LongUnitToShortUnitMap, type Vector3 } from "oxalis/constants"; -import { FormItemWithInfo, 
confirmAsync } from "../../dashboard/dataset/helper_components"; +import classnames from "classnames"; import FolderSelection from "dashboard/folders/folder_selection"; -import { hasPricingPlanExceededStorage } from "admin/organization/pricing_plan_utils"; +import features from "features"; +import ErrorHandling from "libs/error_handling"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import { Vector3Input } from "libs/vector_input"; +import Zip from "libs/zipjs_wrapper"; +import _ from "lodash"; +import messages from "messages"; +import { AllUnits, LongUnitToShortUnitMap, UnitLong, type Vector3 } from "oxalis/constants"; import { enforceActiveOrganization } from "oxalis/model/accessors/organization_accessors"; +import type { OxalisState } from "oxalis/store"; +import { type FileWithPath, useDropzone } from "react-dropzone"; +import { Link, type RouteComponentProps } from "react-router-dom"; +import { withRouter } from "react-router-dom"; +import { + type APIDataStore, + APIJobType, + type APIOrganization, + type APITeam, + type APIUser, +} from "types/api_flow_types"; +import { syncValidator } from "types/validation"; +import { FormItemWithInfo, confirmAsync } from "../../dashboard/dataset/helper_components"; const FormItem = Form.Item; const REPORT_THROTTLE_THRESHOLD = 1 * 60 * 1000; // 1 min diff --git a/frontend/javascripts/admin/dataset/dataset_url_import.tsx b/frontend/javascripts/admin/dataset/dataset_url_import.tsx index 1184bba6d0f..3c157fa4108 100644 --- a/frontend/javascripts/admin/dataset/dataset_url_import.tsx +++ b/frontend/javascripts/admin/dataset/dataset_url_import.tsx @@ -1,6 +1,6 @@ +import { getDatastores } from "admin/admin_rest_api"; import DatasetAddRemoteView from "admin/dataset/dataset_add_remote_view"; import { useFetch } from "libs/react_helpers"; -import { getDatastores } from "admin/admin_rest_api"; import * as Utils from "libs/utils"; import _ from "lodash"; import { useHistory } from "react-router-dom"; diff 
--git a/frontend/javascripts/admin/datastore_health_check.ts b/frontend/javascripts/admin/datastore_health_check.ts index 094c6146b0c..14811e0d13b 100644 --- a/frontend/javascripts/admin/datastore_health_check.ts +++ b/frontend/javascripts/admin/datastore_health_check.ts @@ -1,7 +1,7 @@ -import _ from "lodash"; -import Request from "libs/request"; import * as RestAPI from "admin/admin_rest_api"; +import Request from "libs/request"; import Toast from "libs/toast"; +import _ from "lodash"; import messages from "messages"; // Create a throttled function which depends on its arguments. diff --git a/frontend/javascripts/admin/job/job_hooks.ts b/frontend/javascripts/admin/job/job_hooks.ts index acc0fdb63ab..c1228106f9b 100644 --- a/frontend/javascripts/admin/job/job_hooks.ts +++ b/frontend/javascripts/admin/job/job_hooks.ts @@ -1,8 +1,8 @@ -import features from "features"; import { getJob, getJobs } from "admin/admin_rest_api"; +import features from "features"; +import { useEffectOnlyOnce, usePolling } from "libs/react_hooks"; import { useState } from "react"; import type { APIJob } from "types/api_flow_types"; -import { useEffectOnlyOnce, usePolling } from "libs/react_hooks"; type JobInfo = [jobKey: string, jobId: string]; diff --git a/frontend/javascripts/admin/job/job_list_view.tsx b/frontend/javascripts/admin/job/job_list_view.tsx index e5adaa4c642..5531f11fa59 100644 --- a/frontend/javascripts/admin/job/job_list_view.tsx +++ b/frontend/javascripts/admin/job/job_list_view.tsx @@ -1,30 +1,30 @@ -import _ from "lodash"; -import { PropTypes } from "@scalableminds/prop-types"; -import { confirmAsync } from "dashboard/dataset/helper_components"; -import { Link } from "react-router-dom"; -import { Table, Spin, Input, Tooltip, Typography } from "antd"; import { CheckCircleTwoTone, ClockCircleTwoTone, - CloseCircleTwoTone, CloseCircleOutlined, + CloseCircleTwoTone, DownloadOutlined, EyeOutlined, + InfoCircleOutlined, LoadingOutlined, QuestionCircleTwoTone, - 
InfoCircleOutlined, } from "@ant-design/icons"; -import type * as React from "react"; -import { type APIJob, APIJobType, type APIUserBase } from "types/api_flow_types"; -import { getJobs, cancelJob } from "admin/admin_rest_api"; -import Persistence from "libs/persistence"; -import * as Utils from "libs/utils"; -import FormattedDate from "components/formatted_date"; +import { PropTypes } from "@scalableminds/prop-types"; +import { cancelJob, getJobs } from "admin/admin_rest_api"; +import { Input, Spin, Table, Tooltip, Typography } from "antd"; import { AsyncLink } from "components/async_clickables"; -import { useEffect, useState } from "react"; -import { useInterval } from "libs/react_helpers"; +import FormattedDate from "components/formatted_date"; +import { confirmAsync } from "dashboard/dataset/helper_components"; import { formatWkLibsNdBBox } from "libs/format_utils"; +import Persistence from "libs/persistence"; +import { useInterval } from "libs/react_helpers"; +import * as Utils from "libs/utils"; +import _ from "lodash"; import { getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; +import type * as React from "react"; +import { useEffect, useState } from "react"; +import { Link } from "react-router-dom"; +import { type APIJob, APIJobType, type APIUserBase } from "types/api_flow_types"; // Unfortunately, the twoToneColor (nor the style) prop don't support // CSS variables. 
diff --git a/frontend/javascripts/admin/onboarding.tsx b/frontend/javascripts/admin/onboarding.tsx index 4724cf9d97e..e81ac051e51 100644 --- a/frontend/javascripts/admin/onboarding.tsx +++ b/frontend/javascripts/admin/onboarding.tsx @@ -1,33 +1,33 @@ -import React, { useState } from "react"; -import { Form, Modal, Input, Button, Row, Col, Steps, Card, AutoComplete, Alert } from "antd"; import { - CloudUploadOutlined, - TeamOutlined, - UserOutlined, - FileAddOutlined, - RocketOutlined, ClockCircleOutlined, - PlayCircleOutlined, - PaperClipOutlined, + CloudUploadOutlined, CodeOutlined, CustomerServiceOutlined, + FileAddOutlined, + PaperClipOutlined, + PlayCircleOutlined, PlusOutlined, + RocketOutlined, + TeamOutlined, UserAddOutlined, + UserOutlined, } from "@ant-design/icons"; -import { Link, type RouteComponentProps, withRouter } from "react-router-dom"; -import { connect } from "react-redux"; -import type { APIUser, APIDataStore } from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; -import Store from "oxalis/store"; -import LinkButton from "components/link_button"; import { getDatastores, sendInvitesForOrganization } from "admin/admin_rest_api"; -import DatasetSettingsView from "dashboard/dataset/dataset_settings_view"; -import DatasetUploadView from "admin/dataset/dataset_upload_view"; import RegistrationFormGeneric from "admin/auth/registration_form_generic"; +import DatasetUploadView from "admin/dataset/dataset_upload_view"; +import { maxInludedUsersInBasicPlan } from "admin/organization/pricing_plan_utils"; +import { Alert, AutoComplete, Button, Card, Col, Form, Input, Modal, Row, Steps } from "antd"; import CreditsFooter from "components/credits_footer"; -import Toast from "libs/toast"; +import LinkButton from "components/link_button"; +import DatasetSettingsView from "dashboard/dataset/dataset_settings_view"; import features from "features"; -import { maxInludedUsersInBasicPlan } from "admin/organization/pricing_plan_utils"; 
+import Toast from "libs/toast"; +import type { OxalisState } from "oxalis/store"; +import Store from "oxalis/store"; +import React, { useState } from "react"; +import { connect } from "react-redux"; +import { Link, type RouteComponentProps, withRouter } from "react-router-dom"; +import type { APIDataStore, APIUser } from "types/api_flow_types"; const { Step } = Steps; const FormItem = Form.Item; diff --git a/frontend/javascripts/admin/organization/organization_cards.tsx b/frontend/javascripts/admin/organization/organization_cards.tsx index 4952c4ec776..c8674546f7c 100644 --- a/frontend/javascripts/admin/organization/organization_cards.tsx +++ b/frontend/javascripts/admin/organization/organization_cards.tsx @@ -7,22 +7,22 @@ import { import { Alert, Button, Card, Col, Progress, Row } from "antd"; import { formatDateInLocalTimeZone } from "components/formatted_date"; import dayjs from "dayjs"; +import { formatCountToDataAmountUnit } from "libs/format_utils"; import Constants from "oxalis/constants"; import type { OxalisState } from "oxalis/store"; import type React from "react"; import { useSelector } from "react-redux"; import type { APIOrganization } from "types/api_flow_types"; import { + PricingPlanEnum, hasPricingPlanExceededStorage, hasPricingPlanExceededUsers, hasPricingPlanExpired, isUserAllowedToRequestUpgrades, powerPlanFeatures, - PricingPlanEnum, teamPlanFeatures, } from "./pricing_plan_utils"; import UpgradePricingPlanModal from "./upgrade_plan_modal"; -import { formatCountToDataAmountUnit } from "libs/format_utils"; export function TeamAndPowerPlanUpgradeCards({ teamUpgradeCallback, diff --git a/frontend/javascripts/admin/organization/organization_edit_view.tsx b/frontend/javascripts/admin/organization/organization_edit_view.tsx index d1dac4264a1..69f197f40cf 100644 --- a/frontend/javascripts/admin/organization/organization_edit_view.tsx +++ b/frontend/javascripts/admin/organization/organization_edit_view.tsx @@ -1,22 +1,24 @@ -import { useState, 
useEffect } from "react"; -import { connect } from "react-redux"; -import { Form, Button, Card, Input, Row, Col, Skeleton, Typography, Space } from "antd"; import { - MailOutlined, - TagOutlined, CopyOutlined, - SaveOutlined, IdcardOutlined, + MailOutlined, + SaveOutlined, + TagOutlined, UserOutlined, } from "@ant-design/icons"; -import { confirmAsync } from "dashboard/dataset/helper_components"; import { deleteOrganization, - updateOrganization, - getUsers, getPricingPlanStatus, + getUsers, + updateOrganization, } from "admin/admin_rest_api"; +import { Button, Card, Col, Form, Input, Row, Skeleton, Space, Typography } from "antd"; +import { confirmAsync } from "dashboard/dataset/helper_components"; import Toast from "libs/toast"; +import { enforceActiveOrganization } from "oxalis/model/accessors/organization_accessors"; +import type { OxalisState } from "oxalis/store"; +import { useEffect, useState } from "react"; +import { connect } from "react-redux"; import type { APIOrganization, APIPricingPlanStatus } from "types/api_flow_types"; import { PlanAboutToExceedAlert, @@ -25,9 +27,7 @@ import { PlanExpirationCard, PlanUpgradeCard, } from "./organization_cards"; -import { enforceActiveOrganization } from "oxalis/model/accessors/organization_accessors"; import { getActiveUserCount } from "./pricing_plan_utils"; -import type { OxalisState } from "oxalis/store"; const FormItem = Form.Item; diff --git a/frontend/javascripts/admin/organization/upgrade_plan_modal.tsx b/frontend/javascripts/admin/organization/upgrade_plan_modal.tsx index 7c5a041eb9a..b8fa8f62a30 100644 --- a/frontend/javascripts/admin/organization/upgrade_plan_modal.tsx +++ b/frontend/javascripts/admin/organization/upgrade_plan_modal.tsx @@ -1,27 +1,27 @@ -import type React from "react"; -import { useRef } from "react"; -import { Button, Divider, InputNumber, Modal } from "antd"; -import dayjs from "dayjs"; import { DatabaseOutlined, FieldTimeOutlined, RocketOutlined, UserAddOutlined, } from 
"@ant-design/icons"; -import type { APIOrganization } from "types/api_flow_types"; -import { formatDateInLocalTimeZone } from "components/formatted_date"; import { sendExtendPricingPlanEmail, sendUpgradePricingPlanEmail, sendUpgradePricingPlanStorageEmail, sendUpgradePricingPlanUserEmail, } from "admin/admin_rest_api"; -import { powerPlanFeatures, teamPlanFeatures } from "./pricing_plan_utils"; -import { PricingPlanEnum } from "./pricing_plan_utils"; +import { Button, Divider, InputNumber, Modal } from "antd"; +import { formatDateInLocalTimeZone } from "components/formatted_date"; +import dayjs from "dayjs"; import renderIndependently from "libs/render_independently"; import Toast from "libs/toast"; -import { TeamAndPowerPlanUpgradeCards } from "./organization_cards"; import messages from "messages"; +import type React from "react"; +import { useRef } from "react"; +import type { APIOrganization } from "types/api_flow_types"; +import { TeamAndPowerPlanUpgradeCards } from "./organization_cards"; +import { powerPlanFeatures, teamPlanFeatures } from "./pricing_plan_utils"; +import { PricingPlanEnum } from "./pricing_plan_utils"; const ModalInformationFooter = ( <> diff --git a/frontend/javascripts/admin/project/project_create_view.tsx b/frontend/javascripts/admin/project/project_create_view.tsx index d740e20d27e..b3c76d40935 100644 --- a/frontend/javascripts/admin/project/project_create_view.tsx +++ b/frontend/javascripts/admin/project/project_create_view.tsx @@ -1,16 +1,16 @@ -import { Form, Input, Select, Button, Card, InputNumber, Checkbox } from "antd"; -import { useState, useEffect } from "react"; -import { useHistory } from "react-router-dom"; -import { useSelector } from "react-redux"; -import type { APIUser, APITeam } from "types/api_flow_types"; -import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; import { - getUsers, - getEditableTeams, createProject, + getEditableTeams, getProject, + getUsers, updateProject, } from 
"admin/admin_rest_api"; +import { Button, Card, Checkbox, Form, Input, InputNumber, Select } from "antd"; +import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import { useEffect, useState } from "react"; +import { useSelector } from "react-redux"; +import { useHistory } from "react-router-dom"; +import type { APITeam, APIUser } from "types/api_flow_types"; import { FormItemWithInfo } from "../../dashboard/dataset/helper_components"; const FormItem = Form.Item; diff --git a/frontend/javascripts/admin/project/project_list_view.tsx b/frontend/javascripts/admin/project/project_list_view.tsx index 61b35f655fa..793616fe416 100644 --- a/frontend/javascripts/admin/project/project_list_view.tsx +++ b/frontend/javascripts/admin/project/project_list_view.tsx @@ -1,6 +1,3 @@ -import { Link } from "react-router-dom"; -import { PropTypes } from "@scalableminds/prop-types"; -import { Table, Spin, Button, Input, Tooltip, App } from "antd"; import { DeleteOutlined, DownloadOutlined, @@ -13,38 +10,41 @@ import { ScheduleOutlined, TeamOutlined, } from "@ant-design/icons"; -import { connect } from "react-redux"; -import React, { useEffect, useState } from "react"; -import _ from "lodash"; -import { AsyncLink } from "components/async_clickables"; -import { - type APIProjectWithStatus, - type APIProject, - type APIUser, - type APIUserBase, - TracingTypeEnum, -} from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; -import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import { PropTypes } from "@scalableminds/prop-types"; import { - getProjectsWithStatus, + deleteProject as deleteProjectAPI, + downloadAnnotation, getProjectsForTaskType, + getProjectsWithStatus, + getTaskType, increaseProjectTaskInstances as increaseProjectTaskInstancesAPI, - deleteProject as deleteProjectAPI, pauseProject, resumeProject, - downloadAnnotation, - getTaskType, } from "admin/admin_rest_api"; -import Toast from "libs/toast"; +import { 
getTasks } from "admin/api/tasks"; +import TransferAllTasksModal from "admin/project/transfer_all_tasks_modal"; +import { App, Button, Input, Spin, Table, Tooltip } from "antd"; +import { AsyncLink } from "components/async_clickables"; +import FormattedDate from "components/formatted_date"; import { handleGenericError } from "libs/error_handling"; import Persistence from "libs/persistence"; -import TransferAllTasksModal from "admin/project/transfer_all_tasks_modal"; +import { useEffectOnlyOnce } from "libs/react_hooks"; +import Toast from "libs/toast"; import * as Utils from "libs/utils"; +import _ from "lodash"; import messages from "messages"; -import FormattedDate from "components/formatted_date"; -import { useEffectOnlyOnce } from "libs/react_hooks"; -import { getTasks } from "admin/api/tasks"; +import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import type { OxalisState } from "oxalis/store"; +import React, { useEffect, useState } from "react"; +import { connect } from "react-redux"; +import { Link } from "react-router-dom"; +import { + type APIProject, + type APIProjectWithStatus, + type APIUser, + type APIUserBase, + TracingTypeEnum, +} from "types/api_flow_types"; const { Column } = Table; const { Search } = Input; diff --git a/frontend/javascripts/admin/project/transfer_all_tasks_modal.tsx b/frontend/javascripts/admin/project/transfer_all_tasks_modal.tsx index caea72f5f45..639e4862649 100644 --- a/frontend/javascripts/admin/project/transfer_all_tasks_modal.tsx +++ b/frontend/javascripts/admin/project/transfer_all_tasks_modal.tsx @@ -1,14 +1,14 @@ -import { Modal, Table, Spin } from "antd"; -import { useState } from "react"; -import _ from "lodash"; -import type { APIUser, APIProject, APIActiveUser } from "types/api_flow_types"; import { getUsers } from "admin/admin_rest_api"; +import { getUsersWithActiveTasks, transferActiveTasksOfProject } from "admin/api/tasks"; +import UserSelectionComponent from 
"admin/user/user_selection_component"; +import { Modal, Spin, Table } from "antd"; import { handleGenericError } from "libs/error_handling"; +import { useFetch } from "libs/react_helpers"; import Toast from "libs/toast"; -import UserSelectionComponent from "admin/user/user_selection_component"; +import _ from "lodash"; import messages from "messages"; -import { useFetch } from "libs/react_helpers"; -import { getUsersWithActiveTasks, transferActiveTasksOfProject } from "admin/api/tasks"; +import { useState } from "react"; +import type { APIActiveUser, APIProject, APIUser } from "types/api_flow_types"; type Props = { project: APIProject | null | undefined; diff --git a/frontend/javascripts/admin/scripts/script_create_view.tsx b/frontend/javascripts/admin/scripts/script_create_view.tsx index 698106f389a..a4641d564cc 100644 --- a/frontend/javascripts/admin/scripts/script_create_view.tsx +++ b/frontend/javascripts/admin/scripts/script_create_view.tsx @@ -1,17 +1,17 @@ -import { Form, Input, Select, Button, Card } from "antd"; -import type { RouteComponentProps } from "react-router-dom"; -import { withRouter } from "react-router-dom"; -import { connect } from "react-redux"; -import { useState, useEffect } from "react"; -import type { APIUser } from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; -import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; import { - getTeamManagerOrAdminUsers, - updateScript, createScript, getScript, + getTeamManagerOrAdminUsers, + updateScript, } from "admin/admin_rest_api"; +import { Button, Card, Form, Input, Select } from "antd"; +import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import type { OxalisState } from "oxalis/store"; +import { useEffect, useState } from "react"; +import { connect } from "react-redux"; +import type { RouteComponentProps } from "react-router-dom"; +import { withRouter } from "react-router-dom"; +import type { APIUser } from 
"types/api_flow_types"; const FormItem = Form.Item; type OwnProps = { diff --git a/frontend/javascripts/admin/scripts/script_list_view.tsx b/frontend/javascripts/admin/scripts/script_list_view.tsx index cc09444a9c2..54c144fbc9a 100644 --- a/frontend/javascripts/admin/scripts/script_list_view.tsx +++ b/frontend/javascripts/admin/scripts/script_list_view.tsx @@ -1,17 +1,17 @@ -import { Link } from "react-router-dom"; -import { PropTypes } from "@scalableminds/prop-types"; -import { Table, Spin, Button, Input, App } from "antd"; import { DeleteOutlined, EditOutlined, PlusOutlined } from "@ant-design/icons"; -import * as React from "react"; -import _ from "lodash"; -import type { APIScript, APIUser } from "types/api_flow_types"; -import { getScripts, deleteScript as deleteScriptAPI } from "admin/admin_rest_api"; -import { handleGenericError } from "libs/error_handling"; +import { PropTypes } from "@scalableminds/prop-types"; +import { deleteScript as deleteScriptAPI, getScripts } from "admin/admin_rest_api"; +import { App, Button, Input, Spin, Table } from "antd"; import LinkButton from "components/link_button"; +import { handleGenericError } from "libs/error_handling"; import Persistence from "libs/persistence"; import * as Utils from "libs/utils"; +import _ from "lodash"; import messages from "messages"; +import * as React from "react"; import { useEffect, useState } from "react"; +import { Link } from "react-router-dom"; +import type { APIScript, APIUser } from "types/api_flow_types"; const { Column } = Table; const { Search } = Input; diff --git a/frontend/javascripts/admin/statistic/available_tasks_report_view.tsx b/frontend/javascripts/admin/statistic/available_tasks_report_view.tsx index f972d3f5a6d..6225f5c726c 100644 --- a/frontend/javascripts/admin/statistic/available_tasks_report_view.tsx +++ b/frontend/javascripts/admin/statistic/available_tasks_report_view.tsx @@ -1,11 +1,11 @@ -import { Spin, Table, Card, Typography, Tooltip, Tag } from "antd"; -import { 
useState } from "react"; -import type { APIAvailableTasksReport } from "types/api_flow_types"; +import { InfoCircleOutlined } from "@ant-design/icons"; import { getAvailableTasksReport } from "admin/admin_rest_api"; +import { Card, Spin, Table, Tag, Tooltip, Typography } from "antd"; import { handleGenericError } from "libs/error_handling"; import * as Utils from "libs/utils"; +import { useState } from "react"; +import type { APIAvailableTasksReport } from "types/api_flow_types"; import TeamSelectionForm from "./team_selection_form"; -import { InfoCircleOutlined } from "@ant-design/icons"; const { Column } = Table; diff --git a/frontend/javascripts/admin/statistic/project_and_annotation_type_dropdown.tsx b/frontend/javascripts/admin/statistic/project_and_annotation_type_dropdown.tsx index fc339e8ba4a..8cf8d04cc11 100644 --- a/frontend/javascripts/admin/statistic/project_and_annotation_type_dropdown.tsx +++ b/frontend/javascripts/admin/statistic/project_and_annotation_type_dropdown.tsx @@ -1,12 +1,12 @@ -import { Select } from "antd"; -import type React from "react"; -import { useEffect, useState } from "react"; import { getProjects } from "admin/admin_rest_api"; +import { Select } from "antd"; import { useFetch } from "libs/react_helpers"; import { isUserAdminOrTeamManager } from "libs/utils"; -import { useSelector } from "react-redux"; -import type { OxalisState } from "oxalis/store"; import { AnnotationStateFilterEnum, AnnotationTypeFilterEnum } from "oxalis/constants"; +import type { OxalisState } from "oxalis/store"; +import type React from "react"; +import { useEffect, useState } from "react"; +import { useSelector } from "react-redux"; type ProjectAndTypeDropdownProps = { selectedProjectIds: string[]; diff --git a/frontend/javascripts/admin/statistic/project_progress_report_view.tsx b/frontend/javascripts/admin/statistic/project_progress_report_view.tsx index e1fddaece1a..d661f8c2d83 100644 --- 
a/frontend/javascripts/admin/statistic/project_progress_report_view.tsx +++ b/frontend/javascripts/admin/statistic/project_progress_report_view.tsx @@ -1,16 +1,16 @@ -import { Badge, Spin, Table, Card } from "antd"; import { PauseCircleOutlined, ReloadOutlined, SettingOutlined } from "@ant-design/icons"; -import * as React from "react"; -import type { APIProjectProgressReport, APITeam } from "types/api_flow_types"; import { getProjectProgressReport } from "admin/admin_rest_api"; +import { Badge, Card, Spin, Table } from "antd"; import FormattedDate from "components/formatted_date"; import Loop from "components/loop"; import StackedBarChart, { colors } from "components/stacked_bar_chart"; import Toast from "libs/toast"; import * as Utils from "libs/utils"; import messages from "messages"; -import TeamSelectionForm from "./team_selection_form"; +import * as React from "react"; import { useState } from "react"; +import type { APIProjectProgressReport, APITeam } from "types/api_flow_types"; +import TeamSelectionForm from "./team_selection_form"; const { Column, ColumnGroup } = Table; const RELOAD_INTERVAL = 10 * 60 * 1000; // 10 min diff --git a/frontend/javascripts/admin/statistic/team_selection_form.tsx b/frontend/javascripts/admin/statistic/team_selection_form.tsx index e4cd706db51..b119604c144 100644 --- a/frontend/javascripts/admin/statistic/team_selection_form.tsx +++ b/frontend/javascripts/admin/statistic/team_selection_form.tsx @@ -1,6 +1,6 @@ -import { Row, Col, Form, Button } from "antd"; -import type { APITeam } from "types/api_flow_types"; +import { Button, Col, Form, Row } from "antd"; import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; +import type { APITeam } from "types/api_flow_types"; const FormItem = Form.Item; type Props = { value?: APITeam | null | undefined; diff --git a/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx b/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx index 
4dc0d246ead..5d87640ae29 100644 --- a/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx +++ b/frontend/javascripts/admin/statistic/time_tracking_detail_view.tsx @@ -1,12 +1,12 @@ -import { useFetch } from "libs/react_helpers"; import { getTimeTrackingForUserSummedPerAnnotation } from "admin/admin_rest_api"; -import dayjs from "dayjs"; import { Col, Divider, Row } from "antd"; +import dayjs from "dayjs"; import { formatMilliseconds } from "libs/format_utils"; +import { useFetch } from "libs/react_helpers"; import _ from "lodash"; -import type { APITimeTrackingPerAnnotation } from "types/api_flow_types"; +import type { AnnotationStateFilterEnum, AnnotationTypeFilterEnum } from "oxalis/constants"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; -import type { AnnotationTypeFilterEnum, AnnotationStateFilterEnum } from "oxalis/constants"; +import type { APITimeTrackingPerAnnotation } from "types/api_flow_types"; type TimeTrackingDetailViewProps = { userId: string; diff --git a/frontend/javascripts/admin/statistic/time_tracking_overview.tsx b/frontend/javascripts/admin/statistic/time_tracking_overview.tsx index a652c1cf973..91dcca8af44 100644 --- a/frontend/javascripts/admin/statistic/time_tracking_overview.tsx +++ b/frontend/javascripts/admin/statistic/time_tracking_overview.tsx @@ -1,23 +1,23 @@ -import { getTeams, getTimeEntries, getTimeTrackingForUserSpans } from "admin/admin_rest_api"; -import { Card, Select, Spin, Button, DatePicker, type TimeRangePickerProps, Table } from "antd"; -import { useFetch } from "libs/react_helpers"; -import { useState } from "react"; import { DownloadOutlined, FilterOutlined } from "@ant-design/icons"; +import { getTeams, getTimeEntries, getTimeTrackingForUserSpans } from "admin/admin_rest_api"; +import { Button, Card, DatePicker, Select, Spin, Table, type TimeRangePickerProps } from "antd"; +import FixedExpandableTable from "components/fixed_expandable_table"; +import LinkButton 
from "components/link_button"; +import dayjs, { type Dayjs } from "dayjs"; import saveAs from "file-saver"; import { formatMilliseconds } from "libs/format_utils"; -import ProjectAndAnnotationTypeDropdown from "./project_and_annotation_type_dropdown"; -import { isUserAdminOrTeamManager, transformToCSVRow } from "libs/utils"; -import messages from "messages"; +import { useFetch } from "libs/react_helpers"; import Toast from "libs/toast"; -import TimeTrackingDetailView from "./time_tracking_detail_view"; -import LinkButton from "components/link_button"; -import FixedExpandableTable from "components/fixed_expandable_table"; +import { isUserAdminOrTeamManager, transformToCSVRow } from "libs/utils"; import * as Utils from "libs/utils"; -import type { APITimeTrackingPerUser } from "types/api_flow_types"; -import { useSelector } from "react-redux"; +import messages from "messages"; +import { AnnotationStateFilterEnum, AnnotationTypeFilterEnum } from "oxalis/constants"; import type { OxalisState } from "oxalis/store"; -import dayjs, { type Dayjs } from "dayjs"; -import { AnnotationTypeFilterEnum, AnnotationStateFilterEnum } from "oxalis/constants"; +import { useState } from "react"; +import { useSelector } from "react-redux"; +import type { APITimeTrackingPerUser } from "types/api_flow_types"; +import ProjectAndAnnotationTypeDropdown from "./project_and_annotation_type_dropdown"; +import TimeTrackingDetailView from "./time_tracking_detail_view"; const { RangePicker } = DatePicker; const TIMETRACKING_CSV_HEADER_PER_USER = ["userId,userFirstName,userLastName,timeTrackedInSeconds"]; diff --git a/frontend/javascripts/admin/task/task_annotation_view.tsx b/frontend/javascripts/admin/task/task_annotation_view.tsx index e856eb85527..cf20ba685de 100644 --- a/frontend/javascripts/admin/task/task_annotation_view.tsx +++ b/frontend/javascripts/admin/task/task_annotation_view.tsx @@ -1,35 +1,35 @@ -import { App, Dropdown, type MenuProps, Tooltip } from "antd"; import { - EyeOutlined, - 
PlayCircleOutlined, CheckCircleOutlined, - TeamOutlined, - RollbackOutlined, - DeleteOutlined, - FolderOpenOutlined, - DownloadOutlined, ClockCircleOutlined, + DeleteOutlined, DownOutlined, + DownloadOutlined, + EyeOutlined, + FolderOpenOutlined, + PlayCircleOutlined, + RollbackOutlined, + TeamOutlined, } from "@ant-design/icons"; -import { connect } from "react-redux"; -import { useEffect, useState } from "react"; -import type { APIUser, APITask, APIAnnotation } from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; -import { formatSeconds } from "libs/format_utils"; -import { AsyncLink } from "components/async_clickables"; import { - reOpenAnnotation as reOpenAnnotationAPI, - finishAnnotation as finishAnnotationAPI, - resetAnnotation as resetAnnotationAPI, deleteAnnotation as deleteAnnotationAPI, downloadAnnotation as downloadAnnotationAPI, + finishAnnotation as finishAnnotationAPI, + reOpenAnnotation as reOpenAnnotationAPI, + resetAnnotation as resetAnnotationAPI, } from "admin/admin_rest_api"; +import { getAnnotationsForTask } from "admin/api/tasks"; +import { App, Dropdown, type MenuProps, Tooltip } from "antd"; +import { AsyncLink } from "components/async_clickables"; import FormattedDate from "components/formatted_date"; -import Toast from "libs/toast"; import TransferTaskModal from "dashboard/transfer_task_modal"; +import { formatSeconds } from "libs/format_utils"; +import Toast from "libs/toast"; import messages from "messages"; import { getVolumeDescriptors } from "oxalis/model/accessors/volumetracing_accessor"; -import { getAnnotationsForTask } from "admin/api/tasks"; +import type { OxalisState } from "oxalis/store"; +import { useEffect, useState } from "react"; +import { connect } from "react-redux"; +import type { APIAnnotation, APITask, APIUser } from "types/api_flow_types"; type OwnProps = { task: APITask; diff --git a/frontend/javascripts/admin/task/task_create_bulk_view.tsx 
b/frontend/javascripts/admin/task/task_create_bulk_view.tsx index 70a0e2f85de..aa16089f392 100644 --- a/frontend/javascripts/admin/task/task_create_bulk_view.tsx +++ b/frontend/javascripts/admin/task/task_create_bulk_view.tsx @@ -1,14 +1,14 @@ -import { Form, Input, Button, Card, Upload, Spin, Progress, Divider, App } from "antd"; -import { useState } from "react"; import { InboxOutlined } from "@ant-design/icons"; -import _ from "lodash"; -import type { APITask } from "types/api_flow_types"; -import type { BoundingBoxObject } from "oxalis/store"; -import type { Vector3 } from "oxalis/constants"; import { createTasks } from "admin/api/tasks"; import { handleTaskCreationResponse } from "admin/task/task_create_form_view"; -import Messages from "messages"; +import { App, Button, Card, Divider, Form, Input, Progress, Spin, Upload } from "antd"; import Toast from "libs/toast"; +import _ from "lodash"; +import Messages from "messages"; +import type { Vector3 } from "oxalis/constants"; +import type { BoundingBoxObject } from "oxalis/store"; +import { useState } from "react"; +import type { APITask } from "types/api_flow_types"; const FormItem = Form.Item; const { TextArea } = Input; diff --git a/frontend/javascripts/admin/task/task_create_form_view.tsx b/frontend/javascripts/admin/task/task_create_form_view.tsx index d956d15abc7..6ff0bac4fcb 100644 --- a/frontend/javascripts/admin/task/task_create_form_view.tsx +++ b/frontend/javascripts/admin/task/task_create_form_view.tsx @@ -1,52 +1,52 @@ -import type { RouteComponentProps } from "react-router-dom"; -import { withRouter } from "react-router-dom"; +import { InboxOutlined, ReloadOutlined, WarningOutlined } from "@ant-design/icons"; import { - Row, + getActiveDatasetsOfMyOrganization, + getProjects, + getScripts, + getTaskTypes, + getUnversionedAnnotationInformation, +} from "admin/admin_rest_api"; +import { createTaskFromNML, createTasks, getTask, updateTask } from "admin/api/tasks"; +import type { + NewNmlTask, + 
NewTask, + TaskCreationResponse, + TaskCreationResponseContainer, +} from "admin/task/task_create_bulk_view"; +import { NUM_TASKS_PER_BATCH, normalizeFileEvent } from "admin/task/task_create_bulk_view"; +import { + App, + Button, + Card, Col, Divider, Form, - Select, - Button, - Card, - Radio, - Upload, - InputNumber, Input, - Spin, + InputNumber, + Radio, type RadioChangeEvent, + Row, + Select, + Spin, Tooltip, - App, + Upload, type UploadFile, } from "antd"; -import React, { useEffect, useState } from "react"; -import { InboxOutlined, ReloadOutlined, WarningOutlined } from "@ant-design/icons"; -import _ from "lodash"; -import type { APIDataset, APITaskType, APIProject, APIScript, APITask } from "types/api_flow_types"; -import type { BoundingBoxObject } from "oxalis/store"; -import type { - NewTask, - NewNmlTask, - TaskCreationResponse, - TaskCreationResponseContainer, -} from "admin/task/task_create_bulk_view"; -import { normalizeFileEvent, NUM_TASKS_PER_BATCH } from "admin/task/task_create_bulk_view"; -import { Vector3Input, Vector6Input } from "libs/vector_input"; -import type { Vector3, Vector6 } from "oxalis/constants"; -import { - getActiveDatasetsOfMyOrganization, - getUnversionedAnnotationInformation, - getProjects, - getScripts, - getTaskTypes, -} from "admin/admin_rest_api"; -import { createTaskFromNML, createTasks, getTask, updateTask } from "admin/api/tasks"; -import { coalesce, tryToAwaitPromise } from "libs/utils"; +import type { useAppProps } from "antd/es/app/context"; +import { AsyncButton } from "components/async_clickables"; +import { formatDateInLocalTimeZone } from "components/formatted_date"; import SelectExperienceDomain from "components/select_experience_domain"; -import messages from "messages"; import { saveAs } from "file-saver"; -import { formatDateInLocalTimeZone } from "components/formatted_date"; -import { AsyncButton } from "components/async_clickables"; -import type { useAppProps } from "antd/es/app/context"; +import { coalesce, 
tryToAwaitPromise } from "libs/utils"; +import { Vector3Input, Vector6Input } from "libs/vector_input"; +import _ from "lodash"; +import messages from "messages"; +import type { Vector3, Vector6 } from "oxalis/constants"; +import type { BoundingBoxObject } from "oxalis/store"; +import React, { useEffect, useState } from "react"; +import type { RouteComponentProps } from "react-router-dom"; +import { withRouter } from "react-router-dom"; +import type { APIDataset, APIProject, APIScript, APITask, APITaskType } from "types/api_flow_types"; const FormItem = Form.Item; const RadioGroup = Radio.Group; diff --git a/frontend/javascripts/admin/task/task_create_view.tsx b/frontend/javascripts/admin/task/task_create_view.tsx index a9f40ce6ef9..eb489cb2372 100644 --- a/frontend/javascripts/admin/task/task_create_view.tsx +++ b/frontend/javascripts/admin/task/task_create_view.tsx @@ -1,7 +1,7 @@ import { BarsOutlined, ScheduleOutlined } from "@ant-design/icons"; -import { Tabs, type TabsProps } from "antd"; import TaskCreateBulkView from "admin/task/task_create_bulk_view"; import TaskCreateFormView from "admin/task/task_create_form_view"; +import { Tabs, type TabsProps } from "antd"; const TaskCreateView = () => { const tabs: TabsProps["items"] = [ diff --git a/frontend/javascripts/admin/task/task_list_view.tsx b/frontend/javascripts/admin/task/task_list_view.tsx index 077d3a6600a..03635f7f207 100644 --- a/frontend/javascripts/admin/task/task_list_view.tsx +++ b/frontend/javascripts/admin/task/task_list_view.tsx @@ -1,6 +1,3 @@ -import { Link } from "react-router-dom"; -import { PropTypes } from "@scalableminds/prop-types"; -import { Tag, Spin, Button, Input, Modal, Card, Alert, App, type TableProps } from "antd"; import { CheckCircleOutlined, ClockCircleOutlined, @@ -13,32 +10,35 @@ import { PlusOutlined, UserAddOutlined, } from "@ant-design/icons"; -import type React from "react"; -import { useEffect, useState } from "react"; -import _ from "lodash"; -import features from 
"features"; -import { AsyncLink } from "components/async_clickables"; -import type { APITask, APITaskType, TaskStatus } from "types/api_flow_types"; +import { PropTypes } from "@scalableminds/prop-types"; import { downloadAnnotation as downloadAnnotationAPI } from "admin/admin_rest_api"; import { + assignTaskToUser as assignTaskToUserAPI, deleteTask as deleteTaskAPI, getTasks, - assignTaskToUser as assignTaskToUserAPI, } from "admin/api/tasks"; -import { formatTuple, formatSeconds } from "libs/format_utils"; -import { handleGenericError } from "libs/error_handling"; -import FormattedDate from "components/formatted_date"; -import Persistence from "libs/persistence"; import TaskAnnotationView from "admin/task/task_annotation_view"; -import LinkButton from "components/link_button"; import { downloadTasksAsCSV } from "admin/task/task_create_form_view"; import type { QueryObject, TaskFormFieldValues } from "admin/task/task_search_form"; import TaskSearchForm from "admin/task/task_search_form"; +import UserSelectionComponent from "admin/user/user_selection_component"; +import { Alert, App, Button, Card, Input, Modal, Spin, type TableProps, Tag } from "antd"; +import { AsyncLink } from "components/async_clickables"; +import FixedExpandableTable from "components/fixed_expandable_table"; +import FormattedDate from "components/formatted_date"; +import LinkButton from "components/link_button"; +import features from "features"; +import { handleGenericError } from "libs/error_handling"; +import { formatSeconds, formatTuple } from "libs/format_utils"; +import Persistence from "libs/persistence"; import Toast from "libs/toast"; import * as Utils from "libs/utils"; +import _ from "lodash"; import messages from "messages"; -import FixedExpandableTable from "components/fixed_expandable_table"; -import UserSelectionComponent from "admin/user/user_selection_component"; +import type React from "react"; +import { useEffect, useState } from "react"; +import { Link } from 
"react-router-dom"; +import type { APITask, APITaskType, TaskStatus } from "types/api_flow_types"; const { Search, TextArea } = Input; diff --git a/frontend/javascripts/admin/task/task_search_form.tsx b/frontend/javascripts/admin/task/task_search_form.tsx index e37df5bdbba..0c83722f815 100644 --- a/frontend/javascripts/admin/task/task_search_form.tsx +++ b/frontend/javascripts/admin/task/task_search_form.tsx @@ -1,12 +1,12 @@ -import { Form, Row, Dropdown, Col, Button, Input, Select } from "antd"; -import { DownloadOutlined, DownOutlined, RetweetOutlined } from "@ant-design/icons"; +import { DownOutlined, DownloadOutlined, RetweetOutlined } from "@ant-design/icons"; import { PropTypes } from "@scalableminds/prop-types"; -import { useEffect, useState } from "react"; -import _ from "lodash"; -import type { APIUser, APIProject, APITaskType } from "types/api_flow_types"; import { getEditableUsers, getProjects, getTaskTypes } from "admin/admin_rest_api"; +import { Button, Col, Dropdown, Form, Input, Row, Select } from "antd"; import Persistence from "libs/persistence"; import { useEffectOnlyOnce } from "libs/react_hooks"; +import _ from "lodash"; +import { useEffect, useState } from "react"; +import type { APIProject, APITaskType, APIUser } from "types/api_flow_types"; const FormItem = Form.Item; export type QueryObject = { diff --git a/frontend/javascripts/admin/tasktype/recommended_configuration_view.tsx b/frontend/javascripts/admin/tasktype/recommended_configuration_view.tsx index df0837315c8..8c76f9c0158 100644 --- a/frontend/javascripts/admin/tasktype/recommended_configuration_view.tsx +++ b/frontend/javascripts/admin/tasktype/recommended_configuration_view.tsx @@ -1,13 +1,13 @@ -import { Checkbox, Col, Collapse, Form, Input, Row, Table, Button, type CollapseProps } from "antd"; +import { Button, Checkbox, Col, Collapse, type CollapseProps, Form, Input, Row, Table } from "antd"; import type { FormInstance } from "antd/lib/form"; -import * as React from "react"; 
-import _ from "lodash"; import { jsonEditStyle } from "dashboard/dataset/helper_components"; +import features from "features"; import { jsonStringify } from "libs/utils"; -import { settings, type RecommendedConfiguration } from "messages"; -import { validateUserSettingsJSON } from "types/validation"; +import _ from "lodash"; +import { type RecommendedConfiguration, settings } from "messages"; import { TDViewDisplayModeEnum } from "oxalis/constants"; -import features from "features"; +import * as React from "react"; +import { validateUserSettingsJSON } from "types/validation"; const FormItem = Form.Item; function getRecommendedConfigByCategory() { diff --git a/frontend/javascripts/admin/tasktype/task_type_create_view.tsx b/frontend/javascripts/admin/tasktype/task_type_create_view.tsx index 081a1109ccb..52f33f7b025 100644 --- a/frontend/javascripts/admin/tasktype/task_type_create_view.tsx +++ b/frontend/javascripts/admin/tasktype/task_type_create_view.tsx @@ -1,30 +1,30 @@ -import { Button, Card, Checkbox, Form, Input, Radio, Select, InputNumber, Tooltip } from "antd"; -import { syncValidator } from "types/validation"; import { InfoCircleOutlined } from "@ant-design/icons"; -import type { RouteComponentProps } from "react-router-dom"; -import { withRouter } from "react-router-dom"; -import { useEffect, useState } from "react"; -import _ from "lodash"; -import { - type APITaskType, - type TracingType, - TracingTypeEnum, - type APIAllowedMode, - type APIMagRestrictions, - type APITeam, -} from "types/api_flow_types"; import { - getEditableTeams, createTaskType, - updateTaskType, + getEditableTeams, getTaskType, + updateTaskType, } from "admin/admin_rest_api"; import RecommendedConfigurationView, { getDefaultRecommendedConfiguration, } from "admin/tasktype/recommended_configuration_view"; -import { useFetch } from "libs/react_helpers"; +import { Button, Card, Checkbox, Form, Input, InputNumber, Radio, Select, Tooltip } from "antd"; import type { RuleObject } from 
"antd/es/form"; +import { useFetch } from "libs/react_helpers"; import { jsonStringify } from "libs/utils"; +import _ from "lodash"; +import { useEffect, useState } from "react"; +import type { RouteComponentProps } from "react-router-dom"; +import { withRouter } from "react-router-dom"; +import { + type APIAllowedMode, + type APIMagRestrictions, + type APITaskType, + type APITeam, + type TracingType, + TracingTypeEnum, +} from "types/api_flow_types"; +import { syncValidator } from "types/validation"; const RadioGroup = Radio.Group; const FormItem = Form.Item; diff --git a/frontend/javascripts/admin/tasktype/task_type_list_view.tsx b/frontend/javascripts/admin/tasktype/task_type_list_view.tsx index 7a101a654fd..10bef9cd984 100644 --- a/frontend/javascripts/admin/tasktype/task_type_list_view.tsx +++ b/frontend/javascripts/admin/tasktype/task_type_list_view.tsx @@ -1,30 +1,30 @@ -import { Link } from "react-router-dom"; +import { + DeleteOutlined, + DownloadOutlined, + EditOutlined, + EyeOutlined, + PlusOutlined, + ScheduleOutlined, +} from "@ant-design/icons"; import { PropTypes } from "@scalableminds/prop-types"; -import { Table, Tag, Spin, Button, Input, App } from "antd"; -import Markdown from "libs/markdown_adapter"; -import * as React from "react"; -import _ from "lodash"; -import { AsyncLink } from "components/async_clickables"; -import type { APITaskType } from "types/api_flow_types"; import { - getTaskTypes, deleteTaskType as deleteTaskTypeAPI, downloadAnnotation, + getTaskTypes, } from "admin/admin_rest_api"; -import { handleGenericError } from "libs/error_handling"; +import { App, Button, Input, Spin, Table, Tag } from "antd"; +import { AsyncLink } from "components/async_clickables"; import LinkButton from "components/link_button"; +import { handleGenericError } from "libs/error_handling"; +import Markdown from "libs/markdown_adapter"; import Persistence from "libs/persistence"; import * as Utils from "libs/utils"; +import _ from "lodash"; import messages 
from "messages"; -import { - DeleteOutlined, - DownloadOutlined, - EditOutlined, - EyeOutlined, - PlusOutlined, - ScheduleOutlined, -} from "@ant-design/icons"; +import * as React from "react"; import { useEffect, useState } from "react"; +import { Link } from "react-router-dom"; +import type { APITaskType } from "types/api_flow_types"; const { Column } = Table; const { Search } = Input; diff --git a/frontend/javascripts/admin/team/create_team_modal_view.tsx b/frontend/javascripts/admin/team/create_team_modal_view.tsx index 0bcdc293abc..84d122d9b9f 100644 --- a/frontend/javascripts/admin/team/create_team_modal_view.tsx +++ b/frontend/javascripts/admin/team/create_team_modal_view.tsx @@ -1,6 +1,6 @@ -import { Modal, Input, Form } from "antd"; import { TagOutlined } from "@ant-design/icons"; import { createTeam } from "admin/admin_rest_api"; +import { Form, Input, Modal } from "antd"; import Shortcut from "libs/shortcut_component"; const FormItem = Form.Item; type Props = { diff --git a/frontend/javascripts/admin/team/edit_team_modal_view.tsx b/frontend/javascripts/admin/team/edit_team_modal_view.tsx index ad7ba7a32c6..578f2d05576 100644 --- a/frontend/javascripts/admin/team/edit_team_modal_view.tsx +++ b/frontend/javascripts/admin/team/edit_team_modal_view.tsx @@ -1,11 +1,11 @@ import { MinusCircleOutlined, PlusCircleOutlined } from "@ant-design/icons"; import { getEditableUsers, updateUser } from "admin/admin_rest_api"; -import { Modal, AutoComplete, Input, Spin, Tooltip } from "antd"; +import { AutoComplete, Input, Modal, Spin, Tooltip } from "antd"; import type { DefaultOptionType } from "antd/lib/select"; +import { useEffectOnlyOnce } from "libs/react_hooks"; import { useState } from "react"; import type { APITeam, APITeamMembership, APIUser } from "types/api_flow_types"; import { filterTeamMembersOf, renderUsersForTeam } from "./team_list_view"; -import { useEffectOnlyOnce } from "libs/react_hooks"; type Props = { onCancel: (...args: Array) => any; diff --git 
a/frontend/javascripts/admin/team/team_list_view.tsx b/frontend/javascripts/admin/team/team_list_view.tsx index a07bb04605c..e1c3d0eb8e0 100644 --- a/frontend/javascripts/admin/team/team_list_view.tsx +++ b/frontend/javascripts/admin/team/team_list_view.tsx @@ -1,23 +1,23 @@ -import { PropTypes } from "@scalableminds/prop-types"; -import { Table, Spin, Button, Input, Alert, Tag, App } from "antd"; import { DeleteOutlined, PlusOutlined, UserOutlined } from "@ant-design/icons"; -import * as React from "react"; -import _ from "lodash"; -import type { APITeam, APITeamMembership, APIUser } from "types/api_flow_types"; +import { PropTypes } from "@scalableminds/prop-types"; import { - getEditableTeams, deleteTeam as deleteTeamAPI, + getEditableTeams, getEditableUsers, } from "admin/admin_rest_api"; -import { handleGenericError } from "libs/error_handling"; -import LinkButton from "components/link_button"; import CreateTeamModal from "admin/team/create_team_modal_view"; +import { Alert, App, Button, Input, Spin, Table, Tag } from "antd"; +import LinkButton from "components/link_button"; +import { handleGenericError } from "libs/error_handling"; +import { stringToColor } from "libs/format_utils"; import Persistence from "libs/persistence"; import * as Utils from "libs/utils"; +import _ from "lodash"; import messages from "messages"; -import { stringToColor } from "libs/format_utils"; -import EditTeamModalView from "./edit_team_modal_view"; +import * as React from "react"; import { useEffect, useState } from "react"; +import type { APITeam, APITeamMembership, APIUser } from "types/api_flow_types"; +import EditTeamModalView from "./edit_team_modal_view"; const { Column } = Table; const { Search } = Input; diff --git a/frontend/javascripts/admin/user/experience_modal_view.tsx b/frontend/javascripts/admin/user/experience_modal_view.tsx index 8667d9f8f74..77de9b8b46e 100644 --- a/frontend/javascripts/admin/user/experience_modal_view.tsx +++ 
b/frontend/javascripts/admin/user/experience_modal_view.tsx @@ -1,14 +1,14 @@ -import { useState } from "react"; -import { Modal, Tooltip, Table, InputNumber, Tag, Badge } from "antd"; import { CloseOutlined, DeleteOutlined, RollbackOutlined } from "@ant-design/icons"; -import _ from "lodash"; -import type { APIUser, ExperienceDomainList } from "types/api_flow_types"; -import { handleGenericError } from "libs/error_handling"; import { updateUser } from "admin/admin_rest_api"; +import { Badge, InputNumber, Modal, Table, Tag, Tooltip } from "antd"; import HighlightableRow from "components/highlightable_row"; import SelectExperienceDomain from "components/select_experience_domain"; +import { handleGenericError } from "libs/error_handling"; import Toast from "libs/toast"; import * as Utils from "libs/utils"; +import _ from "lodash"; +import { useState } from "react"; +import type { APIUser, ExperienceDomainList } from "types/api_flow_types"; const { Column } = Table; diff --git a/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx b/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx index 33adc70da4f..9479c66da8e 100644 --- a/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx +++ b/frontend/javascripts/admin/user/permissions_and_teams_modal_view.tsx @@ -1,12 +1,12 @@ -import { Modal, Radio, Col, Row, Checkbox, Divider, type RadioChangeEvent, App } from "antd"; import { InfoCircleOutlined } from "@ant-design/icons"; -import React, { type Key, useEffect, useState } from "react"; +import { getEditableTeams, updateUser } from "admin/admin_rest_api"; +import { App, Checkbox, Col, Divider, Modal, Radio, type RadioChangeEvent, Row } from "antd"; +import { useFetch } from "libs/react_helpers"; +import * as Utils from "libs/utils"; import _ from "lodash"; -import type { APIUser, APITeam, APITeamMembership } from "types/api_flow_types"; -import { updateUser, getEditableTeams } from "admin/admin_rest_api"; import messages from 
"messages"; -import * as Utils from "libs/utils"; -import { useFetch } from "libs/react_helpers"; +import React, { type Key, useEffect, useState } from "react"; +import type { APITeam, APITeamMembership, APIUser } from "types/api_flow_types"; const RadioButton = Radio.Button; const RadioGroup = Radio.Group; diff --git a/frontend/javascripts/admin/user/user_list_view.tsx b/frontend/javascripts/admin/user/user_list_view.tsx index 70c27efe212..9cf7860aff0 100644 --- a/frontend/javascripts/admin/user/user_list_view.tsx +++ b/frontend/javascripts/admin/user/user_list_view.tsx @@ -1,7 +1,3 @@ -import type { RouteComponentProps } from "react-router-dom"; -import { Link } from "react-router-dom"; -import { PropTypes } from "@scalableminds/prop-types"; -import { Table, Tag, Spin, Button, Input, Modal, Alert, Row, Col, Tooltip, App } from "antd"; import { CheckCircleOutlined, CloseCircleOutlined, @@ -14,34 +10,38 @@ import { UserDeleteOutlined, UserOutlined, } from "@ant-design/icons"; -import { connect } from "react-redux"; -import React, { type Key, useEffect, useState } from "react"; -import _ from "lodash"; -import dayjs from "dayjs"; -import { location } from "libs/window"; -import type { - APIUser, - APITeamMembership, - ExperienceMap, - APIOrganization, -} from "types/api_flow_types"; -import { InviteUsersModal } from "admin/onboarding"; -import type { OxalisState } from "oxalis/store"; -import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; -import LinkButton from "components/link_button"; +import { PropTypes } from "@scalableminds/prop-types"; import { getEditableUsers, updateUser } from "admin/admin_rest_api"; -import EditableTextLabel from "oxalis/view/components/editable_text_label"; +import { InviteUsersModal } from "admin/onboarding"; +import { getActiveUserCount } from "admin/organization/pricing_plan_utils"; +import { renderTeamRolesAndPermissionsForUser } from "admin/team/team_list_view"; import ExperienceModalView from 
"admin/user/experience_modal_view"; -import Persistence from "libs/persistence"; import PermissionsAndTeamsModalView from "admin/user/permissions_and_teams_modal_view"; -import { getActiveUserCount } from "admin/organization/pricing_plan_utils"; +import { Alert, App, Button, Col, Input, Modal, Row, Spin, Table, Tag, Tooltip } from "antd"; +import LinkButton from "components/link_button"; +import dayjs from "dayjs"; +import Persistence from "libs/persistence"; import Toast from "libs/toast"; import * as Utils from "libs/utils"; +import { location } from "libs/window"; +import _ from "lodash"; import messages from "messages"; +import { enforceActiveOrganization } from "oxalis/model/accessors/organization_accessors"; +import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import type { OxalisState } from "oxalis/store"; +import EditableTextLabel from "oxalis/view/components/editable_text_label"; +import React, { type Key, useEffect, useState } from "react"; +import { connect } from "react-redux"; +import type { RouteComponentProps } from "react-router-dom"; +import { Link } from "react-router-dom"; +import type { + APIOrganization, + APITeamMembership, + APIUser, + ExperienceMap, +} from "types/api_flow_types"; import { logoutUserAction } from "../../oxalis/model/actions/user_actions"; import Store from "../../oxalis/store"; -import { enforceActiveOrganization } from "oxalis/model/accessors/organization_accessors"; -import { renderTeamRolesAndPermissionsForUser } from "admin/team/team_list_view"; const { Column } = Table; const { Search } = Input; diff --git a/frontend/javascripts/admin/user/user_selection_component.tsx b/frontend/javascripts/admin/user/user_selection_component.tsx index 4d9a1cecd99..ad22b0918b4 100644 --- a/frontend/javascripts/admin/user/user_selection_component.tsx +++ b/frontend/javascripts/admin/user/user_selection_component.tsx @@ -1,9 +1,9 @@ -import { Spin, Select } from "antd"; -import _ from "lodash"; import { getUsers } 
from "admin/admin_rest_api"; +import { Select, Spin } from "antd"; +import { handleGenericError } from "libs/error_handling"; import { useFetch } from "libs/react_helpers"; +import _ from "lodash"; import { useState } from "react"; -import { handleGenericError } from "libs/error_handling"; type Props = { handleSelection: (arg0: string) => void; diff --git a/frontend/javascripts/admin/voxelytics/ai_model_list_view.tsx b/frontend/javascripts/admin/voxelytics/ai_model_list_view.tsx index 83b34bdc296..a0fdd0c6e36 100644 --- a/frontend/javascripts/admin/voxelytics/ai_model_list_view.tsx +++ b/frontend/javascripts/admin/voxelytics/ai_model_list_view.tsx @@ -1,21 +1,21 @@ -import _ from "lodash"; -import { useState } from "react"; import { PlusOutlined, SyncOutlined } from "@ant-design/icons"; -import { Table, Button, Modal, Space } from "antd"; import { getAiModels } from "admin/admin_rest_api"; -import type { AiModel, APIAnnotation } from "types/api_flow_types"; +import { JobState } from "admin/job/job_list_view"; +import { Button, Modal, Space, Table } from "antd"; import FormattedDate from "components/formatted_date"; +import { PageNotAvailableToNormalUser } from "components/permission_enforcer"; +import { useGuardedFetch } from "libs/react_helpers"; +import _ from "lodash"; +import type { Vector3 } from "oxalis/constants"; +import { getMagInfo, getSegmentationLayerByName } from "oxalis/model/accessors/dataset_accessor"; import { formatUserName } from "oxalis/model/accessors/user_accessor"; -import { useSelector } from "react-redux"; import type { OxalisState } from "oxalis/store"; -import { JobState } from "admin/job/job_list_view"; -import { Link } from "react-router-dom"; -import { useGuardedFetch } from "libs/react_helpers"; -import { PageNotAvailableToNormalUser } from "components/permission_enforcer"; import { type AnnotationInfoForAIJob, TrainAiModelTab } from "oxalis/view/jobs/train_ai_model"; -import { getMagInfo, getSegmentationLayerByName } from 
"oxalis/model/accessors/dataset_accessor"; -import type { Vector3 } from "oxalis/constants"; +import { useState } from "react"; import type { Key } from "react"; +import { useSelector } from "react-redux"; +import { Link } from "react-router-dom"; +import type { APIAnnotation, AiModel } from "types/api_flow_types"; export default function AiModelListView() { const activeUser = useSelector((state: OxalisState) => state.activeUser); diff --git a/frontend/javascripts/admin/voxelytics/artifacts_disk_usage_list.tsx b/frontend/javascripts/admin/voxelytics/artifacts_disk_usage_list.tsx index b8647a1cef0..3fec1e3da80 100644 --- a/frontend/javascripts/admin/voxelytics/artifacts_disk_usage_list.tsx +++ b/frontend/javascripts/admin/voxelytics/artifacts_disk_usage_list.tsx @@ -1,15 +1,15 @@ -import type React from "react"; import { Table } from "antd"; import _ from "lodash"; +import type React from "react"; +import type { ColumnsType } from "antd/lib/table"; +import { formatCountToDataAmountUnit } from "libs/format_utils"; +import { localeCompareBy } from "libs/utils"; import type { VoxelyticsArtifactConfig, VoxelyticsTaskConfigWithHierarchy, } from "types/api_flow_types"; -import { formatCountToDataAmountUnit } from "libs/format_utils"; -import type { ColumnsType } from "antd/lib/table"; import { renderArtifactPath } from "./artifacts_view"; -import { localeCompareBy } from "libs/utils"; type ArtifactTableEntry = { artifactName: string; diff --git a/frontend/javascripts/admin/voxelytics/artifacts_view.tsx b/frontend/javascripts/admin/voxelytics/artifacts_view.tsx index 3457b46ce7d..5ed2d9a5fd5 100644 --- a/frontend/javascripts/admin/voxelytics/artifacts_view.tsx +++ b/frontend/javascripts/admin/voxelytics/artifacts_view.tsx @@ -1,11 +1,11 @@ -import { JSONTree } from "react-json-tree"; -import { Button, Card, message } from "antd"; import { CopyOutlined, ExportOutlined } from "@ant-design/icons"; -import type { VoxelyticsArtifactConfig } from "types/api_flow_types"; import { 
getVoxelyticsArtifactChecksums } from "admin/admin_rest_api"; +import { Button, Card, message } from "antd"; import { formatCountToDataAmountUnit } from "libs/format_utils"; -import { copyToClipboad, isObjectEmpty, useTheme } from "./utils"; +import { JSONTree } from "react-json-tree"; import { Link } from "react-router-dom"; +import type { VoxelyticsArtifactConfig } from "types/api_flow_types"; +import { copyToClipboad, isObjectEmpty, useTheme } from "./utils"; export function renderArtifactPath(artifact: VoxelyticsArtifactConfig) { return ( diff --git a/frontend/javascripts/admin/voxelytics/dag_view.tsx b/frontend/javascripts/admin/voxelytics/dag_view.tsx index 8756d1e1069..d6aea0a6627 100644 --- a/frontend/javascripts/admin/voxelytics/dag_view.tsx +++ b/frontend/javascripts/admin/voxelytics/dag_view.tsx @@ -1,3 +1,4 @@ +import dagre from "dagre"; import { useRef, useState } from "react"; import ReactFlow, { MiniMap, @@ -6,20 +7,19 @@ import ReactFlow, { type Edge as FlowEdge, type ReactFlowInstance, } from "react-flow-renderer"; -import dagre from "dagre"; +import { ExpandOutlined, MinusOutlined, PlusOutlined } from "@ant-design/icons"; +import { Button } from "antd"; import ColorHash from "color-hash"; import { memoize } from "lodash"; +import type { OxalisState, Theme } from "oxalis/store"; +import { useSelector } from "react-redux"; import { VoxelyticsRunState, type VoxelyticsTaskConfigWithName, type VoxelyticsWorkflowDag, type VoxelyticsWorkflowDagEdge, } from "types/api_flow_types"; -import { useSelector } from "react-redux"; -import type { OxalisState, Theme } from "oxalis/store"; -import { Button } from "antd"; -import { ExpandOutlined, MinusOutlined, PlusOutlined } from "@ant-design/icons"; export const colorHasher = new ColorHash({ lightness: [0.35, 0.5, 0.65] }); @@ -196,7 +196,7 @@ function getEdgesAndNodes( style: { opacity, strokeWidth }, labelStyle: { opacity, - fill: labelFontColor ?? theme === "light" ? 
"black" : "white", + fill: (labelFontColor ?? theme === "light") ? "black" : "white", }, labelBgStyle: { fill: theme === "light" ? "white" : "black", diff --git a/frontend/javascripts/admin/voxelytics/log_tab.tsx b/frontend/javascripts/admin/voxelytics/log_tab.tsx index 9789b18b715..7dd9cce5397 100644 --- a/frontend/javascripts/admin/voxelytics/log_tab.tsx +++ b/frontend/javascripts/admin/voxelytics/log_tab.tsx @@ -1,14 +1,14 @@ -import { useMemo, useState } from "react"; -import { Button, message, Select, Switch } from "antd"; -import chalk from "chalk"; +import { SyncOutlined } from "@ant-design/icons"; +import { getVoxelyticsLogs } from "admin/admin_rest_api"; import Ansi from "ansi-to-react"; +import { Button, Select, Switch, message } from "antd"; +import chalk from "chalk"; import classnames from "classnames"; import { usePolling } from "libs/react_hooks"; -import { SyncOutlined } from "@ant-design/icons"; -import { getVoxelyticsLogs } from "admin/admin_rest_api"; -import { addAfterPadding, addBeforePadding, type Result, VX_POLLING_INTERVAL } from "./utils"; -import type { VoxelyticsLogLine } from "types/api_flow_types"; import { LOG_LEVELS } from "oxalis/constants"; +import { useMemo, useState } from "react"; +import type { VoxelyticsLogLine } from "types/api_flow_types"; +import { type Result, VX_POLLING_INTERVAL, addAfterPadding, addBeforePadding } from "./utils"; type LogResult = Result>; diff --git a/frontend/javascripts/admin/voxelytics/statistics_tab.tsx b/frontend/javascripts/admin/voxelytics/statistics_tab.tsx index 44453c9666b..c2b376eb648 100644 --- a/frontend/javascripts/admin/voxelytics/statistics_tab.tsx +++ b/frontend/javascripts/admin/voxelytics/statistics_tab.tsx @@ -1,19 +1,19 @@ -import { useState } from "react"; -import { Button, Tooltip } from "antd"; import { SyncOutlined } from "@ant-design/icons"; import { getVoxelyticsChunkStatistics } from "admin/admin_rest_api"; -import { usePolling } from "libs/react_hooks"; +import { Button, 
Tooltip } from "antd"; +import dayjs from "dayjs"; import { - formatCountToDataAmountUnit, formatCPU, - formatTimeIntervalStrict, + formatCountToDataAmountUnit, formatDurationStrict, formatNumber, + formatTimeIntervalStrict, } from "libs/format_utils"; +import { usePolling } from "libs/react_hooks"; +import { pluralize } from "libs/utils"; +import { useState } from "react"; import type { VoxelyticsChunkStatistics } from "types/api_flow_types"; import type { Result } from "./utils"; -import dayjs from "dayjs"; -import { pluralize } from "libs/utils"; type StatisticsResult = Result>; diff --git a/frontend/javascripts/admin/voxelytics/task_list_view.tsx b/frontend/javascripts/admin/voxelytics/task_list_view.tsx index feecafa1149..0447915039a 100644 --- a/frontend/javascripts/admin/voxelytics/task_list_view.tsx +++ b/frontend/javascripts/admin/voxelytics/task_list_view.tsx @@ -1,36 +1,47 @@ -import React, { useEffect, useState, useMemo } from "react"; import { + CheckCircleOutlined, + ClockCircleOutlined, + CloseCircleOutlined, + ExclamationCircleOutlined, + ExportOutlined, + FieldTimeOutlined, + LeftOutlined, + MinusCircleOutlined, + SyncOutlined, +} from "@ant-design/icons"; +import { + App, + Button, + Col, Collapse, + type CollapseProps, + Dropdown, Input, + type MenuProps, Row, - Col, - Button, - Dropdown, - notification, - message, + Select, Tag, Tooltip, - Select, - type MenuProps, - App, - type CollapseProps, + message, + notification, } from "antd"; -import { - ClockCircleOutlined, - MinusCircleOutlined, - CloseCircleOutlined, - SyncOutlined, - CheckCircleOutlined, - ExclamationCircleOutlined, - LeftOutlined, - FieldTimeOutlined, - ExportOutlined, -} from "@ant-design/icons"; import MiniSearch from "minisearch"; +import React, { useEffect, useState, useMemo } from "react"; -import { Link, useHistory, useLocation, useParams } from "react-router-dom"; +import { deleteWorkflow, getVoxelyticsLogs } from "admin/admin_rest_api"; import dayjs from "dayjs"; +import { 
+ formatDateMedium, + formatDurationStrict, + formatTimeInterval, + formatTimeIntervalStrict, +} from "libs/format_utils"; import { useSearchParams, useUpdateEvery } from "libs/react_hooks"; +import { notEmpty } from "libs/utils"; +import { LOG_LEVELS } from "oxalis/constants"; +import type { OxalisState } from "oxalis/store"; +import { useSelector } from "react-redux"; +import { Link, useHistory, useLocation, useParams } from "react-router-dom"; import { VoxelyticsRunState, type VoxelyticsTaskConfig, @@ -39,23 +50,12 @@ import { type VoxelyticsTaskInfo, type VoxelyticsWorkflowReport, } from "types/api_flow_types"; -import { - formatDateMedium, - formatTimeInterval, - formatTimeIntervalStrict, - formatDurationStrict, -} from "libs/format_utils"; +import type { ArrayElement } from "types/globals"; +import ArtifactsDiskUsageList from "./artifacts_disk_usage_list"; import DAGView, { colorHasher } from "./dag_view"; -import TaskView from "./task_view"; import { formatLog } from "./log_tab"; +import TaskView from "./task_view"; import { addAfterPadding, addBeforePadding } from "./utils"; -import { LOG_LEVELS } from "oxalis/constants"; -import { getVoxelyticsLogs, deleteWorkflow } from "admin/admin_rest_api"; -import ArtifactsDiskUsageList from "./artifacts_disk_usage_list"; -import { notEmpty } from "libs/utils"; -import type { ArrayElement } from "types/globals"; -import { useSelector } from "react-redux"; -import type { OxalisState } from "oxalis/store"; const { Search } = Input; diff --git a/frontend/javascripts/admin/voxelytics/task_view.tsx b/frontend/javascripts/admin/voxelytics/task_view.tsx index e9eed30e35b..56ca3d8405b 100644 --- a/frontend/javascripts/admin/voxelytics/task_view.tsx +++ b/frontend/javascripts/admin/voxelytics/task_view.tsx @@ -1,6 +1,7 @@ -import { JSONTree, type ShouldExpandNodeInitially, type LabelRenderer } from "react-json-tree"; import { Progress, Tabs, type TabsProps, Tooltip } from "antd"; +import { formatNumber } from 
"libs/format_utils"; import Markdown from "libs/markdown_adapter"; +import { JSONTree, type LabelRenderer, type ShouldExpandNodeInitially } from "react-json-tree"; import { type VoxelyticsArtifactConfig, VoxelyticsRunState, @@ -12,7 +13,6 @@ import ArtifactsViewer from "./artifacts_view"; import LogTab from "./log_tab"; import StatisticsTab from "./statistics_tab"; import { runStateToStatus, useTheme } from "./utils"; -import { formatNumber } from "libs/format_utils"; const labelRenderer: LabelRenderer = function (_keyPath) { const keyPath = _keyPath.slice().reverse(); diff --git a/frontend/javascripts/admin/voxelytics/workflow_list_view.tsx b/frontend/javascripts/admin/voxelytics/workflow_list_view.tsx index 0a378dccf16..55937c21ede 100644 --- a/frontend/javascripts/admin/voxelytics/workflow_list_view.tsx +++ b/frontend/javascripts/admin/voxelytics/workflow_list_view.tsx @@ -1,21 +1,21 @@ +import { SyncOutlined } from "@ant-design/icons"; +import { PropTypes } from "@scalableminds/prop-types"; +import { getVoxelyticsWorkflows } from "admin/admin_rest_api"; +import { Button, Input, Progress, Table, Tooltip } from "antd"; +import { formatCountToDataAmountUnit, formatDateMedium, formatNumber } from "libs/format_utils"; +import Persistence from "libs/persistence"; +import { usePolling } from "libs/react_hooks"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; import type React from "react"; import { type Key, useEffect, useMemo, useState } from "react"; -import { SyncOutlined } from "@ant-design/icons"; -import { Table, Progress, Tooltip, Button, Input } from "antd"; import { Link } from "react-router-dom"; -import { getVoxelyticsWorkflows } from "admin/admin_rest_api"; import { - type VoxelyticsWorkflowListingRun, VoxelyticsRunState, type VoxelyticsWorkflowListing, + type VoxelyticsWorkflowListingRun, } from "types/api_flow_types"; -import { usePolling } from "libs/react_hooks"; -import { formatCountToDataAmountUnit, formatDateMedium, 
formatNumber } from "libs/format_utils"; -import Toast from "libs/toast"; -import { runStateToStatus, VX_POLLING_INTERVAL } from "./utils"; -import Persistence from "libs/persistence"; -import * as Utils from "libs/utils"; -import { PropTypes } from "@scalableminds/prop-types"; +import { VX_POLLING_INTERVAL, runStateToStatus } from "./utils"; const { Search } = Input; diff --git a/frontend/javascripts/admin/voxelytics/workflow_view.tsx b/frontend/javascripts/admin/voxelytics/workflow_view.tsx index 6c6ad07d206..6cf55655da7 100644 --- a/frontend/javascripts/admin/voxelytics/workflow_view.tsx +++ b/frontend/javascripts/admin/voxelytics/workflow_view.tsx @@ -1,7 +1,13 @@ +import { getVoxelyticsWorkflow, isWorkflowAccessibleBySwitching } from "admin/admin_rest_api"; +import BrainSpinner, { BrainSpinnerWithError } from "components/brain_spinner"; +import { usePolling, useSearchParams } from "libs/react_hooks"; +import Toast from "libs/toast"; import _ from "lodash"; -import { useEffect, useState, useMemo } from "react"; -import { useParams } from "react-router-dom"; +import type { OxalisState } from "oxalis/store"; +import TabTitle from "oxalis/view/components/tab_title_component"; +import { useEffect, useMemo, useState } from "react"; import { useSelector } from "react-redux"; +import { useParams } from "react-router-dom"; import { type APIOrganization, VoxelyticsRunState, @@ -14,12 +20,6 @@ import { type VoxelyticsWorkflowDagNode, type VoxelyticsWorkflowReport, } from "types/api_flow_types"; -import { useSearchParams, usePolling } from "libs/react_hooks"; -import Toast from "libs/toast"; -import type { OxalisState } from "oxalis/store"; -import TabTitle from "oxalis/view/components/tab_title_component"; -import { getVoxelyticsWorkflow, isWorkflowAccessibleBySwitching } from "admin/admin_rest_api"; -import BrainSpinner, { BrainSpinnerWithError } from "components/brain_spinner"; import TaskListView from "./task_list_view"; import { VX_POLLING_INTERVAL } from "./utils"; 
diff --git a/frontend/javascripts/admin/welcome_ui.tsx b/frontend/javascripts/admin/welcome_ui.tsx index 03df025e82e..ebef286618b 100644 --- a/frontend/javascripts/admin/welcome_ui.tsx +++ b/frontend/javascripts/admin/welcome_ui.tsx @@ -1,12 +1,12 @@ import { CloseOutlined } from "@ant-design/icons"; +import { InviteUsersModal } from "admin/onboarding"; import { Button, Tooltip } from "antd"; +import { getDemoDatasetUrl } from "features"; +import renderIndependently from "libs/render_independently"; +import { isUserAdminOrDatasetManager, isUserAdminOrTeamManager } from "libs/utils"; import * as React from "react"; import { Link } from "react-router-dom"; -import { isUserAdminOrTeamManager, isUserAdminOrDatasetManager } from "libs/utils"; import type { APIUser } from "types/api_flow_types"; -import { getDemoDatasetUrl } from "features"; -import renderIndependently from "libs/render_independently"; -import { InviteUsersModal } from "admin/onboarding"; type WhatsNextActionProps = { title: string; description: string; diff --git a/frontend/javascripts/banners.tsx b/frontend/javascripts/banners.tsx index fe02704d528..92b71ffa215 100644 --- a/frontend/javascripts/banners.tsx +++ b/frontend/javascripts/banners.tsx @@ -8,6 +8,7 @@ import FormattedDate from "components/formatted_date"; import dayjs from "dayjs"; import { useFetch, useInterval } from "libs/react_helpers"; import { parseCTimeDefaultDate } from "libs/utils"; +import * as Utils from "libs/utils"; import _ from "lodash"; import constants from "oxalis/constants"; import { setNavbarHeightAction } from "oxalis/model/actions/ui_actions"; @@ -18,7 +19,6 @@ import type React from "react"; import { useEffect, useState } from "react"; import { useSelector } from "react-redux"; import type { MaintenanceInfo } from "types/api_flow_types"; -import * as Utils from "libs/utils"; const INITIAL_DELAY = 5000; const INTERVAL_TO_FETCH_MAINTENANCES_MS = 60000; // 1min diff --git 
a/frontend/javascripts/components/async_clickables.tsx b/frontend/javascripts/components/async_clickables.tsx index 1b63ccb17c7..043b0f602be 100644 --- a/frontend/javascripts/components/async_clickables.tsx +++ b/frontend/javascripts/components/async_clickables.tsx @@ -1,5 +1,5 @@ -import { Button, ConfigProvider, type ButtonProps } from "antd"; import { LoadingOutlined } from "@ant-design/icons"; +import { Button, type ButtonProps, ConfigProvider } from "antd"; import * as React from "react"; import FastTooltip from "./fast_tooltip"; const { useState, useEffect, useRef } = React; diff --git a/frontend/javascripts/components/brain_spinner.tsx b/frontend/javascripts/components/brain_spinner.tsx index 8d16e026215..f47abf02602 100644 --- a/frontend/javascripts/components/brain_spinner.tsx +++ b/frontend/javascripts/components/brain_spinner.tsx @@ -1,11 +1,11 @@ -import type * as React from "react"; -import type { APIOrganization } from "types/api_flow_types"; -import { AsyncButton } from "components/async_clickables"; import { switchToOrganization } from "admin/admin_rest_api"; +import LoginForm from "admin/auth/login_form"; +import { Button, Card, Col, Row } from "antd"; +import { AsyncButton } from "components/async_clickables"; import messages from "messages"; +import type * as React from "react"; import { Link } from "react-router-dom"; -import { Button, Card, Col, Row } from "antd"; -import LoginForm from "admin/auth/login_form"; +import type { APIOrganization } from "types/api_flow_types"; type Props = { message?: React.ReactNode; diff --git a/frontend/javascripts/components/color_picker.tsx b/frontend/javascripts/components/color_picker.tsx index a896ae00a8d..6a4967d86e8 100644 --- a/frontend/javascripts/components/color_picker.tsx +++ b/frontend/javascripts/components/color_picker.tsx @@ -1,9 +1,9 @@ -import { useRef, useState } from "react"; import { Popover } from "antd"; -import * as Utils from "libs/utils"; -import { HexColorInput, HexColorPicker } from 
"react-colorful"; import useThrottledCallback from "beautiful-react-hooks/useThrottledCallback"; +import * as Utils from "libs/utils"; import type { Vector3 } from "oxalis/constants"; +import { useRef, useState } from "react"; +import { HexColorInput, HexColorPicker } from "react-colorful"; export const ThrottledColorPicker = ({ color, diff --git a/frontend/javascripts/components/disable_generic_dnd.ts b/frontend/javascripts/components/disable_generic_dnd.ts index 127d1ff47d5..9d3f618a4b6 100644 --- a/frontend/javascripts/components/disable_generic_dnd.ts +++ b/frontend/javascripts/components/disable_generic_dnd.ts @@ -1,5 +1,5 @@ -import React from "react"; import window from "libs/window"; +import React from "react"; import type { EmptyObject } from "types/globals"; export default class DisableGenericDnd extends React.Component { componentDidMount() { diff --git a/frontend/javascripts/components/legal.tsx b/frontend/javascripts/components/legal.tsx index c0d19833378..7d12c78fc94 100644 --- a/frontend/javascripts/components/legal.tsx +++ b/frontend/javascripts/components/legal.tsx @@ -1,8 +1,8 @@ -import { Row, Col, Card } from "antd"; +import { getDefaultOrganization, getOperatorData } from "admin/admin_rest_api"; +import { Card, Col, Row } from "antd"; import Markdown from "libs/markdown_adapter"; import React from "react"; import type { APIOrganization } from "types/api_flow_types"; -import { getOperatorData, getDefaultOrganization } from "admin/admin_rest_api"; import type { EmptyObject } from "types/globals"; type Props = EmptyObject; type State = { diff --git a/frontend/javascripts/components/loop.ts b/frontend/javascripts/components/loop.ts index 4216cc20b35..a529c21a589 100644 --- a/frontend/javascripts/components/loop.ts +++ b/frontend/javascripts/components/loop.ts @@ -1,5 +1,5 @@ -import { Component } from "react"; import window from "libs/window"; +import { Component } from "react"; import type { EmptyObject } from "types/globals"; type LoopProps = { 
interval: number; diff --git a/frontend/javascripts/components/permission_enforcer.tsx b/frontend/javascripts/components/permission_enforcer.tsx index 5f279aac6a9..f7a1f29c796 100644 --- a/frontend/javascripts/components/permission_enforcer.tsx +++ b/frontend/javascripts/components/permission_enforcer.tsx @@ -1,4 +1,4 @@ -import { Button, Result, Col, Row } from "antd"; +import { Button, Col, Result, Row } from "antd"; import { Link } from "react-router-dom"; export function PageNotAvailableToNormalUser() { diff --git a/frontend/javascripts/components/pricing_enforcers.tsx b/frontend/javascripts/components/pricing_enforcers.tsx index ec5fde4cbe0..cf769e790e3 100644 --- a/frontend/javascripts/components/pricing_enforcers.tsx +++ b/frontend/javascripts/components/pricing_enforcers.tsx @@ -1,22 +1,22 @@ -import React from "react"; -import { useSelector } from "react-redux"; -import { Alert, type ButtonProps, Button, Result, Popover, Col, Row } from "antd"; import { LockOutlined } from "@ant-design/icons"; import { + type PricingPlanEnum, getFeatureNotAvailableInPlanMessage, isFeatureAllowedByPricingPlan, - type PricingPlanEnum, } from "admin/organization/pricing_plan_utils"; import { isUserAllowedToRequestUpgrades } from "admin/organization/pricing_plan_utils"; -import { Link } from "react-router-dom"; -import type { OxalisState } from "oxalis/store"; -import { rgbToHex } from "libs/utils"; -import { PRIMARY_COLOR } from "oxalis/constants"; import UpgradePricingPlanModal from "admin/organization/upgrade_plan_modal"; -import type { APIOrganization, APIUser } from "types/api_flow_types"; +import { Alert, Button, type ButtonProps, Col, Popover, Result, Row } from "antd"; +import type { PopoverProps } from "antd/lib"; import type { TooltipPlacement } from "antd/lib/tooltip"; +import { rgbToHex } from "libs/utils"; +import { PRIMARY_COLOR } from "oxalis/constants"; +import type { OxalisState } from "oxalis/store"; import { SwitchSetting } from 
"oxalis/view/components/setting_input_views"; -import type { PopoverProps } from "antd/lib"; +import React from "react"; +import { useSelector } from "react-redux"; +import { Link } from "react-router-dom"; +import type { APIOrganization, APIUser } from "types/api_flow_types"; const PRIMARY_COLOR_HEX = rgbToHex(PRIMARY_COLOR); diff --git a/frontend/javascripts/components/redirect.tsx b/frontend/javascripts/components/redirect.tsx index 68bfb3ea29e..defacdddddb 100644 --- a/frontend/javascripts/components/redirect.tsx +++ b/frontend/javascripts/components/redirect.tsx @@ -1,7 +1,7 @@ +import { useEffectOnlyOnce } from "libs/react_hooks"; +import type React from "react"; import type { RouteComponentProps } from "react-router-dom"; import { withRouter } from "react-router-dom"; -import type React from "react"; -import { useEffectOnlyOnce } from "libs/react_hooks"; type Props = { redirectTo: () => Promise; diff --git a/frontend/javascripts/components/secured_route.tsx b/frontend/javascripts/components/secured_route.tsx index 67768a9a3cd..93c85c4f9db 100644 --- a/frontend/javascripts/components/secured_route.tsx +++ b/frontend/javascripts/components/secured_route.tsx @@ -1,17 +1,17 @@ -import React from "react"; -import { Route, withRouter } from "react-router-dom"; -import { connect } from "react-redux"; import LoginView from "admin/auth/login_view"; import { - isFeatureAllowedByPricingPlan, type PricingPlanEnum, + isFeatureAllowedByPricingPlan, } from "admin/organization/pricing_plan_utils"; -import type { APIOrganization, APIUser } from "types/api_flow_types"; import { PageUnavailableForYourPlanView } from "components/pricing_enforcers"; -import type { ComponentType } from "react"; import { isUserAdminOrManager } from "libs/utils"; -import type { RouteComponentProps } from "react-router-dom"; import type { OxalisState } from "oxalis/store"; +import React from "react"; +import type { ComponentType } from "react"; +import { connect } from "react-redux"; +import { 
Route, withRouter } from "react-router-dom"; +import type { RouteComponentProps } from "react-router-dom"; +import type { APIOrganization, APIUser } from "types/api_flow_types"; import { PageNotAvailableToNormalUser } from "./permission_enforcer"; type StateProps = { diff --git a/frontend/javascripts/components/select_experience_domain.tsx b/frontend/javascripts/components/select_experience_domain.tsx index 96dc348c46c..bdd55586e47 100644 --- a/frontend/javascripts/components/select_experience_domain.tsx +++ b/frontend/javascripts/components/select_experience_domain.tsx @@ -1,7 +1,7 @@ +import { getExistingExperienceDomains } from "admin/admin_rest_api"; import { Select, Tooltip } from "antd"; import * as React from "react"; import type { ExperienceDomainList } from "types/api_flow_types"; -import { getExistingExperienceDomains } from "admin/admin_rest_api"; type Props = { value?: string | Array; width: number; diff --git a/frontend/javascripts/components/terms_of_services_check.tsx b/frontend/javascripts/components/terms_of_services_check.tsx index d6b39d39a0d..57ddade4ed5 100644 --- a/frontend/javascripts/components/terms_of_services_check.tsx +++ b/frontend/javascripts/components/terms_of_services_check.tsx @@ -1,3 +1,5 @@ +import { DownOutlined } from "@ant-design/icons"; +import { getUsersOrganizations } from "admin/admin_rest_api"; import { type AcceptanceInfo, acceptTermsOfService, @@ -6,19 +8,17 @@ import { } from "admin/api/terms_of_service"; import { Dropdown, type MenuProps, Modal, Space, Spin } from "antd"; import { AsyncButton } from "components/async_clickables"; +import dayjs from "dayjs"; import { useFetch } from "libs/react_helpers"; import UserLocalStorage from "libs/user_local_storage"; -import dayjs from "dayjs"; +import _ from "lodash"; +import { switchTo } from "navbar"; import type { OxalisState } from "oxalis/store"; import type React from "react"; import { useEffect, useState } from "react"; import { useSelector } from "react-redux"; 
-import { formatDateInLocalTimeZone } from "./formatted_date"; -import { switchTo } from "navbar"; -import { getUsersOrganizations } from "admin/admin_rest_api"; -import { DownOutlined } from "@ant-design/icons"; -import _ from "lodash"; import type { APIUser } from "types/api_flow_types"; +import { formatDateInLocalTimeZone } from "./formatted_date"; const SNOOZE_DURATION_IN_DAYS = 3; const LAST_TERMS_OF_SERVICE_WARNING_KEY = "lastTermsOfServiceWarning"; diff --git a/frontend/javascripts/components/text_with_description.tsx b/frontend/javascripts/components/text_with_description.tsx index 6eac349da69..4fabe2e92d5 100644 --- a/frontend/javascripts/components/text_with_description.tsx +++ b/frontend/javascripts/components/text_with_description.tsx @@ -1,8 +1,8 @@ import { Popover, Tooltip } from "antd"; import Markdown from "libs/markdown_adapter"; -import * as React from "react"; import type { EditableTextLabelProp } from "oxalis/view/components/editable_text_label"; import EditableTextLabel from "oxalis/view/components/editable_text_label"; +import * as React from "react"; type EditableProps = EditableTextLabelProp & { isEditable: true; diff --git a/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx b/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx index de387d053d3..a3185522507 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx @@ -1,19 +1,19 @@ import { InfoCircleOutlined } from "@ant-design/icons"; -import { Link } from "react-router-dom"; -import { Modal, Radio, Button, Tooltip, Spin } from "antd"; -import React, { useEffect, useState } from "react"; -import type { APIDataset, APISegmentationLayer } from "types/api_flow_types"; +import { getDataset } from "admin/admin_rest_api"; +import { Button, Modal, Radio, Spin, Tooltip } from "antd"; +import { Slider } from "components/slider"; 
+import { useFetch } from "libs/react_helpers"; import { doesSupportVolumeWithFallback, - getSomeMagInfoForDataset, - getSegmentationLayers, getMagInfo, getSegmentationLayerByName, + getSegmentationLayers, + getSomeMagInfoForDataset, } from "oxalis/model/accessors/dataset_accessor"; -import { getDataset } from "admin/admin_rest_api"; -import { useFetch } from "libs/react_helpers"; import type { MagInfo } from "oxalis/model/helpers/mag_info"; -import { Slider } from "components/slider"; +import React, { useEffect, useState } from "react"; +import { Link } from "react-router-dom"; +import type { APIDataset, APISegmentationLayer } from "types/api_flow_types"; type Props = { datasetId: string; diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_access_list_view.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_access_list_view.tsx index 58968d349f9..4e98b4074c0 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_access_list_view.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_access_list_view.tsx @@ -1,10 +1,10 @@ -import { Spin, Tag } from "antd"; -import * as React from "react"; -import type { APIDataset, APIUser } from "types/api_flow_types"; import { getDatasetAccessList } from "admin/admin_rest_api"; +import { Spin, Tag } from "antd"; import { handleGenericError } from "libs/error_handling"; import { stringToColor } from "libs/format_utils"; import { useFetch } from "libs/react_helpers"; +import * as React from "react"; +import type { APIDataset, APIUser } from "types/api_flow_types"; type Props = { dataset: APIDataset; diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx index d6351bae80f..4c1d3c6767d 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx @@ -8,20 +8,20 @@ import { 
SettingOutlined, WarningOutlined, } from "@ant-design/icons"; -import window from "libs/window"; -import { Link, type LinkProps } from "react-router-dom"; -import type * as React from "react"; -import type { APIDataset, APIDatasetCompact } from "types/api_flow_types"; +import { useQueryClient } from "@tanstack/react-query"; import { clearCache, deleteDatasetOnDisk, getDataset } from "admin/admin_rest_api"; -import Toast from "libs/toast"; -import messages from "messages"; -import CreateExplorativeModal from "dashboard/advanced_dataset/create_explorative_modal"; import { type MenuProps, Modal, Typography } from "antd"; -import { useState } from "react"; +import CreateExplorativeModal from "dashboard/advanced_dataset/create_explorative_modal"; import { confirmAsync } from "dashboard/dataset/helper_components"; -import { useQueryClient } from "@tanstack/react-query"; -import { getNoActionsAvailableMenu } from "oxalis/view/context_menu"; +import Toast from "libs/toast"; +import window from "libs/window"; +import messages from "messages"; import { getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; +import { getNoActionsAvailableMenu } from "oxalis/view/context_menu"; +import type * as React from "react"; +import { useState } from "react"; +import { Link, type LinkProps } from "react-router-dom"; +import type { APIDataset, APIDatasetCompact } from "types/api_flow_types"; const disabledStyle: React.CSSProperties = { pointerEvents: "none", diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx index 417bf0e4843..ec16ac078c7 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx @@ -1,47 +1,47 @@ import { FileOutlined, FolderOpenOutlined, PlusOutlined, WarningOutlined } from "@ant-design/icons"; -import { Link } from "react-router-dom"; +import type { DatasetUpdater } 
from "admin/admin_rest_api"; import { Dropdown, type MenuProps, type TableProps, Tag, Tooltip } from "antd"; import type { FilterValue, SorterResult, TablePaginationConfig } from "antd/lib/table/interface"; -import * as React from "react"; -import _ from "lodash"; -import { diceCoefficient as dice } from "dice-coefficient"; -import type { OxalisState } from "oxalis/store"; -import type { - APIDatasetCompact, - APIMaybeUnimportedDataset, - FolderItem, -} from "types/api_flow_types"; -import type { DatasetFilteringMode } from "dashboard/dataset_view"; -import { stringToColor } from "libs/format_utils"; -import CategorizationLabel from "oxalis/view/components/categorization_label"; +import classNames from "classnames"; +import FixedExpandableTable from "components/fixed_expandable_table"; +import FormattedDate from "components/formatted_date"; import DatasetActionView, { getDatasetActionContextMenu, } from "dashboard/advanced_dataset/dataset_action_view"; -import EditableTextIcon from "oxalis/view/components/editable_text_icon"; -import FormattedDate from "components/formatted_date"; +import type { DatasetCollectionContextValue } from "dashboard/dataset/dataset_collection_context"; +import { MINIMUM_SEARCH_QUERY_LENGTH } from "dashboard/dataset/queries"; +import type { DatasetFilteringMode } from "dashboard/dataset_view"; +import { + type DnDDropItemProps, + generateSettingsForFolder, + useDatasetDrop, +} from "dashboard/folders/folder_tree"; +import { diceCoefficient as dice } from "dice-coefficient"; +import { stringToColor } from "libs/format_utils"; +import Shortcut from "libs/shortcut_component"; import * as Utils from "libs/utils"; -import FixedExpandableTable from "components/fixed_expandable_table"; -import { DndProvider, DragPreviewImage, useDrag } from "react-dnd"; -import { HTML5Backend } from "react-dnd-html5-backend"; +import _ from "lodash"; +import { Unicode } from "oxalis/constants"; +import { getReadableURLPart } from 
"oxalis/model/accessors/dataset_accessor"; +import type { OxalisState } from "oxalis/store"; +import CategorizationLabel from "oxalis/view/components/categorization_label"; +import EditableTextIcon from "oxalis/view/components/editable_text_icon"; import { ContextMenuContext, GenericContextMenuContainer, getContextMenuPositionFromEvent, } from "oxalis/view/context_menu"; -import Shortcut from "libs/shortcut_component"; -import { MINIMUM_SEARCH_QUERY_LENGTH } from "dashboard/dataset/queries"; +import * as React from "react"; +import { DndProvider, DragPreviewImage, useDrag } from "react-dnd"; +import { HTML5Backend } from "react-dnd-html5-backend"; import { useSelector } from "react-redux"; -import type { DatasetCollectionContextValue } from "dashboard/dataset/dataset_collection_context"; -import { Unicode } from "oxalis/constants"; -import type { DatasetUpdater } from "admin/admin_rest_api"; -import { - generateSettingsForFolder, - useDatasetDrop, - type DnDDropItemProps, -} from "dashboard/folders/folder_tree"; -import classNames from "classnames"; +import { Link } from "react-router-dom"; +import type { + APIDatasetCompact, + APIMaybeUnimportedDataset, + FolderItem, +} from "types/api_flow_types"; import type { EmptyObject } from "types/globals"; -import { getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; type FolderItemWithName = FolderItem & { name: string }; export type DatasetOrFolder = APIDatasetCompact | FolderItemWithName; diff --git a/frontend/javascripts/dashboard/dashboard_task_list_view.tsx b/frontend/javascripts/dashboard/dashboard_task_list_view.tsx index 6e9124d57db..281ea8bd13a 100644 --- a/frontend/javascripts/dashboard/dashboard_task_list_view.tsx +++ b/frontend/javascripts/dashboard/dashboard_task_list_view.tsx @@ -1,4 +1,3 @@ -import { Button, Modal, Tag, Card, Row, Col, List, Tooltip } from "antd"; import { CheckCircleOutlined, DeleteOutlined, @@ -9,31 +8,32 @@ import { TeamOutlined, UserAddOutlined, } from 
"@ant-design/icons"; -import { Link } from "react-router-dom"; import { PropTypes } from "@scalableminds/prop-types"; -import { connect } from "react-redux"; +import { Button, Card, Col, List, Modal, Row, Tag, Tooltip } from "antd"; import Markdown from "libs/markdown_adapter"; import * as React from "react"; +import { connect } from "react-redux"; +import { Link } from "react-router-dom"; +import { deleteAnnotation, downloadAnnotation, resetAnnotation } from "admin/admin_rest_api"; +import { finishTask, peekNextTasks, requestTask } from "admin/api/tasks"; import classNames from "classnames"; -import type { APITaskWithAnnotation, APIUser, APIAnnotation } from "types/api_flow_types"; import { AsyncButton, AsyncLink } from "components/async_clickables"; -import type { OxalisState } from "oxalis/store"; -import { deleteAnnotation, resetAnnotation, downloadAnnotation } from "admin/admin_rest_api"; -import { finishTask, requestTask, peekNextTasks } from "admin/api/tasks"; -import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; -import { getSkeletonDescriptor } from "oxalis/model/accessors/skeletontracing_accessor"; -import { getVolumeDescriptors } from "oxalis/model/accessors/volumetracing_accessor"; -import { handleGenericError } from "libs/error_handling"; import FormattedDate from "components/formatted_date"; import LinkButton from "components/link_button"; +import TransferTaskModal from "dashboard/transfer_task_modal"; +import { handleGenericError } from "libs/error_handling"; import Persistence from "libs/persistence"; import Request from "libs/request"; import Toast from "libs/toast"; -import TransferTaskModal from "dashboard/transfer_task_modal"; import * as Utils from "libs/utils"; import messages from "messages"; +import { getSkeletonDescriptor } from "oxalis/model/accessors/skeletontracing_accessor"; +import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import { getVolumeDescriptors } from 
"oxalis/model/accessors/volumetracing_accessor"; +import type { OxalisState } from "oxalis/store"; import { RenderToPortal } from "oxalis/view/layouting/portal_utils"; +import type { APIAnnotation, APITaskWithAnnotation, APIUser } from "types/api_flow_types"; import { ActiveTabContext, RenderingTabContext } from "./dashboard_contexts"; const pageLength: number = 1000; diff --git a/frontend/javascripts/dashboard/dashboard_view.tsx b/frontend/javascripts/dashboard/dashboard_view.tsx index b1006e77214..4ff1abeeb8c 100644 --- a/frontend/javascripts/dashboard/dashboard_view.tsx +++ b/frontend/javascripts/dashboard/dashboard_view.tsx @@ -1,33 +1,33 @@ -import type { RouteComponentProps } from "react-router-dom"; -import { withRouter } from "react-router-dom"; -import { Spin, Tabs } from "antd"; -import { connect } from "react-redux"; -import type { Dispatch } from "redux"; -import type React from "react"; -import { PureComponent } from "react"; -import _ from "lodash"; -import { setActiveUserAction } from "oxalis/model/actions/user_actions"; -import { WhatsNextHeader } from "admin/welcome_ui"; -import type { APIOrganization, APIPricingPlanStatus, APIUser } from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; -import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; import { cachedGetPricingPlanStatus, getUser, updateNovelUserExperienceInfos, } from "admin/admin_rest_api"; +import { PlanAboutToExceedAlert, PlanExceededAlert } from "admin/organization/organization_cards"; +import { WhatsNextHeader } from "admin/welcome_ui"; +import { Spin, Tabs } from "antd"; import DashboardTaskListView from "dashboard/dashboard_task_list_view"; -import { PublicationViewWithHeader } from "dashboard/publication_view"; import ExplorativeAnnotationsView from "dashboard/explorative_annotations_view"; -import NmlUploadZoneContainer from "oxalis/view/nml_upload_zone_container"; +import { PublicationViewWithHeader } from "dashboard/publication_view"; 
+import features from "features"; import Request from "libs/request"; import UserLocalStorage from "libs/user_local_storage"; -import features from "features"; -import { PlanAboutToExceedAlert, PlanExceededAlert } from "admin/organization/organization_cards"; +import _ from "lodash"; +import { enforceActiveOrganization } from "oxalis/model/accessors/organization_accessors"; +import { enforceActiveUser } from "oxalis/model/accessors/user_accessor"; +import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import type { OxalisState } from "oxalis/store"; import { PortalTarget } from "oxalis/view/layouting/portal_utils"; -import { DatasetFolderView } from "./dataset_folder_view"; +import NmlUploadZoneContainer from "oxalis/view/nml_upload_zone_container"; +import type React from "react"; +import { PureComponent } from "react"; +import { connect } from "react-redux"; +import type { RouteComponentProps } from "react-router-dom"; +import { withRouter } from "react-router-dom"; +import type { Dispatch } from "redux"; +import type { APIOrganization, APIPricingPlanStatus, APIUser } from "types/api_flow_types"; import { ActiveTabContext, RenderingTabContext } from "./dashboard_contexts"; -import { enforceActiveOrganization } from "oxalis/model/accessors/organization_accessors"; +import { DatasetFolderView } from "./dataset_folder_view"; type OwnProps = { userId: string | null | undefined; diff --git a/frontend/javascripts/dashboard/dataset/color_layer_ordering_component.tsx b/frontend/javascripts/dashboard/dataset/color_layer_ordering_component.tsx index 67a37ab5953..2f07a49d394 100644 --- a/frontend/javascripts/dashboard/dataset/color_layer_ordering_component.tsx +++ b/frontend/javascripts/dashboard/dataset/color_layer_ordering_component.tsx @@ -1,9 +1,9 @@ -import { MenuOutlined, InfoCircleOutlined } from "@ant-design/icons"; -import { List, Collapse, Tooltip, type CollapseProps } from "antd"; -import { settings, settingsTooltips } from "messages"; +import 
{ InfoCircleOutlined, MenuOutlined } from "@ant-design/icons"; import { DndContext, type DragEndEvent } from "@dnd-kit/core"; -import { CSS } from "@dnd-kit/utilities"; import { SortableContext, useSortable, verticalListSortingStrategy } from "@dnd-kit/sortable"; +import { CSS } from "@dnd-kit/utilities"; +import { Collapse, type CollapseProps, List, Tooltip } from "antd"; +import { settings, settingsTooltips } from "messages"; // Example taken and modified from https://ant.design/components/table/#components-table-demo-drag-sorting-handler. diff --git a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx index b73d7564a17..f419f13375d 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx @@ -1,27 +1,27 @@ +import { useIsMutating } from "@tanstack/react-query"; +import { type DatasetUpdater, getDatastores, triggerDatasetCheck } from "admin/admin_rest_api"; +import { useEffectOnlyOnce, usePrevious } from "libs/react_hooks"; +import UserLocalStorage from "libs/user_local_storage"; +import _ from "lodash"; import type React from "react"; import { createContext, useCallback, useContext, useEffect, useMemo, useState } from "react"; import type { + APIDataset, APIDatasetCompact, APIDatasetCompactWithoutStatusAndLayerNames, FolderItem, - APIDataset, } from "types/api_flow_types"; -import { type DatasetUpdater, getDatastores, triggerDatasetCheck } from "admin/admin_rest_api"; -import UserLocalStorage from "libs/user_local_storage"; -import _ from "lodash"; import { - useFolderHierarchyQuery, - useDatasetsInFolderQuery, - useDatasetSearchQuery, useCreateFolderMutation, - useUpdateFolderMutation, - useMoveFolderMutation, + useDatasetSearchQuery, + useDatasetsInFolderQuery, useDeleteFolderMutation, - useUpdateDatasetMutation, + useFolderHierarchyQuery, useFolderQuery, + 
useMoveFolderMutation, + useUpdateDatasetMutation, + useUpdateFolderMutation, } from "./queries"; -import { useIsMutating } from "@tanstack/react-query"; -import { useEffectOnlyOnce, usePrevious } from "libs/react_hooks"; export type DatasetCollectionContextValue = { datasets: Array; diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx index e8bbe2aee2a..36e6bc08ba9 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx @@ -1,39 +1,39 @@ +import { DeleteOutlined } from "@ant-design/icons"; +import { startFindLargestSegmentIdJob } from "admin/admin_rest_api"; +import { getDatasetNameRules, layerNameRules } from "admin/dataset/dataset_components"; +import { useStartAndPollJob } from "admin/job/job_hooks"; import { - List, - Input, + Button, + Col, Form, + type FormInstance, + Input, InputNumber, - Col, + List, Row, - Switch, - Tooltip, - type FormInstance, Select, Space, - Button, + Switch, + Tooltip, } from "antd"; -import * as React from "react"; -import { Vector3Input, BoundingBoxInput } from "libs/vector_input"; -import { getBitDepth } from "oxalis/model/accessors/dataset_accessor"; -import { validateDatasourceJSON, isValidJSON, syncValidator } from "types/validation"; -import type { BoundingBoxObject, OxalisState } from "oxalis/store"; import { - Hideable, FormItemWithInfo, + Hideable, RetryingErrorBoundary, jsonEditStyle, } from "dashboard/dataset/helper_components"; -import { startFindLargestSegmentIdJob } from "admin/admin_rest_api"; +import Toast from "libs/toast"; import { jsonStringify, parseMaybe } from "libs/utils"; -import type { DataLayer } from "types/schemas/datasource.types"; -import { getDatasetNameRules, layerNameRules } from "admin/dataset/dataset_components"; +import { BoundingBoxInput, Vector3Input } from "libs/vector_input"; +import { AllUnits, 
LongUnitToShortUnitMap, type Vector3 } from "oxalis/constants"; +import { getBitDepth } from "oxalis/model/accessors/dataset_accessor"; +import type { BoundingBoxObject, OxalisState } from "oxalis/store"; +import * as React from "react"; import { useSelector } from "react-redux"; -import { DeleteOutlined } from "@ant-design/icons"; import { type APIDataLayer, type APIDataset, APIJobType } from "types/api_flow_types"; -import { useStartAndPollJob } from "admin/job/job_hooks"; -import { AllUnits, LongUnitToShortUnitMap, type Vector3 } from "oxalis/constants"; -import Toast from "libs/toast"; import type { ArbitraryObject } from "types/globals"; +import type { DataLayer } from "types/schemas/datasource.types"; +import { isValidJSON, syncValidator, validateDatasourceJSON } from "types/validation"; const FormItem = Form.Item; @@ -350,7 +350,7 @@ function SimpleLayerForm({ }, initialJobKeyExtractor: (job) => job.type === "find_largest_segment_id" && job.datasetName === dataset?.name - ? job.datasetName ?? "largest_segment_id" + ? (job.datasetName ?? 
"largest_segment_id") : null, }); const activeJob = runningJobs[0]; diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx index c5bb7b73921..0263ce12dd5 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx @@ -1,13 +1,13 @@ +import { useQueryClient } from "@tanstack/react-query"; +import { deleteDatasetOnDisk, getDataset } from "admin/admin_rest_api"; import { Button } from "antd"; -import { useState, useEffect } from "react"; -import type { APIDataset } from "types/api_flow_types"; -import { getDataset, deleteDatasetOnDisk } from "admin/admin_rest_api"; import Toast from "libs/toast"; import messages from "messages"; +import { useEffect, useState } from "react"; import type { RouteComponentProps } from "react-router-dom"; import { withRouter } from "react-router-dom"; +import type { APIDataset } from "types/api_flow_types"; import { confirmAsync } from "./helper_components"; -import { useQueryClient } from "@tanstack/react-query"; type Props = { datasetId: string; diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_metadata_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_metadata_tab.tsx index 1ef0e257afb..cf45bd216a3 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_metadata_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_metadata_tab.tsx @@ -1,4 +1,4 @@ -import { Input, Col, Row, DatePicker } from "antd"; +import { Col, DatePicker, Input, Row } from "antd"; import { FormItemWithInfo } from "./helper_components"; export default function DatasetSettingsMetadataTab() { diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx index a25b7eba64d..90f3ad9dfc5 100644 --- 
a/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx @@ -1,22 +1,22 @@ -import type React from "react"; -import { useState, useEffect } from "react"; -import { type RouteComponentProps, withRouter } from "react-router-dom"; -import { connect } from "react-redux"; -import { Button, Input, Checkbox, Tooltip, type FormInstance, Collapse, Space } from "antd"; import { CopyOutlined, InfoCircleOutlined, RetweetOutlined } from "@ant-design/icons"; -import type { APIDataset, APIUser } from "types/api_flow_types"; -import { AsyncButton } from "components/async_clickables"; import { getDatasetSharingToken, revokeDatasetSharingToken } from "admin/admin_rest_api"; +import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; +import { Button, Checkbox, Collapse, type FormInstance, Input, Space, Tooltip } from "antd"; +import { AsyncButton } from "components/async_clickables"; +import { PricingEnforcedBlur } from "components/pricing_enforcers"; +import DatasetAccessListView from "dashboard/advanced_dataset/dataset_access_list_view"; +import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; import Toast from "libs/toast"; +import { isUserAdminOrDatasetManager, isUserAdminOrTeamManager } from "libs/utils"; import window from "libs/window"; -import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; -import DatasetAccessListView from "dashboard/advanced_dataset/dataset_access_list_view"; +import { getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; import type { OxalisState } from "oxalis/store"; -import { isUserAdminOrDatasetManager, isUserAdminOrTeamManager } from "libs/utils"; +import type React from "react"; +import { useEffect, useState } from "react"; +import { connect } from "react-redux"; +import { type RouteComponentProps, withRouter } from "react-router-dom"; +import type { APIDataset, APIUser } from 
"types/api_flow_types"; import { FormItemWithInfo } from "./helper_components"; -import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; -import { PricingEnforcedBlur } from "components/pricing_enforcers"; -import { getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; type Props = { form: FormInstance | null; diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx index da35dcef146..890aeae8cab 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx @@ -1,48 +1,48 @@ -import { Button, Spin, Alert, Form, Card, Tabs, Tooltip, type FormInstance } from "antd"; import { ExclamationCircleOutlined } from "@ant-design/icons"; -import * as React from "react"; -import _ from "lodash"; -import dayjs from "dayjs"; -import { connect } from "react-redux"; -import type { RouteComponentProps } from "react-router-dom"; -import { withRouter, Link } from "react-router-dom"; -import type { - UnregisterCallback, - Location as HistoryLocation, - Action as HistoryAction, -} from "history"; -import type { - APIDataSource, - APIDataset, - MutableAPIDataset, - APIMessage, -} from "types/api_flow_types"; -import { Unicode } from "oxalis/constants"; -import type { DatasetConfiguration, OxalisState } from "oxalis/store"; -import { diffObjects, jsonStringify } from "libs/utils"; +import { defaultContext } from "@tanstack/react-query"; import { getDataset, getDatasetDefaultConfiguration, - updateDatasetDefaultConfiguration, readDatasetDatasource, - updateDatasetDatasource, - updateDatasetTeams, sendAnalyticsEvent, + updateDatasetDatasource, + updateDatasetDefaultConfiguration, updateDatasetPartial, + updateDatasetTeams, } from "admin/admin_rest_api"; +import { Alert, Button, Card, Form, type FormInstance, Spin, Tabs, Tooltip } from "antd"; +import dayjs from "dayjs"; +import features 
from "features"; +import type { + Action as HistoryAction, + Location as HistoryLocation, + UnregisterCallback, +} from "history"; import { handleGenericError } from "libs/error_handling"; import Toast from "libs/toast"; +import { diffObjects, jsonStringify } from "libs/utils"; +import _ from "lodash"; import messages from "messages"; -import features from "features"; +import { Unicode } from "oxalis/constants"; +import { getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; +import type { DatasetConfiguration, OxalisState } from "oxalis/store"; +import * as React from "react"; +import { connect } from "react-redux"; +import type { RouteComponentProps } from "react-router-dom"; +import { Link, withRouter } from "react-router-dom"; +import type { + APIDataSource, + APIDataset, + APIMessage, + MutableAPIDataset, +} from "types/api_flow_types"; import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_view_configuration_defaults"; -import { Hideable, hasFormError } from "./helper_components"; -import DatasetSettingsViewConfigTab from "./dataset_settings_viewconfig_tab"; +import DatasetSettingsDataTab, { syncDataSourceFields } from "./dataset_settings_data_tab"; +import DatasetSettingsDeleteTab from "./dataset_settings_delete_tab"; import DatasetSettingsMetadataTab from "./dataset_settings_metadata_tab"; import DatasetSettingsSharingTab from "./dataset_settings_sharing_tab"; -import DatasetSettingsDeleteTab from "./dataset_settings_delete_tab"; -import DatasetSettingsDataTab, { syncDataSourceFields } from "./dataset_settings_data_tab"; -import { defaultContext } from "@tanstack/react-query"; -import { getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; +import DatasetSettingsViewConfigTab from "./dataset_settings_viewconfig_tab"; +import { Hideable, hasFormError } from "./helper_components"; const FormItem = Form.Item; const notImportedYetStatus = "Not imported yet."; diff --git 
a/frontend/javascripts/dashboard/dataset/dataset_settings_viewconfig_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_viewconfig_tab.tsx index 803d4fcf188..b85ce2a998d 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_viewconfig_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_viewconfig_tab.tsx @@ -1,35 +1,35 @@ -import _ from "lodash"; import { InfoCircleOutlined } from "@ant-design/icons"; +import { getAgglomeratesForDatasetLayer, getMappingsForDatasetLayer } from "admin/admin_rest_api"; import { + Alert, + Checkbox, + Col, + Divider, Form, Input, - Checkbox, - Alert, InputNumber, - Col, Row, - Tooltip, - Table, Select, - Divider, + Table, + Tooltip, } from "antd"; -import { useMemo, useState } from "react"; +import { Slider } from "components/slider"; import { Vector3Input } from "libs/vector_input"; -import { validateLayerViewConfigurationObjectJSON, syncValidator } from "types/validation"; -import { getDefaultLayerViewConfiguration } from "types/schemas/dataset_view_configuration.schema"; +import _ from "lodash"; import messages, { type RecommendedConfiguration, layerViewConfigurations, settings, settingsTooltips, } from "messages"; -import type { DatasetConfiguration, DatasetLayerConfiguration } from "oxalis/store"; -import { FormItemWithInfo, jsonEditStyle } from "./helper_components"; import { BLEND_MODES } from "oxalis/constants"; -import ColorLayerOrderingTable from "./color_layer_ordering_component"; +import type { DatasetConfiguration, DatasetLayerConfiguration } from "oxalis/store"; +import { useMemo, useState } from "react"; import type { APIDataSourceId } from "types/api_flow_types"; -import { getAgglomeratesForDatasetLayer, getMappingsForDatasetLayer } from "admin/admin_rest_api"; -import { Slider } from "components/slider"; +import { getDefaultLayerViewConfiguration } from "types/schemas/dataset_view_configuration.schema"; +import { syncValidator, validateLayerViewConfigurationObjectJSON } 
from "types/validation"; +import ColorLayerOrderingTable from "./color_layer_ordering_component"; +import { FormItemWithInfo, jsonEditStyle } from "./helper_components"; const FormItem = Form.Item; diff --git a/frontend/javascripts/dashboard/dataset/helper_components.tsx b/frontend/javascripts/dashboard/dataset/helper_components.tsx index 60d57b226ca..53f4ac04462 100644 --- a/frontend/javascripts/dashboard/dataset/helper_components.tsx +++ b/frontend/javascripts/dashboard/dataset/helper_components.tsx @@ -1,10 +1,10 @@ -import { Alert, Form, Tooltip, Modal } from "antd"; -import type { FieldError } from "rc-field-form/es/interface"; import { InfoCircleOutlined } from "@ant-design/icons"; -import * as React from "react"; -import _ from "lodash"; -import type { NamePath } from "antd/lib/form/interface"; +import { Alert, Form, Modal, Tooltip } from "antd"; import type { FormItemProps, Rule } from "antd/lib/form"; +import type { NamePath } from "antd/lib/form/interface"; +import _ from "lodash"; +import type { FieldError } from "rc-field-form/es/interface"; +import * as React from "react"; const FormItem = Form.Item; diff --git a/frontend/javascripts/dashboard/dataset/queries.tsx b/frontend/javascripts/dashboard/dataset/queries.tsx index a9207f76878..e686a7469b1 100644 --- a/frontend/javascripts/dashboard/dataset/queries.tsx +++ b/frontend/javascripts/dashboard/dataset/queries.tsx @@ -1,6 +1,4 @@ -import _ from "lodash"; import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; -import * as Utils from "libs/utils"; import { type DatasetUpdater, getDataset, @@ -15,18 +13,20 @@ import { moveFolder, updateFolder, } from "admin/api/folders"; +import { handleGenericError } from "libs/error_handling"; import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import _ from "lodash"; import { useEffect, useRef } from "react"; import { + type APIDataset, type APIDatasetCompact, type FlatFolderTreeItem, type Folder, type FolderItem, type 
FolderUpdater, convertDatasetToCompact, - type APIDataset, } from "types/api_flow_types"; -import { handleGenericError } from "libs/error_handling"; export const SEARCH_RESULTS_LIMIT = 100; export const MINIMUM_SEARCH_QUERY_LENGTH = 3; diff --git a/frontend/javascripts/dashboard/dataset/team_selection_component.tsx b/frontend/javascripts/dashboard/dataset/team_selection_component.tsx index 66d30b5acc5..5bbd2ce1652 100644 --- a/frontend/javascripts/dashboard/dataset/team_selection_component.tsx +++ b/frontend/javascripts/dashboard/dataset/team_selection_component.tsx @@ -1,8 +1,8 @@ +import { getEditableTeams, getTeams } from "admin/admin_rest_api"; import { Select } from "antd"; -import * as React from "react"; import _ from "lodash"; +import * as React from "react"; import type { APITeam } from "types/api_flow_types"; -import { getEditableTeams, getTeams } from "admin/admin_rest_api"; const { Option } = Select; diff --git a/frontend/javascripts/dashboard/dataset_folder_view.tsx b/frontend/javascripts/dashboard/dataset_folder_view.tsx index 157da42d1fb..92c19160822 100644 --- a/frontend/javascripts/dashboard/dataset_folder_view.tsx +++ b/frontend/javascripts/dashboard/dataset_folder_view.tsx @@ -1,19 +1,19 @@ +import { Button, Card, Col, Row } from "antd"; +import features, { getDemoDatasetUrl } from "features"; import { filterNullValues } from "libs/utils"; +import * as Utils from "libs/utils"; +import { RenderToPortal } from "oxalis/view/layouting/portal_utils"; import React, { useEffect, useState } from "react"; +import { Link } from "react-router-dom"; import type { APIDatasetCompact, APIUser, FolderItem } from "types/api_flow_types"; import DatasetCollectionContextProvider, { useDatasetCollectionContext, } from "./dataset/dataset_collection_context"; -import { Button, Card, Col, Row } from "antd"; -import { Link } from "react-router-dom"; -import * as Utils from "libs/utils"; +import { useDatasetsInFolderQuery, useFolderHierarchyQuery } from 
"./dataset/queries"; import DatasetView, { DatasetAddButton, DatasetRefreshButton } from "./dataset_view"; import { DetailsSidebar } from "./folders/details_sidebar"; import { EditFolderModal } from "./folders/edit_folder_modal"; import { FolderTreeSidebar } from "./folders/folder_tree"; -import features, { getDemoDatasetUrl } from "features"; -import { RenderToPortal } from "oxalis/view/layouting/portal_utils"; -import { useFolderHierarchyQuery, useDatasetsInFolderQuery } from "./dataset/queries"; type Props = { user: APIUser; diff --git a/frontend/javascripts/dashboard/dataset_view.tsx b/frontend/javascripts/dashboard/dataset_view.tsx index 4ab03b85bc4..a75db51e3b8 100644 --- a/frontend/javascripts/dashboard/dataset_view.tsx +++ b/frontend/javascripts/dashboard/dataset_view.tsx @@ -1,51 +1,51 @@ -import React, { useState, useEffect } from "react"; -import { Link } from "react-router-dom"; import { + HourglassOutlined, + InfoCircleOutlined, + LoadingOutlined, + PlusOutlined, + ReloadOutlined, + SearchOutlined, + SettingOutlined, +} from "@ant-design/icons"; +import { PropTypes } from "@scalableminds/prop-types"; +import { getJobs } from "admin/admin_rest_api"; +import { TOOLTIP_MESSAGES_AND_ICONS } from "admin/job/job_list_view"; +import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; +import { + Alert, Badge, Button, - Radio, Col, Dropdown, Input, + Radio, Row, - Spin, - Tooltip, - Alert, Select, Space, + Spin, + Tooltip, } from "antd"; -import { - LoadingOutlined, - PlusOutlined, - ReloadOutlined, - SettingOutlined, - InfoCircleOutlined, - HourglassOutlined, - SearchOutlined, -} from "@ant-design/icons"; -import { PropTypes } from "@scalableminds/prop-types"; -import type { APIJob, APIDatasetCompact, APIUser, FolderItem } from "types/api_flow_types"; +import type { ItemType } from "antd/es/menu/interface"; +import FormattedDate from "components/formatted_date"; +import { PricingEnforcedButton } from "components/pricing_enforcers"; import 
DatasetTable from "dashboard/advanced_dataset/dataset_table"; -import * as Utils from "libs/utils"; -import { CategorizationSearch } from "oxalis/view/components/categorization_label"; +import dayjs from "dayjs"; import features from "features"; import Persistence from "libs/persistence"; -import { getJobs } from "admin/admin_rest_api"; -import dayjs from "dayjs"; -import FormattedDate from "components/formatted_date"; -import { TOOLTIP_MESSAGES_AND_ICONS } from "admin/job/job_list_view"; +import * as Utils from "libs/utils"; import { Unicode } from "oxalis/constants"; +import { CategorizationSearch } from "oxalis/view/components/categorization_label"; import { RenderToPortal } from "oxalis/view/layouting/portal_utils"; +import type { MenuProps } from "rc-menu"; +import React, { useState, useEffect } from "react"; +import { Link } from "react-router-dom"; +import type { APIDatasetCompact, APIJob, APIUser, FolderItem } from "types/api_flow_types"; import type { DatasetCollectionContextValue } from "./dataset/dataset_collection_context"; import { MINIMUM_SEARCH_QUERY_LENGTH, SEARCH_RESULTS_LIMIT, useFolderQuery, } from "./dataset/queries"; -import { PricingEnforcedButton } from "components/pricing_enforcers"; -import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; -import type { MenuProps } from "rc-menu"; -import type { ItemType } from "antd/es/menu/interface"; type Props = { user: APIUser; diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index c0f1936425a..4bf4dd26d64 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -1,71 +1,71 @@ -import { Link } from "react-router-dom"; -import { PropTypes } from "@scalableminds/prop-types"; -import { - Spin, - Input, - Table, - Button, - Modal, - Tooltip, - Tag, - Row, - Col, - Card, - type TableProps, -} from "antd"; 
import { + CopyOutlined, DownloadOutlined, FolderOpenOutlined, InboxOutlined, + LockOutlined, PlayCircleOutlined, PlusOutlined, - UploadOutlined, - CopyOutlined, TeamOutlined, - UserOutlined, - LockOutlined, UnlockOutlined, + UploadOutlined, + UserOutlined, } from "@ant-design/icons"; -import * as React from "react"; -import _ from "lodash"; -import update from "immutability-helper"; -import { AsyncLink } from "components/async_clickables"; -import { - annotationToCompact, - type APIAnnotationInfo, - type APIUser, - type APIUserCompact, -} from "types/api_flow_types"; -import { AnnotationContentTypes } from "oxalis/constants"; +import { PropTypes } from "@scalableminds/prop-types"; import { - finishAllAnnotations, + downloadAnnotation, editAnnotation, + editLockedState, + finishAllAnnotations, finishAnnotation, - reOpenAnnotation, - downloadAnnotation, getCompactAnnotationsForUser, getReadableAnnotations, - editLockedState, + reOpenAnnotation, } from "admin/admin_rest_api"; -import { formatHash, stringToColor } from "libs/format_utils"; -import { handleGenericError } from "libs/error_handling"; -import { setDropzoneModalVisibilityAction } from "oxalis/model/actions/ui_actions"; -import EditableTextIcon from "oxalis/view/components/editable_text_icon"; +import { + Button, + Card, + Col, + Input, + Modal, + Row, + Spin, + Table, + type TableProps, + Tag, + Tooltip, +} from "antd"; +import type { SearchProps } from "antd/lib/input"; +import { AsyncLink } from "components/async_clickables"; import FormattedDate from "components/formatted_date"; +import TextWithDescription from "components/text_with_description"; +import update from "immutability-helper"; +import { handleGenericError } from "libs/error_handling"; +import { formatHash, stringToColor } from "libs/format_utils"; import Persistence from "libs/persistence"; -import CategorizationLabel, { - CategorizationSearch, -} from "oxalis/view/components/categorization_label"; -import Store from "oxalis/store"; import 
Toast from "libs/toast"; import * as Utils from "libs/utils"; +import _ from "lodash"; import messages from "messages"; -import TextWithDescription from "components/text_with_description"; +import { AnnotationContentTypes } from "oxalis/constants"; import { getVolumeDescriptors } from "oxalis/model/accessors/volumetracing_accessor"; +import { setDropzoneModalVisibilityAction } from "oxalis/model/actions/ui_actions"; +import Store from "oxalis/store"; +import CategorizationLabel, { + CategorizationSearch, +} from "oxalis/view/components/categorization_label"; +import EditableTextIcon from "oxalis/view/components/editable_text_icon"; import { RenderToPortal } from "oxalis/view/layouting/portal_utils"; -import { ActiveTabContext, RenderingTabContext } from "./dashboard_contexts"; -import type { SearchProps } from "antd/lib/input"; import { AnnotationStats } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; +import * as React from "react"; +import { Link } from "react-router-dom"; +import { + type APIAnnotationInfo, + type APIUser, + type APIUserCompact, + annotationToCompact, +} from "types/api_flow_types"; +import { ActiveTabContext, RenderingTabContext } from "./dashboard_contexts"; const { Search } = Input; const pageLength: number = 1000; diff --git a/frontend/javascripts/dashboard/folders/details_sidebar.tsx b/frontend/javascripts/dashboard/folders/details_sidebar.tsx index f5eb3249a50..9595125dcef 100644 --- a/frontend/javascripts/dashboard/folders/details_sidebar.tsx +++ b/frontend/javascripts/dashboard/folders/details_sidebar.tsx @@ -1,30 +1,30 @@ import { + EditOutlined, FileOutlined, FolderOpenOutlined, - SearchOutlined, - EditOutlined, LoadingOutlined, + SearchOutlined, } from "@ant-design/icons"; +import { useQuery } from "@tanstack/react-query"; +import { getOrganization } from "admin/admin_rest_api"; import { Result, Spin, Tag, Tooltip } from "antd"; -import { stringToColor, formatCountToDataAmountUnit } from "libs/format_utils"; +import { 
formatCountToDataAmountUnit, stringToColor } from "libs/format_utils"; +import Markdown from "libs/markdown_adapter"; import { pluralize } from "libs/utils"; import _ from "lodash"; +import type { OxalisState } from "oxalis/store"; import { DatasetExtentRow, OwningOrganizationRow, VoxelSizeRow, } from "oxalis/view/right-border-tabs/dataset_info_tab_view"; import { useEffect } from "react"; +import { useSelector } from "react-redux"; import type { APIDatasetCompact, Folder } from "types/api_flow_types"; import { DatasetLayerTags, DatasetTags, TeamTags } from "../advanced_dataset/dataset_table"; import { useDatasetCollectionContext } from "../dataset/dataset_collection_context"; import { SEARCH_RESULTS_LIMIT, useDatasetQuery, useFolderQuery } from "../dataset/queries"; -import { useSelector } from "react-redux"; -import type { OxalisState } from "oxalis/store"; -import { getOrganization } from "admin/admin_rest_api"; -import { useQuery } from "@tanstack/react-query"; import MetadataTable from "./metadata_table"; -import Markdown from "libs/markdown_adapter"; export function DetailsSidebar({ selectedDatasets, diff --git a/frontend/javascripts/dashboard/folders/folder_selection.tsx b/frontend/javascripts/dashboard/folders/folder_selection.tsx index 49681796820..aa510a28c66 100644 --- a/frontend/javascripts/dashboard/folders/folder_selection.tsx +++ b/frontend/javascripts/dashboard/folders/folder_selection.tsx @@ -1,6 +1,6 @@ -import _ from "lodash"; import { TreeSelect } from "antd"; -import { useState, useEffect } from "react"; +import _ from "lodash"; +import { useEffect, useState } from "react"; import { useFolderHierarchyQuery } from "dashboard/dataset/queries"; import type { FolderItem } from "types/api_flow_types"; diff --git a/frontend/javascripts/dashboard/folders/folder_tree.tsx b/frontend/javascripts/dashboard/folders/folder_tree.tsx index 53c6d5effa4..8e1f404467d 100644 --- a/frontend/javascripts/dashboard/folders/folder_tree.tsx +++ 
b/frontend/javascripts/dashboard/folders/folder_tree.tsx @@ -8,14 +8,14 @@ import { } from "../dataset/dataset_collection_context"; import { DeleteOutlined, EditOutlined, PlusOutlined } from "@ant-design/icons"; -import { Dropdown, Modal, type MenuProps, Tree } from "antd"; -import Toast from "libs/toast"; +import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; +import { Dropdown, type MenuProps, Modal, Tree } from "antd"; import type { DataNode, DirectoryTreeProps } from "antd/lib/tree"; -import memoizeOne from "memoize-one"; import classNames from "classnames"; -import type { FolderItem } from "types/api_flow_types"; import { PricingEnforcedSpan } from "components/pricing_enforcers"; -import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; +import Toast from "libs/toast"; +import memoizeOne from "memoize-one"; +import type { FolderItem } from "types/api_flow_types"; import type { ArbitraryObject } from "types/globals"; const { DirectoryTree } = Tree; diff --git a/frontend/javascripts/dashboard/folders/metadata_table.tsx b/frontend/javascripts/dashboard/folders/metadata_table.tsx index 5951328e836..c8ecff00765 100644 --- a/frontend/javascripts/dashboard/folders/metadata_table.tsx +++ b/frontend/javascripts/dashboard/folders/metadata_table.tsx @@ -7,13 +7,13 @@ import { TagsOutlined, } from "@ant-design/icons"; import { - type MenuProps, - type InputNumberProps, - InputNumber, + Button, + Dropdown, Input, + InputNumber, + type InputNumberProps, + type MenuProps, Select, - Dropdown, - Button, Tag, } from "antd"; import FastTooltip from "components/fast_tooltip"; @@ -30,9 +30,9 @@ import { useEffect } from "react"; import { useState } from "react"; import { type APIDataset, - type Folder, type APIMetadataEntry, APIMetadataEnum, + type Folder, } from "types/api_flow_types"; export type APIMetadataWithError = APIMetadataEntry & { error?: string | null }; diff --git a/frontend/javascripts/dashboard/publication_card.tsx 
b/frontend/javascripts/dashboard/publication_card.tsx index 5855b2ddd13..fadd231c9d0 100644 --- a/frontend/javascripts/dashboard/publication_card.tsx +++ b/frontend/javascripts/dashboard/publication_card.tsx @@ -1,19 +1,19 @@ -import { Card, Button, Tooltip } from "antd"; import { LinkOutlined } from "@ant-design/icons"; -import Markdown from "libs/markdown_adapter"; -import type React from "react"; -import { useState } from "react"; +import { Button, Card, Tooltip } from "antd"; import classNames from "classnames"; -import { Link } from "react-router-dom"; -import type { APIDataset, APIPublication, APIPublicationAnnotation } from "types/api_flow_types"; import { formatScale } from "libs/format_utils"; +import Markdown from "libs/markdown_adapter"; +import { compareBy } from "libs/utils"; import { + getDatasetExtentAsString, + getSegmentationThumbnailURL, getThumbnailURL, hasSegmentation, - getSegmentationThumbnailURL, - getDatasetExtentAsString, } from "oxalis/model/accessors/dataset_accessor"; -import { compareBy } from "libs/utils"; +import type React from "react"; +import { useState } from "react"; +import { Link } from "react-router-dom"; +import type { APIDataset, APIPublication, APIPublicationAnnotation } from "types/api_flow_types"; type DatasetDetails = { species?: string; diff --git a/frontend/javascripts/dashboard/publication_details_view.tsx b/frontend/javascripts/dashboard/publication_details_view.tsx index 2c557b8afde..f13587a5437 100644 --- a/frontend/javascripts/dashboard/publication_details_view.tsx +++ b/frontend/javascripts/dashboard/publication_details_view.tsx @@ -1,11 +1,11 @@ -import { useState, useEffect } from "react"; -import { Layout, Spin, Tooltip } from "antd"; import { ArrowLeftOutlined } from "@ant-design/icons"; import { getPublication } from "admin/admin_rest_api"; -import type { APIPublication } from "types/api_flow_types"; +import { Layout, Spin, Tooltip } from "antd"; import PublicationCard from "dashboard/publication_card"; 
import { handleGenericError } from "libs/error_handling"; +import { useEffect, useState } from "react"; import { Link } from "react-router-dom"; +import type { APIPublication } from "types/api_flow_types"; const { Content } = Layout; function PublicationDetailView({ publicationId }: { publicationId: string }) { diff --git a/frontend/javascripts/dashboard/publication_view.tsx b/frontend/javascripts/dashboard/publication_view.tsx index 0a26178910a..b4f10763a14 100644 --- a/frontend/javascripts/dashboard/publication_view.tsx +++ b/frontend/javascripts/dashboard/publication_view.tsx @@ -1,11 +1,11 @@ -import type React from "react"; -import { memo, useState, useEffect } from "react"; -import { List, Input, Spin } from "antd"; -import type { APIPublication } from "types/api_flow_types"; -import PublicationCard from "dashboard/publication_card"; -import * as Utils from "libs/utils"; import { getPublications } from "admin/admin_rest_api"; +import { Input, List, Spin } from "antd"; +import PublicationCard from "dashboard/publication_card"; import { handleGenericError } from "libs/error_handling"; +import * as Utils from "libs/utils"; +import type React from "react"; +import { memo, useEffect, useState } from "react"; +import type { APIPublication } from "types/api_flow_types"; const { Search } = Input; export function PublicationViewWithHeader() { const [isLoading, setIsLoading] = useState(false); diff --git a/frontend/javascripts/dashboard/transfer_task_modal.tsx b/frontend/javascripts/dashboard/transfer_task_modal.tsx index f827cdf6e25..5dadc590e4b 100644 --- a/frontend/javascripts/dashboard/transfer_task_modal.tsx +++ b/frontend/javascripts/dashboard/transfer_task_modal.tsx @@ -1,9 +1,9 @@ -import { Modal, Button } from "antd"; +import { transferTask } from "admin/api/tasks"; +import UserSelectionComponent from "admin/user/user_selection_component"; +import { Button, Modal } from "antd"; +import { handleGenericError } from "libs/error_handling"; import * as React from 
"react"; import type { APIAnnotation } from "types/api_flow_types"; -import { handleGenericError } from "libs/error_handling"; -import UserSelectionComponent from "admin/user/user_selection_component"; -import { transferTask } from "admin/api/tasks"; type Props = { onChange: (updatedAnnotation: APIAnnotation) => void; diff --git a/frontend/javascripts/libs/DRACOLoader.ts b/frontend/javascripts/libs/DRACOLoader.ts index 7f3893656a4..9ce0e2d77b1 100644 --- a/frontend/javascripts/libs/DRACOLoader.ts +++ b/frontend/javascripts/libs/DRACOLoader.ts @@ -8,8 +8,8 @@ import { BufferGeometry, Color, FileLoader, - Loader, LinearSRGBColorSpace, + Loader, SRGBColorSpace, } from "three"; diff --git a/frontend/javascripts/libs/async/debounced_abortable_saga.ts b/frontend/javascripts/libs/async/debounced_abortable_saga.ts index 326ca5b1b25..6830c773883 100644 --- a/frontend/javascripts/libs/async/debounced_abortable_saga.ts +++ b/frontend/javascripts/libs/async/debounced_abortable_saga.ts @@ -1,5 +1,5 @@ -import { call, type Saga } from "oxalis/model/sagas/effect-generators"; -import { buffers, type Channel, channel, runSaga } from "redux-saga"; +import { type Saga, call } from "oxalis/model/sagas/effect-generators"; +import { type Channel, buffers, channel, runSaga } from "redux-saga"; import { delay, race, take } from "redux-saga/effects"; // biome-ignore lint/complexity/noBannedTypes: This is copied from redux-saga because it cannot be imported. 
diff --git a/frontend/javascripts/libs/async/task_pool.ts b/frontend/javascripts/libs/async/task_pool.ts index 610e4503a05..9647eda6deb 100644 --- a/frontend/javascripts/libs/async/task_pool.ts +++ b/frontend/javascripts/libs/async/task_pool.ts @@ -1,5 +1,5 @@ import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { join, call, fork, type FixedTask } from "typed-redux-saga"; +import { type FixedTask, call, fork, join } from "typed-redux-saga"; /* Given an array of async tasks, processTaskWithPool diff --git a/frontend/javascripts/libs/cuckoo/abstract_cuckoo_table.ts b/frontend/javascripts/libs/cuckoo/abstract_cuckoo_table.ts index e17a6b6f6b7..e155d895616 100644 --- a/frontend/javascripts/libs/cuckoo/abstract_cuckoo_table.ts +++ b/frontend/javascripts/libs/cuckoo/abstract_cuckoo_table.ts @@ -1,7 +1,7 @@ -import * as THREE from "three"; import type UpdatableTexture from "libs/UpdatableTexture"; import { getRenderer } from "oxalis/controller/renderer"; import { createUpdatableTexture } from "oxalis/geometries/materials/plane_material_factory_helpers"; +import * as THREE from "three"; const DEFAULT_LOAD_FACTOR = 0.9; export const EMPTY_KEY_VALUE = 2 ** 32 - 1; diff --git a/frontend/javascripts/libs/cuckoo/cuckoo_table_uint32.ts b/frontend/javascripts/libs/cuckoo/cuckoo_table_uint32.ts index e72f64c73a7..60281866cd2 100644 --- a/frontend/javascripts/libs/cuckoo/cuckoo_table_uint32.ts +++ b/frontend/javascripts/libs/cuckoo/cuckoo_table_uint32.ts @@ -1,6 +1,6 @@ +import type { NumberLike } from "oxalis/store"; import * as THREE from "three"; import { AbstractCuckooTable, EMPTY_KEY_VALUE } from "./abstract_cuckoo_table"; -import type { NumberLike } from "oxalis/store"; const EMPTY_KEY = EMPTY_KEY_VALUE; const EMPTY_VALUE = EMPTY_KEY_VALUE; diff --git a/frontend/javascripts/libs/cuckoo/cuckoo_table_uint64.ts b/frontend/javascripts/libs/cuckoo/cuckoo_table_uint64.ts index 6874da986ab..2fcedb7580b 100644 --- 
a/frontend/javascripts/libs/cuckoo/cuckoo_table_uint64.ts +++ b/frontend/javascripts/libs/cuckoo/cuckoo_table_uint64.ts @@ -1,6 +1,6 @@ import { convertNumberTo64BitTuple } from "libs/utils"; -import { AbstractCuckooTable, EMPTY_KEY_VALUE } from "./abstract_cuckoo_table"; import type { NumberLike } from "oxalis/store"; +import { AbstractCuckooTable, EMPTY_KEY_VALUE } from "./abstract_cuckoo_table"; const EMPTY_KEY = [EMPTY_KEY_VALUE, EMPTY_KEY_VALUE] as Value; const EMPTY_VALUE = [EMPTY_KEY_VALUE, EMPTY_KEY_VALUE] as Value; diff --git a/frontend/javascripts/libs/draco.ts b/frontend/javascripts/libs/draco.ts index f162431f7c6..31c66778a12 100644 --- a/frontend/javascripts/libs/draco.ts +++ b/frontend/javascripts/libs/draco.ts @@ -1,5 +1,5 @@ -import type { BufferGeometry } from "three"; import { DRACOLoader } from "libs/DRACOLoader"; +import type { BufferGeometry } from "three"; let _dracoLoader: CustomDRACOLoader | null; diff --git a/frontend/javascripts/libs/error_handling.ts b/frontend/javascripts/libs/error_handling.ts index d435b9faabf..20de33657fb 100644 --- a/frontend/javascripts/libs/error_handling.ts +++ b/frontend/javascripts/libs/error_handling.ts @@ -1,10 +1,10 @@ import { Notifier } from "@airbrake/browser"; +import Toast from "libs/toast"; +import window, { document, location } from "libs/window"; import _ from "lodash"; +import messages from "messages"; import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; import type { APIUser } from "types/api_flow_types"; -import Toast from "libs/toast"; -import messages from "messages"; -import window, { document, location } from "libs/window"; // Note that if you set this value to true for debugging airbrake reporting, // you also need to set the values for projectID and projectKey in application.conf const LOG_LOCAL_ERRORS = false; diff --git a/frontend/javascripts/libs/format_utils.ts b/frontend/javascripts/libs/format_utils.ts index 9b29e05edba..1454b77608c 100644 --- 
a/frontend/javascripts/libs/format_utils.ts +++ b/frontend/javascripts/libs/format_utils.ts @@ -1,21 +1,21 @@ import { presetPalettes } from "@ant-design/colors"; -import { LongUnitToShortUnitMap, UnitShort, type Vector3, type Vector6 } from "oxalis/constants"; -import { Unicode } from "oxalis/constants"; -import * as Utils from "libs/utils"; -import _ from "lodash"; import dayjs from "dayjs"; +import calendar from "dayjs/plugin/calendar"; +import customParseFormat from "dayjs/plugin/customParseFormat"; import duration from "dayjs/plugin/duration"; -import updateLocale from "dayjs/plugin/updateLocale"; -import relativeTime from "dayjs/plugin/relativeTime"; +import localeData from "dayjs/plugin/localeData"; import localizedFormat from "dayjs/plugin/localizedFormat"; -import customParseFormat from "dayjs/plugin/customParseFormat"; -import calendar from "dayjs/plugin/calendar"; +import relativeTime from "dayjs/plugin/relativeTime"; +import updateLocale from "dayjs/plugin/updateLocale"; import utc from "dayjs/plugin/utc"; import weekday from "dayjs/plugin/weekday"; -import localeData from "dayjs/plugin/localeData"; +import * as Utils from "libs/utils"; +import _ from "lodash"; +import { LongUnitToShortUnitMap, UnitShort, type Vector3, type Vector6 } from "oxalis/constants"; +import { Unicode } from "oxalis/constants"; -import type { BoundingBoxObject } from "oxalis/store"; import type { Duration } from "dayjs/plugin/duration"; +import type { BoundingBoxObject } from "oxalis/store"; import type { VoxelSize, WkLibsNdBoundingBox } from "types/api_flow_types"; dayjs.extend(updateLocale); diff --git a/frontend/javascripts/libs/gist.ts b/frontend/javascripts/libs/gist.ts index 666db1c0ea3..53bf58ebdf5 100644 --- a/frontend/javascripts/libs/gist.ts +++ b/frontend/javascripts/libs/gist.ts @@ -1,6 +1,6 @@ -import _ from "lodash"; import Request from "libs/request"; import Toast from "libs/toast"; +import _ from "lodash"; import messages from "messages"; // 
https://developer.github.com/v3/gists/#get-a-single-gist type GithubGist = { diff --git a/frontend/javascripts/libs/input.ts b/frontend/javascripts/libs/input.ts index daa55d0a1d9..8517cba45d3 100644 --- a/frontend/javascripts/libs/input.ts +++ b/frontend/javascripts/libs/input.ts @@ -1,13 +1,13 @@ -import _ from "lodash"; import Date from "libs/date"; import Hammer from "libs/hammerjs_wrapper"; // @ts-expect-error ts-migrate(2306) FIXME: ... Remove this comment to see the full error message import KeyboardJS from "libs/keyboard"; import * as Utils from "libs/utils"; +import window, { document } from "libs/window"; +import _ from "lodash"; +import { type Emitter, createNanoEvents } from "nanoevents"; import type { Point2 } from "oxalis/constants"; import constants from "oxalis/constants"; -import window, { document } from "libs/window"; -import { createNanoEvents, type Emitter } from "nanoevents"; // This is the main Input implementation. // Although all keys, buttons and sensor are mapped in // the controller, this is were the magic happens. 
diff --git a/frontend/javascripts/libs/persistence.ts b/frontend/javascripts/libs/persistence.ts index 07fe72b2d43..c7f02a5274f 100644 --- a/frontend/javascripts/libs/persistence.ts +++ b/frontend/javascripts/libs/persistence.ts @@ -1,6 +1,6 @@ import { PropTypes } from "@scalableminds/prop-types"; -import _ from "lodash"; import ErrorHandling from "libs/error_handling"; +import _ from "lodash"; import type { EmptyObject } from "types/globals"; class Persistence> { diff --git a/frontend/javascripts/libs/react_helpers.tsx b/frontend/javascripts/libs/react_helpers.tsx index bc02006603a..f9fe89f347b 100644 --- a/frontend/javascripts/libs/react_helpers.tsx +++ b/frontend/javascripts/libs/react_helpers.tsx @@ -1,9 +1,9 @@ +import { isUserAdminOrManager } from "libs/utils"; +import type { OxalisState } from "oxalis/store"; import type React from "react"; -import { useState, useEffect, useRef } from "react"; +import { useEffect, useRef, useState } from "react"; import { useSelector, useStore } from "react-redux"; -import type { OxalisState } from "oxalis/store"; import type { ArbitraryFunction } from "types/globals"; -import { isUserAdminOrManager } from "libs/utils"; import Toast from "./toast"; // From https://overreacted.io/making-setinterval-declarative-with-react-hooks/ diff --git a/frontend/javascripts/libs/react_hooks.ts b/frontend/javascripts/libs/react_hooks.ts index d9f0d9aa462..58e8e053c21 100644 --- a/frontend/javascripts/libs/react_hooks.ts +++ b/frontend/javascripts/libs/react_hooks.ts @@ -1,5 +1,5 @@ import constants from "oxalis/constants"; -import { useState, useEffect, useRef, useCallback } from "react"; +import { useCallback, useEffect, useRef, useState } from "react"; import { useLocation } from "react-router-dom"; import { KEYBOARD_BUTTON_LOOP_INTERVAL } from "./input"; diff --git a/frontend/javascripts/libs/render_independently.tsx b/frontend/javascripts/libs/render_independently.tsx index 275cdb8d6bd..ce4eedcc0e6 100644 --- 
a/frontend/javascripts/libs/render_independently.tsx +++ b/frontend/javascripts/libs/render_independently.tsx @@ -1,8 +1,8 @@ -import type React from "react"; import { document } from "libs/window"; +import type React from "react"; +import { createRoot } from "react-dom/client"; import { Provider } from "react-redux"; import GlobalThemeProvider from "theme"; -import { createRoot } from "react-dom/client"; type DestroyFunction = () => void; // The returned promise gets resolved once the element is destroyed. diff --git a/frontend/javascripts/libs/request.ts b/frontend/javascripts/libs/request.ts index 25bf31657e5..e92177c4683 100644 --- a/frontend/javascripts/libs/request.ts +++ b/frontend/javascripts/libs/request.ts @@ -1,13 +1,13 @@ +import { pingMentionedDataStores } from "admin/datastore_health_check"; +import handleStatus from "libs/handle_http_status"; +import Toast from "libs/toast"; import _ from "lodash"; -import urljoin from "url-join"; import { createWorker } from "oxalis/workers/comlink_wrapper"; -import { pingMentionedDataStores } from "admin/datastore_health_check"; import CompressWorker from "oxalis/workers/compress.worker"; -import FetchBufferWithHeadersWorker from "oxalis/workers/fetch_buffer_with_headers.worker"; import FetchBufferWorker from "oxalis/workers/fetch_buffer.worker"; -import Toast from "libs/toast"; -import handleStatus from "libs/handle_http_status"; +import FetchBufferWithHeadersWorker from "oxalis/workers/fetch_buffer_with_headers.worker"; import type { ArbitraryObject } from "types/globals"; +import urljoin from "url-join"; const fetchBufferViaWorker = createWorker(FetchBufferWorker); const fetchBufferWithHeaders = createWorker(FetchBufferWithHeadersWorker); diff --git a/frontend/javascripts/libs/shortcut_component.ts b/frontend/javascripts/libs/shortcut_component.ts index ddebdd87460..6d9ff1add0f 100644 --- a/frontend/javascripts/libs/shortcut_component.ts +++ b/frontend/javascripts/libs/shortcut_component.ts @@ -1,5 +1,5 @@ 
-import * as React from "react"; import { InputKeyboard, InputKeyboardNoLoop } from "libs/input"; +import * as React from "react"; // This component provides a lightweight wrapper around the input library. // It leverages reacts lifecycle hooks to allow rendering-sensitive activation of shortcuts. type Props = { diff --git a/frontend/javascripts/libs/toast.tsx b/frontend/javascripts/libs/toast.tsx index e0a7084fb5e..7a9d26f1ba5 100644 --- a/frontend/javascripts/libs/toast.tsx +++ b/frontend/javascripts/libs/toast.tsx @@ -1,5 +1,5 @@ -import { notification, Collapse } from "antd"; import { CloseCircleOutlined } from "@ant-design/icons"; +import { Collapse, notification } from "antd"; import type React from "react"; import { useEffect } from "react"; import { animationFrame, sleep } from "./utils"; diff --git a/frontend/javascripts/libs/trackball_controls.ts b/frontend/javascripts/libs/trackball_controls.ts index 2208aaa0ecd..f6d00abd4b3 100644 --- a/frontend/javascripts/libs/trackball_controls.ts +++ b/frontend/javascripts/libs/trackball_controls.ts @@ -1,5 +1,5 @@ -import * as THREE from "three"; import window, { document } from "libs/window"; +import * as THREE from "three"; /** * The MIT License diff --git a/frontend/javascripts/libs/utils.ts b/frontend/javascripts/libs/utils.ts index 86f82a77b08..9f4dca49bb9 100644 --- a/frontend/javascripts/libs/utils.ts +++ b/frontend/javascripts/libs/utils.ts @@ -1,21 +1,21 @@ import Maybe from "data.maybe"; -import _ from "lodash"; +import dayjs from "dayjs"; // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'java... 
Remove this comment to see the full error message import naturalSort from "javascript-natural-sort"; -import type { APIDataset, APIUser } from "types/api_flow_types"; -import type { BoundingBoxObject, NumberLike } from "oxalis/store"; +import window, { document, location } from "libs/window"; +import _ from "lodash"; import type { - Vector3, - Vector4, - Vector6, BoundingBoxType, - Point3, ColorObject, + Point3, TypedArray, + Vector3, + Vector4, + Vector6, } from "oxalis/constants"; -import window, { document, location } from "libs/window"; +import type { BoundingBoxObject, NumberLike } from "oxalis/store"; +import type { APIDataset, APIUser } from "types/api_flow_types"; import type { ArbitraryObject, Comparator } from "types/globals"; -import dayjs from "dayjs"; type UrlParams = Record; // Fix JS modulo bug diff --git a/frontend/javascripts/libs/vector_input.tsx b/frontend/javascripts/libs/vector_input.tsx index 3382db9a711..446d1a94513 100644 --- a/frontend/javascripts/libs/vector_input.tsx +++ b/frontend/javascripts/libs/vector_input.tsx @@ -1,10 +1,10 @@ -import * as React from "react"; +import type { InputProps } from "antd"; +import * as Utils from "libs/utils"; import _ from "lodash"; -import type { ServerBoundingBoxTypeTuple } from "types/api_flow_types"; import type { Vector3, Vector6 } from "oxalis/constants"; import InputComponent from "oxalis/view/components/input_component"; -import * as Utils from "libs/utils"; -import type { InputProps } from "antd"; +import * as React from "react"; +import type { ServerBoundingBoxTypeTuple } from "types/api_flow_types"; const CHARACTER_WIDTH_PX = 8; diff --git a/frontend/javascripts/libs/window.ts b/frontend/javascripts/libs/window.ts index d629a1e9b3d..2435d4d64a6 100644 --- a/frontend/javascripts/libs/window.ts +++ b/frontend/javascripts/libs/window.ts @@ -1,7 +1,7 @@ // This module should be used to access the window object, so it can be mocked in the unit tests -import type { ArbitraryFunction, ArbitraryObject 
} from "types/globals"; import type TextureBucketManager from "oxalis/model/bucket_data_handling/texture_bucket_manager"; +import type { ArbitraryFunction, ArbitraryObject } from "types/globals"; // mockRequire("libs/window", myFakeWindow); const removeEventListener = ( diff --git a/frontend/javascripts/main.tsx b/frontend/javascripts/main.tsx index 31d59217e44..9a8bcb6d278 100644 --- a/frontend/javascripts/main.tsx +++ b/frontend/javascripts/main.tsx @@ -1,31 +1,31 @@ -import { Provider } from "react-redux"; -import { createRoot } from "react-dom/client"; +import { message } from "antd"; import window, { document } from "libs/window"; import rootSaga from "oxalis/model/sagas/root_saga"; import UnthrottledStore, { startSagas } from "oxalis/store"; -import { message } from "antd"; +import { createRoot } from "react-dom/client"; +import { Provider } from "react-redux"; -import { getActiveUser, checkAnyOrganizationExists, getOrganization } from "admin/admin_rest_api"; -import { load as loadFeatureToggles } from "features"; -import { setActiveUserAction } from "oxalis/model/actions/user_actions"; -import { setHasOrganizationsAction, setThemeAction } from "oxalis/model/actions/ui_actions"; -import ErrorHandling from "libs/error_handling"; -import Router from "router"; -import Store from "oxalis/throttled_store"; -import { DndProvider } from "react-dnd"; -import { HTML5Backend } from "react-dnd-html5-backend"; +import { createSyncStoragePersister } from "@tanstack/query-sync-storage-persister"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; import { persistQueryClient } from "@tanstack/react-query-persist-client"; -import { createSyncStoragePersister } from "@tanstack/query-sync-storage-persister"; +import { checkAnyOrganizationExists, getActiveUser, getOrganization } from "admin/admin_rest_api"; +import ErrorBoundary from "components/error_boundary"; +import { RootForFastTooltips } from "components/fast_tooltip"; +import { load as 
loadFeatureToggles } from "features"; +import checkBrowserFeatures from "libs/browser_feature_check"; +import ErrorHandling from "libs/error_handling"; import UserLocalStorage from "libs/user_local_storage"; import { compress, decompress } from "lz-string"; -import ErrorBoundary from "components/error_boundary"; -import { setStore, setModel } from "oxalis/singletons"; -import Model from "oxalis/model"; import { setupApi } from "oxalis/api/internal_api"; +import Model from "oxalis/model"; import { setActiveOrganizationAction } from "oxalis/model/actions/organization_actions"; -import checkBrowserFeatures from "libs/browser_feature_check"; -import { RootForFastTooltips } from "components/fast_tooltip"; +import { setHasOrganizationsAction, setThemeAction } from "oxalis/model/actions/ui_actions"; +import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import { setModel, setStore } from "oxalis/singletons"; +import Store from "oxalis/throttled_store"; +import { DndProvider } from "react-dnd"; +import { HTML5Backend } from "react-dnd-html5-backend"; +import Router from "router"; import "../stylesheets/main.less"; import GlobalThemeProvider, { getThemeFromUser } from "theme"; diff --git a/frontend/javascripts/messages.tsx b/frontend/javascripts/messages.tsx index 98ef35199a2..96e2782a5e6 100644 --- a/frontend/javascripts/messages.tsx +++ b/frontend/javascripts/messages.tsx @@ -2,8 +2,8 @@ import _ from "lodash"; import type { Vector4 } from "oxalis/constants"; import type { DatasetConfiguration, - UserConfiguration, DatasetLayerConfiguration, + UserConfiguration, } from "oxalis/store"; export type RecommendedConfiguration = Partial< diff --git a/frontend/javascripts/navbar.tsx b/frontend/javascripts/navbar.tsx index 12873f12c91..2fb5357fbc6 100644 --- a/frontend/javascripts/navbar.tsx +++ b/frontend/javascripts/navbar.tsx @@ -1,73 +1,73 @@ -import type React from "react"; -import { useState, useEffect, useRef } from "react"; +import { + 
BarChartOutlined, + BellOutlined, + CheckOutlined, + HomeOutlined, + QuestionCircleOutlined, + SwapOutlined, + TeamOutlined, + UserOutlined, +} from "@ant-design/icons"; import { Avatar, - Button, Badge, - Tooltip, + Button, + ConfigProvider, + Input, + type InputRef, Layout, Menu, Popover, type SubMenuProps, Tag, - Input, - type InputRef, - ConfigProvider, + Tooltip, } from "antd"; -import { - SwapOutlined, - TeamOutlined, - CheckOutlined, - BarChartOutlined, - HomeOutlined, - QuestionCircleOutlined, - UserOutlined, - BellOutlined, -} from "@ant-design/icons"; -import { useHistory, Link } from "react-router-dom"; import classnames from "classnames"; +import type React from "react"; +import { useEffect, useRef, useState } from "react"; import { connect, useSelector } from "react-redux"; +import { Link, useHistory } from "react-router-dom"; -import Toast from "libs/toast"; -import type { - APIOrganizationCompact, - APIUser, - APIUserCompact, - APIUserTheme, -} from "types/api_flow_types"; -import { PortalTarget } from "oxalis/view/layouting/portal_utils"; import { getBuildInfo, getUsersOrganizations, + sendAnalyticsEvent, switchToOrganization, - updateSelectedThemeOfUser, updateNovelUserExperienceInfos, - sendAnalyticsEvent, + updateSelectedThemeOfUser, } from "admin/admin_rest_api"; -import { logoutUserAction, setActiveUserAction } from "oxalis/model/actions/user_actions"; -import { useFetch, useInterval } from "libs/react_helpers"; import LoginForm from "admin/auth/login_form"; +import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; +import type { ItemType, MenuItemType, SubMenuType } from "antd/es/menu/interface"; +import { MaintenanceBanner, UpgradeVersionBanner } from "banners"; +import { PricingEnforcedSpan } from "components/pricing_enforcers"; +import features from "features"; +import { useFetch, useInterval } from "libs/react_helpers"; import Request from "libs/request"; -import type { OxalisState } from "oxalis/store"; -import Store from 
"oxalis/store"; +import Toast from "libs/toast"; import * as Utils from "libs/utils"; import window, { location } from "libs/window"; -import features from "features"; -import { setThemeAction } from "oxalis/model/actions/ui_actions"; -import { HelpModal } from "oxalis/view/help_modal"; -import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; import messages from "messages"; -import { PricingEnforcedSpan } from "components/pricing_enforcers"; -import type { ItemType, MenuItemType, SubMenuType } from "antd/es/menu/interface"; -import type { MenuClickEventHandler } from "rc-menu/lib/interface"; import constants from "oxalis/constants"; -import { MaintenanceBanner, UpgradeVersionBanner } from "banners"; -import { getAntdTheme, getSystemColorTheme } from "theme"; -import { formatUserName } from "oxalis/model/accessors/user_accessor"; import { isAnnotationFromDifferentOrganization, isAnnotationOwner as isAnnotationOwnerAccessor, } from "oxalis/model/accessors/annotation_accessor"; +import { formatUserName } from "oxalis/model/accessors/user_accessor"; +import { setThemeAction } from "oxalis/model/actions/ui_actions"; +import { logoutUserAction, setActiveUserAction } from "oxalis/model/actions/user_actions"; +import type { OxalisState } from "oxalis/store"; +import Store from "oxalis/store"; +import { HelpModal } from "oxalis/view/help_modal"; +import { PortalTarget } from "oxalis/view/layouting/portal_utils"; +import type { MenuClickEventHandler } from "rc-menu/lib/interface"; +import { getAntdTheme, getSystemColorTheme } from "theme"; +import type { + APIOrganizationCompact, + APIUser, + APIUserCompact, + APIUserTheme, +} from "types/api_flow_types"; const { Header } = Layout; diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index e6435bca856..4c02316df52 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -1,64 +1,73 @@ -import 
PriorityQueue from "js-priority-queue"; -// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'twee... Remove this comment to see the full error message -import TWEEN from "tween.js"; -import _ from "lodash"; -import type { Bucket, DataBucket } from "oxalis/model/bucket_data_handling/bucket"; -import { getConstructorForElementClass } from "oxalis/model/bucket_data_handling/bucket"; -import { type APICompoundType, APICompoundTypeEnum, type ElementClass } from "types/api_flow_types"; -import { InputKeyboardNoLoop } from "libs/input"; -import { M4x4, type Matrix4x4, V3, type Vector16 } from "libs/mjs"; -import { - addTreesAndGroupsAction, - setActiveNodeAction, - createCommentAction, - deleteNodeAction, - centerActiveNodeAction, - deleteTreeAction, - resetSkeletonTracingAction, - setNodeRadiusAction, - setTreeNameAction, - setActiveTreeAction, - setActiveTreeGroupAction, - setActiveTreeByNameAction, - setTreeColorIndexAction, - setTreeVisibilityAction, - setTreeGroupAction, - setTreeGroupsAction, - setTreeEdgeVisibilityAction, - createTreeAction, -} from "oxalis/model/actions/skeletontracing_actions"; -import { - bucketPositionToGlobalAddress, - globalPositionToBucketPosition, - scaleGlobalPositionWithMagnification, - zoomedAddressToZoomedPosition, -} from "oxalis/model/helpers/position_converter"; -import { - callDeep, - createGroupToSegmentsMap, - MISSING_GROUP_ID, - moveGroupsHelper, -} from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; -import { centerTDViewAction } from "oxalis/model/actions/view_mode_actions"; -import { disableSavingAction, discardSaveQueuesAction } from "oxalis/model/actions/save_actions"; import { doWithToken, finishAnnotation, getMappingsForDatasetLayer, sendAnalyticsEvent, } from "admin/admin_rest_api"; +import { requestTask } from "admin/api/tasks"; +import PriorityQueue from "js-priority-queue"; +import { InputKeyboardNoLoop } from "libs/input"; +import { M4x4, type Matrix4x4, V3, type 
Vector16 } from "libs/mjs"; +import Request from "libs/request"; +import type { ToastStyle } from "libs/toast"; +import Toast from "libs/toast"; +import UserLocalStorage from "libs/user_local_storage"; +import * as Utils from "libs/utils"; +import { coalesce } from "libs/utils"; +import window, { location } from "libs/window"; +import _ from "lodash"; +import messages from "messages"; +import type { + AnnotationTool, + BoundingBoxType, + BucketAddress, + ControlMode, + OrthoView, + TypedArray, + Vector3, + Vector4, +} from "oxalis/constants"; +import Constants, { + ControlModeEnum, + OrthoViews, + AnnotationToolEnum, + TDViewDisplayModeEnum, + MappingStatusEnum, + EMPTY_OBJECT, +} from "oxalis/constants"; +import { rotate3DViewTo } from "oxalis/controller/camera_controller"; +import { loadAgglomerateSkeletonForSegmentId } from "oxalis/controller/combinations/segmentation_handlers"; +import { + createSkeletonNode, + getOptionsForCreateSkeletonNode, +} from "oxalis/controller/combinations/skeleton_handlers"; +import UrlManager from "oxalis/controller/url_manager"; +import type { OxalisModel } from "oxalis/model"; +import { + flatToNestedMatrix, + getLayerBoundingBox, + getLayerByName, + getMagInfo, + getMappingInfo, + getVisibleSegmentationLayer, +} from "oxalis/model/accessors/dataset_accessor"; +import { + getActiveMagIndexForLayer, + getPosition, + getRotation, +} from "oxalis/model/accessors/flycam_accessor"; import { findTreeByNodeId, - getNodeAndTree, - getNodeAndTreeOrNull, getActiveNode, getActiveTree, getActiveTreeGroup, - getTree, getFlatTreeGroups, + getNodeAndTree, + getNodeAndTreeOrNull, + getNodePosition, + getTree, getTreeGroupsMap, mapGroups, - getNodePosition, } from "oxalis/model/accessors/skeletontracing_accessor"; import { getActiveCellId, @@ -75,28 +84,50 @@ import { getVolumeTracings, hasVolumeTracings, } from "oxalis/model/accessors/volumetracing_accessor"; -import { getHalfViewportExtentsInUnitFromState } from 
"oxalis/model/sagas/saga_selectors"; -import { - getLayerBoundingBox, - getLayerByName, - getMagInfo, - getVisibleSegmentationLayer, - getMappingInfo, - flatToNestedMatrix, -} from "oxalis/model/accessors/dataset_accessor"; +import { restartSagaAction, wkReadyAction } from "oxalis/model/actions/actions"; import { - getPosition, - getActiveMagIndexForLayer, - getRotation, -} from "oxalis/model/accessors/flycam_accessor"; + dispatchMaybeFetchMeshFilesAsync, + refreshMeshesAction, + removeMeshAction, + updateCurrentMeshFileAction, + updateMeshVisibilityAction, +} from "oxalis/model/actions/annotation_actions"; +import { setLayerTransformsAction } from "oxalis/model/actions/dataset_actions"; +import { setPositionAction, setRotationAction } from "oxalis/model/actions/flycam_actions"; +import { disableSavingAction, discardSaveQueuesAction } from "oxalis/model/actions/save_actions"; import { loadAdHocMeshAction, loadPrecomputedMeshAction, } from "oxalis/model/actions/segmentation_actions"; -import { loadAgglomerateSkeletonForSegmentId } from "oxalis/controller/combinations/segmentation_handlers"; -import { overwriteAction } from "oxalis/model/helpers/overwrite_action_middleware"; -import { parseNml } from "oxalis/model/helpers/nml_helpers"; -import { rotate3DViewTo } from "oxalis/controller/camera_controller"; +import { + setMappingAction, + setMappingEnabledAction, + updateDatasetSettingAction, + updateLayerSettingAction, + updateUserSettingAction, +} from "oxalis/model/actions/settings_actions"; +import { + addTreesAndGroupsAction, + centerActiveNodeAction, + createCommentAction, + createTreeAction, + deleteNodeAction, + deleteTreeAction, + resetSkeletonTracingAction, + setActiveNodeAction, + setActiveTreeAction, + setActiveTreeByNameAction, + setActiveTreeGroupAction, + setNodeRadiusAction, + setTreeColorIndexAction, + setTreeEdgeVisibilityAction, + setTreeGroupAction, + setTreeGroupsAction, + setTreeNameAction, + setTreeVisibilityAction, +} from 
"oxalis/model/actions/skeletontracing_actions"; +import { setToolAction } from "oxalis/model/actions/ui_actions"; +import { centerTDViewAction } from "oxalis/model/actions/view_mode_actions"; import { type BatchableUpdateSegmentAction, batchUpdateGroupsAndSegmentsAction, @@ -106,80 +137,49 @@ import { setSegmentGroupsAction, updateSegmentAction, } from "oxalis/model/actions/volumetracing_actions"; -import { setPositionAction, setRotationAction } from "oxalis/model/actions/flycam_actions"; -import { setToolAction } from "oxalis/model/actions/ui_actions"; -import { - updateCurrentMeshFileAction, - refreshMeshesAction, - updateMeshVisibilityAction, - removeMeshAction, - dispatchMaybeFetchMeshFilesAsync, -} from "oxalis/model/actions/annotation_actions"; -import { - updateUserSettingAction, - updateDatasetSettingAction, - updateLayerSettingAction, - setMappingAction, - setMappingEnabledAction, -} from "oxalis/model/actions/settings_actions"; -import { wkReadyAction, restartSagaAction } from "oxalis/model/actions/actions"; -import type { - BoundingBoxType, - ControlMode, - OrthoView, - Vector3, - Vector4, - AnnotationTool, - TypedArray, - BucketAddress, -} from "oxalis/constants"; -import Constants, { - ControlModeEnum, - OrthoViews, - AnnotationToolEnum, - TDViewDisplayModeEnum, - MappingStatusEnum, - EMPTY_OBJECT, -} from "oxalis/constants"; +import type { Bucket, DataBucket } from "oxalis/model/bucket_data_handling/bucket"; +import { getConstructorForElementClass } from "oxalis/model/bucket_data_handling/bucket"; import type DataLayer from "oxalis/model/data_layer"; -import type { OxalisModel } from "oxalis/model"; +import dimensions from "oxalis/model/dimensions"; +import { MagInfo } from "oxalis/model/helpers/mag_info"; +import { parseNml } from "oxalis/model/helpers/nml_helpers"; +import { overwriteAction } from "oxalis/model/helpers/overwrite_action_middleware"; +import { + bucketPositionToGlobalAddress, + globalPositionToBucketPosition, + 
scaleGlobalPositionWithMagnification, + zoomedAddressToZoomedPosition, +} from "oxalis/model/helpers/position_converter"; +import { getMaximumGroupId } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; +import { getHalfViewportExtentsInUnitFromState } from "oxalis/model/sagas/saga_selectors"; import { Model, api } from "oxalis/singletons"; -import Request from "libs/request"; import type { - MappingType, DatasetConfiguration, Mapping, + MappingType, + MutableNode, Node, + OxalisState, + Segment, + SegmentGroup, SkeletonTracing, Tracing, TreeGroupTypeFlat, TreeMap, UserConfiguration, VolumeTracing, - OxalisState, - SegmentGroup, - Segment, - MutableNode, } from "oxalis/store"; import Store from "oxalis/store"; -import type { ToastStyle } from "libs/toast"; -import Toast from "libs/toast"; -import UrlManager from "oxalis/controller/url_manager"; -import UserLocalStorage from "libs/user_local_storage"; -import * as Utils from "libs/utils"; -import dimensions from "oxalis/model/dimensions"; -import messages from "messages"; -import window, { location } from "libs/window"; -import { coalesce } from "libs/utils"; -import { setLayerTransformsAction } from "oxalis/model/actions/dataset_actions"; -import { MagInfo } from "oxalis/model/helpers/mag_info"; -import type { AdditionalCoordinate } from "types/api_flow_types"; -import { getMaximumGroupId } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { - createSkeletonNode, - getOptionsForCreateSkeletonNode, -} from "oxalis/controller/combinations/skeleton_handlers"; -import { requestTask } from "admin/api/tasks"; + MISSING_GROUP_ID, + callDeep, + createGroupToSegmentsMap, + moveGroupsHelper, +} from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; +// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'twee... 
Remove this comment to see the full error message +import TWEEN from "tween.js"; +import { type APICompoundType, APICompoundTypeEnum, type ElementClass } from "types/api_flow_types"; +import type { AdditionalCoordinate } from "types/api_flow_types"; type TransformSpec = | { type: "scale"; args: [Vector3, Vector3] } diff --git a/frontend/javascripts/oxalis/api/api_loader.ts b/frontend/javascripts/oxalis/api/api_loader.ts index 7bb54b9d3c9..3ece3a89cb2 100644 --- a/frontend/javascripts/oxalis/api/api_loader.ts +++ b/frontend/javascripts/oxalis/api/api_loader.ts @@ -1,6 +1,6 @@ +import app from "app"; // only relative imports are followed by documentationjs import type { OxalisModel } from "oxalis/model"; -import app from "app"; import createApiLatest, { type ApiInterface } from "./api_latest"; import WkDev from "./wk_dev"; const latestVersion = 3; diff --git a/frontend/javascripts/oxalis/api/cross_origin_api.ts b/frontend/javascripts/oxalis/api/cross_origin_api.ts index 22102cd4019..0c267fe8d8d 100644 --- a/frontend/javascripts/oxalis/api/cross_origin_api.ts +++ b/frontend/javascripts/oxalis/api/cross_origin_api.ts @@ -1,6 +1,6 @@ -import { useEffect } from "react"; import _ from "lodash"; import { api } from "oxalis/singletons"; +import { useEffect } from "react"; // This component allows cross origin communication, for example, between a host page // and an embedded webKnossos iframe. diff --git a/frontend/javascripts/oxalis/api/internal_api.ts b/frontend/javascripts/oxalis/api/internal_api.ts index a53a073e995..0610808523a 100644 --- a/frontend/javascripts/oxalis/api/internal_api.ts +++ b/frontend/javascripts/oxalis/api/internal_api.ts @@ -1,7 +1,7 @@ +import createApi from "oxalis/api/api_latest"; // This module exposes the api for internal usage, so that we don't have to // deal with versioning, creation and waiting of/for the api. 
import { Model, setApi } from "oxalis/singletons"; -import createApi from "oxalis/api/api_latest"; export function setupApi() { const api = createApi(Model); diff --git a/frontend/javascripts/oxalis/api/wk_dev.ts b/frontend/javascripts/oxalis/api/wk_dev.ts index bd8cb397adf..90d7541462e 100644 --- a/frontend/javascripts/oxalis/api/wk_dev.ts +++ b/frontend/javascripts/oxalis/api/wk_dev.ts @@ -1,11 +1,11 @@ -import { Store } from "oxalis/singletons"; -import type { Vector3 } from "oxalis/constants"; +import showFpsMeter from "libs/fps_meter"; import { V3 } from "libs/mjs"; import { roundTo, sleep } from "libs/utils"; -import type ApiLoader from "./api_loader"; -import type { ApiInterface } from "./api_latest"; -import showFpsMeter from "libs/fps_meter"; import _ from "lodash"; +import type { Vector3 } from "oxalis/constants"; +import { Store } from "oxalis/singletons"; +import type { ApiInterface } from "./api_latest"; +import type ApiLoader from "./api_loader"; // Can be accessed via window.webknossos.DEV.flags. Only use this // for debugging or one off scripts. 
diff --git a/frontend/javascripts/oxalis/controller.tsx b/frontend/javascripts/oxalis/controller.tsx index 537977d8e9d..cd93f8d680e 100644 --- a/frontend/javascripts/oxalis/controller.tsx +++ b/frontend/javascripts/oxalis/controller.tsx @@ -1,34 +1,34 @@ -import type { RouteComponentProps } from "react-router-dom"; -import { withRouter } from "react-router-dom"; -import { connect } from "react-redux"; -import type { Location as HistoryLocation, Action as HistoryAction } from "history"; -import * as React from "react"; -import _ from "lodash"; -import { APIAnnotationTypeEnum, type APICompoundType } from "types/api_flow_types"; -import { HANDLED_ERROR } from "oxalis/model_initialization"; -import { InputKeyboardNoLoop } from "libs/input"; +import app from "app"; +import BrainSpinner, { BrainSpinnerWithError, CoverWithLogin } from "components/brain_spinner"; +import type { Action as HistoryAction, Location as HistoryLocation } from "history"; import { fetchGistContent } from "libs/gist"; -import { initializeSceneController } from "oxalis/controller/scene_controller"; -import { saveNowAction, undoAction, redoAction } from "oxalis/model/actions/save_actions"; -import { setIsInAnnotationViewAction } from "oxalis/model/actions/ui_actions"; -import { setViewModeAction, updateLayerSettingAction } from "oxalis/model/actions/settings_actions"; -import { wkReadyAction } from "oxalis/model/actions/actions"; +import { InputKeyboardNoLoop } from "libs/input"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import window, { document, location } from "libs/window"; +import _ from "lodash"; +import messages from "messages"; import ApiLoader from "oxalis/api/api_loader"; +import type { ViewMode } from "oxalis/constants"; +import constants, { ControlModeEnum } from "oxalis/constants"; +import { initializeSceneController } from "oxalis/controller/scene_controller"; +import UrlManager from "oxalis/controller/url_manager"; import ArbitraryController from 
"oxalis/controller/viewmodes/arbitrary_controller"; -import BrainSpinner, { BrainSpinnerWithError, CoverWithLogin } from "components/brain_spinner"; -import { Model } from "oxalis/singletons"; import PlaneController from "oxalis/controller/viewmodes/plane_controller"; +import { wkReadyAction } from "oxalis/model/actions/actions"; +import { redoAction, saveNowAction, undoAction } from "oxalis/model/actions/save_actions"; +import { setViewModeAction, updateLayerSettingAction } from "oxalis/model/actions/settings_actions"; +import { setIsInAnnotationViewAction } from "oxalis/model/actions/ui_actions"; +import { HANDLED_ERROR } from "oxalis/model_initialization"; +import { Model } from "oxalis/singletons"; import type { OxalisState, TraceOrViewCommand } from "oxalis/store"; import Store from "oxalis/store"; -import Toast from "libs/toast"; -import UrlManager from "oxalis/controller/url_manager"; -import * as Utils from "libs/utils"; -import type { APIUser, APIOrganization } from "types/api_flow_types"; -import app from "app"; -import type { ViewMode } from "oxalis/constants"; -import constants, { ControlModeEnum } from "oxalis/constants"; -import messages from "messages"; -import window, { document, location } from "libs/window"; +import * as React from "react"; +import { connect } from "react-redux"; +import type { RouteComponentProps } from "react-router-dom"; +import { withRouter } from "react-router-dom"; +import { APIAnnotationTypeEnum, type APICompoundType } from "types/api_flow_types"; +import type { APIOrganization, APIUser } from "types/api_flow_types"; import type DataLayer from "./model/data_layer"; export type ControllerStatus = "loading" | "loaded" | "failedLoading"; diff --git a/frontend/javascripts/oxalis/controller/camera_controller.ts b/frontend/javascripts/oxalis/controller/camera_controller.ts index 122071b43aa..37f2821c766 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.ts +++ 
b/frontend/javascripts/oxalis/controller/camera_controller.ts @@ -1,24 +1,24 @@ -import * as React from "react"; -import * as THREE from "three"; -// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'twee... Remove this comment to see the full error message -import TWEEN from "tween.js"; -import _ from "lodash"; +import { V3 } from "libs/mjs"; import * as Utils from "libs/utils"; +import _ from "lodash"; import type { OrthoView, OrthoViewMap, OrthoViewRects, Vector3 } from "oxalis/constants"; import { OrthoViewValuesWithoutTDView, OrthoViews } from "oxalis/constants"; -import { V3 } from "libs/mjs"; -import { getDatasetExtentInUnit, getDatasetCenter } from "oxalis/model/accessors/dataset_accessor"; +import { getDatasetCenter, getDatasetExtentInUnit } from "oxalis/model/accessors/dataset_accessor"; +import { getPosition } from "oxalis/model/accessors/flycam_accessor"; import { getInputCatcherAspectRatio, getPlaneExtentInVoxelFromStore, } from "oxalis/model/accessors/view_mode_accessor"; -import { getPosition } from "oxalis/model/accessors/flycam_accessor"; -import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import { setTDCameraWithoutTimeTrackingAction } from "oxalis/model/actions/view_mode_actions"; +import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import { getBaseVoxelInUnit, voxelToUnit } from "oxalis/model/scaleinfo"; +import { api } from "oxalis/singletons"; import type { CameraData } from "oxalis/store"; import Store from "oxalis/store"; -import { api } from "oxalis/singletons"; +import * as React from "react"; +import * as THREE from "three"; +// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'twee... 
Remove this comment to see the full error message +import TWEEN from "tween.js"; type Props = { cameras: OrthoViewMap; diff --git a/frontend/javascripts/oxalis/controller/combinations/bounding_box_handlers.ts b/frontend/javascripts/oxalis/controller/combinations/bounding_box_handlers.ts index ea389aaeec3..1a5513fc447 100644 --- a/frontend/javascripts/oxalis/controller/combinations/bounding_box_handlers.ts +++ b/frontend/javascripts/oxalis/controller/combinations/bounding_box_handlers.ts @@ -1,22 +1,22 @@ +import { V3 } from "libs/mjs"; +import { document } from "libs/window"; +import _ from "lodash"; +import type { BoundingBoxType, OrthoView, Point2, Vector2, Vector3 } from "oxalis/constants"; +import getSceneController from "oxalis/controller/scene_controller_provider"; +import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { calculateGlobalDelta, calculateGlobalPos, calculateMaybeGlobalPos, } from "oxalis/model/accessors/view_mode_accessor"; -import _ from "lodash"; -import type { OrthoView, Point2, Vector3, BoundingBoxType, Vector2 } from "oxalis/constants"; -import Store, { type OxalisState, type UserBoundingBox } from "oxalis/store"; -import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; -import type { DimensionMap, DimensionIndices } from "oxalis/model/dimensions"; -import Dimension from "oxalis/model/dimensions"; import { addUserBoundingBoxAction, changeUserBoundingBoxAction, } from "oxalis/model/actions/annotation_actions"; +import type { DimensionIndices, DimensionMap } from "oxalis/model/dimensions"; +import Dimension from "oxalis/model/dimensions"; import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import { document } from "libs/window"; -import { V3 } from "libs/mjs"; +import Store, { type OxalisState, type UserBoundingBox } from "oxalis/store"; const BOUNDING_BOX_HOVERING_THROTTLE_TIME = 100; const 
getNeighbourEdgeIndexByEdgeIndex: { [key: number]: Vector2 } = { diff --git a/frontend/javascripts/oxalis/controller/combinations/move_handlers.ts b/frontend/javascripts/oxalis/controller/combinations/move_handlers.ts index 1029b08f6f1..ae0bef4ed85 100644 --- a/frontend/javascripts/oxalis/controller/combinations/move_handlers.ts +++ b/frontend/javascripts/oxalis/controller/combinations/move_handlers.ts @@ -1,17 +1,17 @@ -import Store from "oxalis/store"; -import type { Point2, Vector3, OrthoView } from "oxalis/constants"; -import { OrthoViews, OrthoViewValuesWithoutTDView } from "oxalis/constants"; -import Dimensions from "oxalis/model/dimensions"; -import { getInputCatcherRect, calculateGlobalPos } from "oxalis/model/accessors/view_mode_accessor"; +import type { OrthoView, Point2, Vector3 } from "oxalis/constants"; +import { OrthoViewValuesWithoutTDView, OrthoViews } from "oxalis/constants"; import { is2dDataset } from "oxalis/model/accessors/dataset_accessor"; +import { getActiveMagInfo } from "oxalis/model/accessors/flycam_accessor"; +import { calculateGlobalPos, getInputCatcherRect } from "oxalis/model/accessors/view_mode_accessor"; import { - movePlaneFlycamOrthoAction, moveFlycamOrthoAction, + movePlaneFlycamOrthoAction, zoomByDeltaAction, } from "oxalis/model/actions/flycam_actions"; import { setViewportAction, zoomTDViewAction } from "oxalis/model/actions/view_mode_actions"; -import { getActiveMagInfo } from "oxalis/model/accessors/flycam_accessor"; import { setMousePositionAction } from "oxalis/model/actions/volumetracing_actions"; +import Dimensions from "oxalis/model/dimensions"; +import Store from "oxalis/store"; export function setMousePosition(position: Point2 | null | undefined): void { if (position != null) { diff --git a/frontend/javascripts/oxalis/controller/combinations/segmentation_handlers.ts b/frontend/javascripts/oxalis/controller/combinations/segmentation_handlers.ts index d8e21ac7c16..9b21e6da42b 100644 --- 
a/frontend/javascripts/oxalis/controller/combinations/segmentation_handlers.ts +++ b/frontend/javascripts/oxalis/controller/combinations/segmentation_handlers.ts @@ -1,21 +1,21 @@ -import type { Point2, Vector3 } from "oxalis/constants"; -import { Model } from "oxalis/singletons"; -import { calculateGlobalPos } from "oxalis/model/accessors/view_mode_accessor"; -import { getMappingInfo } from "oxalis/model/accessors/dataset_accessor"; -import { loadAgglomerateSkeletonAction } from "oxalis/model/actions/skeletontracing_actions"; -import Store from "oxalis/store"; import Toast from "libs/toast"; -import { clickSegmentAction } from "oxalis/model/actions/volumetracing_actions"; +import type { Point2, Vector3 } from "oxalis/constants"; import { getSegmentIdForPosition, getSegmentIdForPositionAsync, } from "oxalis/controller/combinations/volume_handlers"; -import { setActiveConnectomeAgglomerateIdsAction } from "oxalis/model/actions/connectome_actions"; +import { getMappingInfo } from "oxalis/model/accessors/dataset_accessor"; import { getTreeNameForAgglomerateSkeleton } from "oxalis/model/accessors/skeletontracing_accessor"; +import { calculateGlobalPos } from "oxalis/model/accessors/view_mode_accessor"; import { hasAgglomerateMapping, hasConnectomeFile, } from "oxalis/model/accessors/volumetracing_accessor"; +import { setActiveConnectomeAgglomerateIdsAction } from "oxalis/model/actions/connectome_actions"; +import { loadAgglomerateSkeletonAction } from "oxalis/model/actions/skeletontracing_actions"; +import { clickSegmentAction } from "oxalis/model/actions/volumetracing_actions"; +import { Model } from "oxalis/singletons"; +import Store from "oxalis/store"; export async function handleAgglomerateSkeletonAtClick(clickPosition: Point2) { const state = Store.getState(); diff --git a/frontend/javascripts/oxalis/controller/combinations/skeleton_handlers.ts b/frontend/javascripts/oxalis/controller/combinations/skeleton_handlers.ts index f77a3af4640..c5f935cb52f 100644 --- 
a/frontend/javascripts/oxalis/controller/combinations/skeleton_handlers.ts +++ b/frontend/javascripts/oxalis/controller/combinations/skeleton_handlers.ts @@ -1,52 +1,52 @@ -import * as THREE from "three"; -import type { OrthoView, OrthoViewMap, Point2, Vector3, Viewport } from "oxalis/constants"; -import { OrthoViews } from "oxalis/constants"; import { V3 } from "libs/mjs"; -import _ from "lodash"; import { values } from "libs/utils"; +import _ from "lodash"; +import type { OrthoView, OrthoViewMap, Point2, Vector3, Viewport } from "oxalis/constants"; +import { OrthoViews } from "oxalis/constants"; +import { getClosestHoveredBoundingBox } from "oxalis/controller/combinations/bounding_box_handlers"; +import getSceneController from "oxalis/controller/scene_controller_provider"; +import { getEnabledColorLayers } from "oxalis/model/accessors/dataset_accessor"; +import { + getActiveMagIndicesForLayers, + getPosition, + getRotationOrtho, + isMagRestrictionViolated, +} from "oxalis/model/accessors/flycam_accessor"; import { enforceSkeletonTracing, - getSkeletonTracing, getActiveNode, getNodeAndTree, getNodeAndTreeOrNull, getNodePosition, + getSkeletonTracing, untransformNodePosition, } from "oxalis/model/accessors/skeletontracing_accessor"; import { - getInputCatcherRect, calculateGlobalPos, calculateMaybeGlobalPos, + getInputCatcherRect, } from "oxalis/model/accessors/view_mode_accessor"; +import { setDirectionAction } from "oxalis/model/actions/flycam_actions"; import { - getActiveMagIndicesForLayers, - getPosition, - getRotationOrtho, - isMagRestrictionViolated, -} from "oxalis/model/accessors/flycam_accessor"; -import { - setActiveNodeAction, - deleteEdgeAction, - createTreeAction, - createNodeAction, createBranchPointAction, + createNodeAction, + createTreeAction, + deleteEdgeAction, mergeTreesAction, + setActiveNodeAction, setNodePositionAction, updateNavigationListAction, } from "oxalis/model/actions/skeletontracing_actions"; -import { setDirectionAction } from 
"oxalis/model/actions/flycam_actions"; -import type PlaneView from "oxalis/view/plane_view"; -import Store from "oxalis/store"; -import type { Edge, Tree, Node } from "oxalis/store"; -import { api } from "oxalis/singletons"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import { renderToTexture } from "oxalis/view/rendering_utils"; -import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; +import { showContextMenuAction } from "oxalis/model/actions/ui_actions"; import Dimensions from "oxalis/model/dimensions"; -import { getClosestHoveredBoundingBox } from "oxalis/controller/combinations/bounding_box_handlers"; -import { getEnabledColorLayers } from "oxalis/model/accessors/dataset_accessor"; +import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; +import { api } from "oxalis/singletons"; +import Store from "oxalis/store"; +import type { Edge, Node, Tree } from "oxalis/store"; import type ArbitraryView from "oxalis/view/arbitrary_view"; -import { showContextMenuAction } from "oxalis/model/actions/ui_actions"; +import type PlaneView from "oxalis/view/plane_view"; +import { renderToTexture } from "oxalis/view/rendering_utils"; +import * as THREE from "three"; import type { AdditionalCoordinate } from "types/api_flow_types"; const OrthoViewToNumber: OrthoViewMap = { [OrthoViews.PLANE_XY]: 0, diff --git a/frontend/javascripts/oxalis/controller/combinations/tool_controls.ts b/frontend/javascripts/oxalis/controller/combinations/tool_controls.ts index dc9c461de42..6da4b921ca4 100644 --- a/frontend/javascripts/oxalis/controller/combinations/tool_controls.ts +++ b/frontend/javascripts/oxalis/controller/combinations/tool_controls.ts @@ -1,60 +1,60 @@ +import features from "features"; import type { ModifierKeys } from "libs/input"; -import * as THREE from "three"; -import type { OrthoView, Point2, AnnotationTool, Vector3, Viewport } from "oxalis/constants"; -import { OrthoViews, ContourModeEnum, AnnotationToolEnum } from 
"oxalis/constants"; -import { - enforceActiveVolumeTracing, - getActiveSegmentationTracing, - getContourTracingMode, - getSegmentColorAsHSLA, -} from "oxalis/model/accessors/volumetracing_accessor"; -import { - handleAgglomerateSkeletonAtClick, - handleClickSegment, -} from "oxalis/controller/combinations/segmentation_handlers"; -import { - computeQuickSelectForPointAction, - computeQuickSelectForRectAction, - confirmQuickSelectAction, - hideBrushAction, -} from "oxalis/model/actions/volumetracing_actions"; -import { isBrushTool } from "oxalis/model/accessors/tool_accessor"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import { finishedResizingUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; -import * as MoveHandlers from "oxalis/controller/combinations/move_handlers"; -import type PlaneView from "oxalis/view/plane_view"; -import * as SkeletonHandlers from "oxalis/controller/combinations/skeleton_handlers"; +import { V3 } from "libs/mjs"; +import * as Utils from "libs/utils"; +import { document } from "libs/window"; +import type { AnnotationTool, OrthoView, Point2, Vector3, Viewport } from "oxalis/constants"; +import { AnnotationToolEnum, ContourModeEnum, OrthoViews } from "oxalis/constants"; import { + type SelectedEdge, createBoundingBoxAndGetEdges, handleMovingBoundingBox, - type SelectedEdge, } from "oxalis/controller/combinations/bounding_box_handlers"; import { getClosestHoveredBoundingBox, handleResizingBoundingBox, highlightAndSetCursorOnHoveredBoundingBox, } from "oxalis/controller/combinations/bounding_box_handlers"; -import Store from "oxalis/store"; -import * as Utils from "libs/utils"; +import * as MoveHandlers from "oxalis/controller/combinations/move_handlers"; +import { + handleAgglomerateSkeletonAtClick, + handleClickSegment, +} from "oxalis/controller/combinations/segmentation_handlers"; +import * as SkeletonHandlers from "oxalis/controller/combinations/skeleton_handlers"; import * as 
VolumeHandlers from "oxalis/controller/combinations/volume_handlers"; -import { document } from "libs/window"; -import { api } from "oxalis/singletons"; +import getSceneController from "oxalis/controller/scene_controller_provider"; +import { isBrushTool } from "oxalis/model/accessors/tool_accessor"; +import { calculateGlobalPos } from "oxalis/model/accessors/view_mode_accessor"; +import { + enforceActiveVolumeTracing, + getActiveSegmentationTracing, + getContourTracingMode, + getSegmentColorAsHSLA, +} from "oxalis/model/accessors/volumetracing_accessor"; +import { finishedResizingUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; import { minCutAgglomerateWithPositionAction, proofreadAtPosition, proofreadMerge, } from "oxalis/model/actions/proofread_actions"; -import { calculateGlobalPos } from "oxalis/model/accessors/view_mode_accessor"; -import { V3 } from "libs/mjs"; import { hideMeasurementTooltipAction, - setQuickSelectStateAction, - setLastMeasuredPositionAction, - setIsMeasuringAction, setActiveUserBoundingBoxId, + setIsMeasuringAction, + setLastMeasuredPositionAction, + setQuickSelectStateAction, } from "oxalis/model/actions/ui_actions"; +import { + computeQuickSelectForPointAction, + computeQuickSelectForRectAction, + confirmQuickSelectAction, + hideBrushAction, +} from "oxalis/model/actions/volumetracing_actions"; +import { api } from "oxalis/singletons"; +import Store from "oxalis/store"; import type ArbitraryView from "oxalis/view/arbitrary_view"; -import features from "features"; +import type PlaneView from "oxalis/view/plane_view"; +import * as THREE from "three"; export type ActionDescriptor = { leftClick?: string; diff --git a/frontend/javascripts/oxalis/controller/combinations/volume_handlers.ts b/frontend/javascripts/oxalis/controller/combinations/volume_handlers.ts index cd8651756b9..7abd4d4aac6 100644 --- a/frontend/javascripts/oxalis/controller/combinations/volume_handlers.ts +++ 
b/frontend/javascripts/oxalis/controller/combinations/volume_handlers.ts @@ -1,20 +1,20 @@ +import { V3 } from "libs/mjs"; +import memoizeOne from "memoize-one"; import type { OrthoView, Point2, Vector3 } from "oxalis/constants"; import { ContourModeEnum } from "oxalis/constants"; import { calculateGlobalPos } from "oxalis/model/accessors/view_mode_accessor"; +import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; import { - startEditingAction, - floodFillAction, addToLayerAction, finishEditingAction, - setContourTracingModeAction, - setActiveCellAction, + floodFillAction, resetContourAction, + setActiveCellAction, + setContourTracingModeAction, + startEditingAction, } from "oxalis/model/actions/volumetracing_actions"; import { Model, Store, api } from "oxalis/singletons"; -import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; import type { AdditionalCoordinate } from "types/api_flow_types"; -import memoizeOne from "memoize-one"; -import { V3 } from "libs/mjs"; export function handleDrawStart(pos: Point2, plane: OrthoView) { const state = Store.getState(); diff --git a/frontend/javascripts/oxalis/controller/custom_lod.ts b/frontend/javascripts/oxalis/controller/custom_lod.ts index 209f116b0f4..5db735708cc 100644 --- a/frontend/javascripts/oxalis/controller/custom_lod.ts +++ b/frontend/javascripts/oxalis/controller/custom_lod.ts @@ -1,6 +1,6 @@ -import * as THREE from "three"; -import Store from "oxalis/store"; import { getTDViewZoom } from "oxalis/model/accessors/view_mode_accessor"; +import Store from "oxalis/store"; +import * as THREE from "three"; export default class CustomLOD extends THREE.LOD { noLODGroup: THREE.Group; diff --git a/frontend/javascripts/oxalis/controller/merger_mode_controller.tsx b/frontend/javascripts/oxalis/controller/merger_mode_controller.tsx index bbe0036df0f..6c27db76290 100644 --- a/frontend/javascripts/oxalis/controller/merger_mode_controller.tsx +++ 
b/frontend/javascripts/oxalis/controller/merger_mode_controller.tsx @@ -1,8 +1,8 @@ -import { connect } from "react-redux"; -import { PureComponent } from "react"; +import { disableMergerMode, enableMergerMode } from "oxalis/merger_mode"; import type { OxalisState } from "oxalis/store"; -import { enableMergerMode, disableMergerMode } from "oxalis/merger_mode"; import MergerModeModalView from "oxalis/view/merger_mode_modal_view"; +import { PureComponent } from "react"; +import { connect } from "react-redux"; type MergerModeControllerProps = { isMergerModeEnabled: boolean; }; diff --git a/frontend/javascripts/oxalis/controller/renderer.ts b/frontend/javascripts/oxalis/controller/renderer.ts index 1429dd69c39..683db3a4e49 100644 --- a/frontend/javascripts/oxalis/controller/renderer.ts +++ b/frontend/javascripts/oxalis/controller/renderer.ts @@ -1,6 +1,6 @@ -import * as THREE from "three"; import { document } from "libs/window"; import { Store } from "oxalis/singletons"; +import * as THREE from "three"; let renderer: THREE.WebGLRenderer | null = null; function getRenderer(): THREE.WebGLRenderer { diff --git a/frontend/javascripts/oxalis/controller/scene_controller.ts b/frontend/javascripts/oxalis/controller/scene_controller.ts index 382c6a5a7b3..2bbdc04bad2 100644 --- a/frontend/javascripts/oxalis/controller/scene_controller.ts +++ b/frontend/javascripts/oxalis/controller/scene_controller.ts @@ -1,4 +1,3 @@ -import * as THREE from "three"; import app from "app"; import type Maybe from "data.maybe"; import { V3 } from "libs/mjs"; @@ -46,6 +45,7 @@ import { getVoxelPerUnit } from "oxalis/model/scaleinfo"; import { Model } from "oxalis/singletons"; import type { OxalisState, SkeletonTracing, UserBoundingBox } from "oxalis/store"; import Store from "oxalis/store"; +import * as THREE from "three"; import SegmentMeshController from "./segment_mesh_controller"; const CUBE_COLOR = 0x999999; diff --git a/frontend/javascripts/oxalis/controller/segment_mesh_controller.ts 
b/frontend/javascripts/oxalis/controller/segment_mesh_controller.ts index fcb4b0031af..a7a86134906 100644 --- a/frontend/javascripts/oxalis/controller/segment_mesh_controller.ts +++ b/frontend/javascripts/oxalis/controller/segment_mesh_controller.ts @@ -1,19 +1,19 @@ -// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'twee... Remove this comment to see the full error message -import TWEEN from "tween.js"; -import * as THREE from "three"; import app from "app"; import { mergeVertices } from "libs/BufferGeometryUtils"; import _ from "lodash"; import type { Vector3 } from "oxalis/constants"; import CustomLOD from "oxalis/controller/custom_lod"; +import { getAdditionalCoordinatesAsString } from "oxalis/model/accessors/flycam_accessor"; import { getActiveSegmentationTracing, getSegmentColorAsHSLA, } from "oxalis/model/accessors/volumetracing_accessor"; import { NO_LOD_MESH_INDEX } from "oxalis/model/sagas/mesh_saga"; import Store from "oxalis/store"; +import * as THREE from "three"; +// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'twee... 
Remove this comment to see the full error message +import TWEEN from "tween.js"; import type { AdditionalCoordinate } from "types/api_flow_types"; -import { getAdditionalCoordinatesAsString } from "oxalis/model/accessors/flycam_accessor"; const ACTIVATED_COLOR = [0.7, 0.5, 0.1] as const; const HOVERED_COLOR = [0.65, 0.5, 0.1] as const; diff --git a/frontend/javascripts/oxalis/controller/td_controller.tsx b/frontend/javascripts/oxalis/controller/td_controller.tsx index 46e37444f30..aef7d9992a5 100644 --- a/frontend/javascripts/oxalis/controller/td_controller.tsx +++ b/frontend/javascripts/oxalis/controller/td_controller.tsx @@ -1,43 +1,43 @@ -import _ from "lodash"; -import { connect } from "react-redux"; -import * as React from "react"; -import * as THREE from "three"; import { InputMouse } from "libs/input"; +import { V3 } from "libs/mjs"; +import TrackballControls from "libs/trackball_controls"; +import * as Utils from "libs/utils"; +import _ from "lodash"; import { type AnnotationTool, AnnotationToolEnum, type OrthoView, - OrthoViews, type OrthoViewMap, + OrthoViews, type Point2, type Vector3, } from "oxalis/constants"; -import { V3 } from "libs/mjs"; +import CameraController from "oxalis/controller/camera_controller"; +import { handleOpenContextMenu } from "oxalis/controller/combinations/skeleton_handlers"; +import { ProofreadTool, SkeletonTool } from "oxalis/controller/combinations/tool_controls"; import { getPosition } from "oxalis/model/accessors/flycam_accessor"; -import { getViewportScale, getInputCatcherRect } from "oxalis/model/accessors/view_mode_accessor"; +import { getActiveNode, getNodePosition } from "oxalis/model/accessors/skeletontracing_accessor"; +import { getInputCatcherRect, getViewportScale } from "oxalis/model/accessors/view_mode_accessor"; +import { getActiveSegmentationTracing } from "oxalis/model/accessors/volumetracing_accessor"; import { setPositionAction } from "oxalis/model/actions/flycam_actions"; import { - setViewportAction, + 
moveTDViewByVectorWithoutTimeTrackingAction, + moveTDViewXAction, + moveTDViewYAction, setTDCameraAction, setTDCameraWithoutTimeTrackingAction, + setViewportAction, zoomTDViewAction, - moveTDViewXAction, - moveTDViewYAction, - moveTDViewByVectorWithoutTimeTrackingAction, } from "oxalis/model/actions/view_mode_actions"; -import { getActiveNode, getNodePosition } from "oxalis/model/accessors/skeletontracing_accessor"; +import { setActiveCellAction } from "oxalis/model/actions/volumetracing_actions"; import { voxelToUnit } from "oxalis/model/scaleinfo"; -import CameraController from "oxalis/controller/camera_controller"; -import type PlaneView from "oxalis/view/plane_view"; import type { CameraData, OxalisState, Tracing } from "oxalis/store"; import Store from "oxalis/store"; -import TrackballControls from "libs/trackball_controls"; -import * as Utils from "libs/utils"; -import { ProofreadTool, SkeletonTool } from "oxalis/controller/combinations/tool_controls"; -import { handleOpenContextMenu } from "oxalis/controller/combinations/skeleton_handlers"; +import type PlaneView from "oxalis/view/plane_view"; +import * as React from "react"; +import { connect } from "react-redux"; +import * as THREE from "three"; import type { VoxelSize } from "types/api_flow_types"; -import { setActiveCellAction } from "oxalis/model/actions/volumetracing_actions"; -import { getActiveSegmentationTracing } from "oxalis/model/accessors/volumetracing_accessor"; export function threeCameraToCameraData(camera: THREE.OrthographicCamera): CameraData { const { position, up, near, far, left, right, top, bottom } = camera; diff --git a/frontend/javascripts/oxalis/controller/url_manager.ts b/frontend/javascripts/oxalis/controller/url_manager.ts index 68a88d5708d..5274ab83c00 100644 --- a/frontend/javascripts/oxalis/controller/url_manager.ts +++ b/frontend/javascripts/oxalis/controller/url_manager.ts @@ -1,26 +1,26 @@ -import _ from "lodash"; +import ErrorHandling from "libs/error_handling"; import { 
V3 } from "libs/mjs"; -import { applyState } from "oxalis/model_initialization"; -import { getRotation, getPosition } from "oxalis/model/accessors/flycam_accessor"; -import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; -import type { OxalisState, MappingType, MeshInformation } from "oxalis/store"; -import Store from "oxalis/store"; +import Toast from "libs/toast"; import * as Utils from "libs/utils"; -import type { ViewMode, Vector3 } from "oxalis/constants"; -import constants, { ViewModeValues, MappingStatusEnum } from "oxalis/constants"; +import { coalesce } from "libs/utils"; import window, { location } from "libs/window"; -import ErrorHandling from "libs/error_handling"; -import Toast from "libs/toast"; +import _ from "lodash"; import messages from "messages"; -import { validateUrlStateJSON } from "types/validation"; -import { type APIAnnotationType, APICompoundTypeEnum } from "types/api_flow_types"; -import { coalesce } from "libs/utils"; -import type { AdditionalCoordinate } from "types/api_flow_types"; +import type { Vector3, ViewMode } from "oxalis/constants"; +import constants, { ViewModeValues, MappingStatusEnum } from "oxalis/constants"; +import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; +import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; +import { getMeshesForCurrentAdditionalCoordinates } from "oxalis/model/accessors/volumetracing_accessor"; import { additionalCoordinateToKeyValue, parseAdditionalCoordinateKey, } from "oxalis/model/helpers/nml_helpers"; -import { getMeshesForCurrentAdditionalCoordinates } from "oxalis/model/accessors/volumetracing_accessor"; +import { applyState } from "oxalis/model_initialization"; +import type { MappingType, MeshInformation, OxalisState } from "oxalis/store"; +import Store from "oxalis/store"; +import { type APIAnnotationType, APICompoundTypeEnum } from "types/api_flow_types"; +import type { 
AdditionalCoordinate } from "types/api_flow_types"; +import { validateUrlStateJSON } from "types/validation"; const MAX_UPDATE_INTERVAL = 1000; const MINIMUM_VALID_CSV_LENGTH = 5; diff --git a/frontend/javascripts/oxalis/controller/viewmodes/arbitrary_controller.tsx b/frontend/javascripts/oxalis/controller/viewmodes/arbitrary_controller.tsx index 7f81ed18181..3878eb43a86 100644 --- a/frontend/javascripts/oxalis/controller/viewmodes/arbitrary_controller.tsx +++ b/frontend/javascripts/oxalis/controller/viewmodes/arbitrary_controller.tsx @@ -1,51 +1,51 @@ -import * as React from "react"; import type { ModifierKeys } from "libs/input"; import { InputKeyboard, InputKeyboardNoLoop, InputMouse } from "libs/input"; import type { Matrix4x4 } from "libs/mjs"; import { V3 } from "libs/mjs"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import messages from "messages"; +import type { Point2, Vector3, ViewMode, Viewport } from "oxalis/constants"; +import constants, { ArbitraryViewport } from "oxalis/constants"; +import getSceneController from "oxalis/controller/scene_controller_provider"; +import TDController from "oxalis/controller/td_controller"; +import ArbitraryPlane from "oxalis/geometries/arbitrary_plane"; +import Crosshair from "oxalis/geometries/crosshair"; +import { getMoveOffset3d, getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; import { getActiveNode, getMaxNodeId, getNodePosition, untransformNodePosition, } from "oxalis/model/accessors/skeletontracing_accessor"; -import { getRotation, getPosition, getMoveOffset3d } from "oxalis/model/accessors/flycam_accessor"; import { getViewportScale } from "oxalis/model/accessors/view_mode_accessor"; -import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import { - setActiveNodeAction, - deleteNodeAsUserAction, - createNodeAction, - createBranchPointAction, - requestDeleteBranchPointAction, - toggleAllTreesAction, - toggleInactiveTreesAction, - 
createTreeAction, -} from "oxalis/model/actions/skeletontracing_actions"; + moveFlycamAction, + pitchFlycamAction, + yawFlycamAction, + zoomInAction, + zoomOutAction, +} from "oxalis/model/actions/flycam_actions"; import { setFlightmodeRecordingAction, updateUserSettingAction, } from "oxalis/model/actions/settings_actions"; import { - yawFlycamAction, - pitchFlycamAction, - zoomInAction, - zoomOutAction, - moveFlycamAction, -} from "oxalis/model/actions/flycam_actions"; -import ArbitraryPlane from "oxalis/geometries/arbitrary_plane"; -import ArbitraryView from "oxalis/view/arbitrary_view"; -import Crosshair from "oxalis/geometries/crosshair"; -import Store from "oxalis/store"; -import TDController from "oxalis/controller/td_controller"; -import Toast from "libs/toast"; -import * as Utils from "libs/utils"; + createBranchPointAction, + createNodeAction, + createTreeAction, + deleteNodeAsUserAction, + requestDeleteBranchPointAction, + setActiveNodeAction, + toggleAllTreesAction, + toggleInactiveTreesAction, +} from "oxalis/model/actions/skeletontracing_actions"; +import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import { api } from "oxalis/singletons"; -import type { ViewMode, Point2, Vector3, Viewport } from "oxalis/constants"; -import constants, { ArbitraryViewport } from "oxalis/constants"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import messages from "messages"; +import Store from "oxalis/store"; +import ArbitraryView from "oxalis/view/arbitrary_view"; import { downloadScreenshot } from "oxalis/view/rendering_utils"; +import * as React from "react"; import { SkeletonTool } from "../combinations/tool_controls"; const arbitraryViewportId = "inputcatcher_arbitraryViewport"; diff --git a/frontend/javascripts/oxalis/controller/viewmodes/plane_controller.tsx b/frontend/javascripts/oxalis/controller/viewmodes/plane_controller.tsx index 99dfb486eea..b24ae800199 100644 --- 
a/frontend/javascripts/oxalis/controller/viewmodes/plane_controller.tsx +++ b/frontend/javascripts/oxalis/controller/viewmodes/plane_controller.tsx @@ -1,70 +1,70 @@ -import { connect } from "react-redux"; -import * as React from "react"; -import _ from "lodash"; -import dimensions from "oxalis/model/dimensions"; -import { - deleteNodeAsUserAction, - createTreeAction, - createBranchPointAction, - requestDeleteBranchPointAction, - toggleAllTreesAction, - toggleInactiveTreesAction, -} from "oxalis/model/actions/skeletontracing_actions"; -import { addUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; import { InputKeyboard, InputKeyboardNoLoop, InputMouse, type MouseBindingMap } from "libs/input"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; import { document } from "libs/window"; +import _ from "lodash"; +import type { AnnotationTool, OrthoView, OrthoViewMap } from "oxalis/constants"; +import { AnnotationToolEnum, OrthoViewValuesWithoutTDView, OrthoViews } from "oxalis/constants"; +import * as MoveHandlers from "oxalis/controller/combinations/move_handlers"; +import * as SkeletonHandlers from "oxalis/controller/combinations/skeleton_handlers"; +import { + AreaMeasurementTool, + BoundingBoxTool, + DrawTool, + EraseTool, + FillCellTool, + LineMeasurementTool, + MoveTool, + PickCellTool, + ProofreadTool, + QuickSelectTool, + SkeletonTool, +} from "oxalis/controller/combinations/tool_controls"; +import * as VolumeHandlers from "oxalis/controller/combinations/volume_handlers"; +import getSceneController from "oxalis/controller/scene_controller_provider"; +import TDController from "oxalis/controller/td_controller"; import { - getPosition, getActiveMagIndexForLayer, getMoveOffset, + getPosition, } from "oxalis/model/accessors/flycam_accessor"; -import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; -import { setViewportAction } from "oxalis/model/actions/view_mode_actions"; +import { calculateGlobalPos } 
from "oxalis/model/accessors/view_mode_accessor"; +import { + getActiveSegmentationTracing, + getMaximumBrushSize, +} from "oxalis/model/accessors/volumetracing_accessor"; +import { addUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; -import { Model, api } from "oxalis/singletons"; -import PlaneView from "oxalis/view/plane_view"; -import type { BrushPresets, OxalisState, Tracing } from "oxalis/store"; -import Store from "oxalis/store"; -import TDController from "oxalis/controller/td_controller"; -import Toast from "libs/toast"; -import * as Utils from "libs/utils"; import { - createCellAction, - interpolateSegmentationLayerAction, -} from "oxalis/model/actions/volumetracing_actions"; + createBranchPointAction, + createTreeAction, + deleteNodeAsUserAction, + requestDeleteBranchPointAction, + toggleAllTreesAction, + toggleInactiveTreesAction, +} from "oxalis/model/actions/skeletontracing_actions"; import { cycleToolAction, enterAction, escapeAction, setToolAction, } from "oxalis/model/actions/ui_actions"; +import { setViewportAction } from "oxalis/model/actions/view_mode_actions"; import { - MoveTool, - SkeletonTool, - DrawTool, - EraseTool, - PickCellTool, - FillCellTool, - BoundingBoxTool, - QuickSelectTool, - ProofreadTool, - LineMeasurementTool, - AreaMeasurementTool, -} from "oxalis/controller/combinations/tool_controls"; -import type { OrthoView, OrthoViewMap, AnnotationTool } from "oxalis/constants"; -import { OrthoViewValuesWithoutTDView, OrthoViews, AnnotationToolEnum } from "oxalis/constants"; -import { calculateGlobalPos } from "oxalis/model/accessors/view_mode_accessor"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import * as SkeletonHandlers from "oxalis/controller/combinations/skeleton_handlers"; -import * as VolumeHandlers from "oxalis/controller/combinations/volume_handlers"; -import * as MoveHandlers from 
"oxalis/controller/combinations/move_handlers"; -import { downloadScreenshot } from "oxalis/view/rendering_utils"; -import { - getActiveSegmentationTracing, - getMaximumBrushSize, -} from "oxalis/model/accessors/volumetracing_accessor"; -import { showToastWarningForLargestSegmentIdMissing } from "oxalis/view/largest_segment_id_modal"; + createCellAction, + interpolateSegmentationLayerAction, +} from "oxalis/model/actions/volumetracing_actions"; +import dimensions from "oxalis/model/dimensions"; +import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; +import { Model, api } from "oxalis/singletons"; +import type { BrushPresets, OxalisState, Tracing } from "oxalis/store"; +import Store from "oxalis/store"; import { getDefaultBrushSizes } from "oxalis/view/action-bar/toolbar_view"; +import { showToastWarningForLargestSegmentIdMissing } from "oxalis/view/largest_segment_id_modal"; +import PlaneView from "oxalis/view/plane_view"; +import { downloadScreenshot } from "oxalis/view/rendering_utils"; +import * as React from "react"; +import { connect } from "react-redux"; import { userSettings } from "types/schemas/user_settings.schema"; import { highlightAndSetCursorOnHoveredBoundingBox } from "../combinations/bounding_box_handlers"; diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index ebecb960a9b..7d073b33558 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -1,5 +1,3 @@ -import type { OxalisState } from "oxalis/store"; -import { defaultDatasetViewConfigurationWithoutNull } from "types/schemas/dataset_view_configuration.schema"; import Constants, { ControlModeEnum, OrthoViews, @@ -9,13 +7,15 @@ import Constants, { InterpolationModeEnum, UnitLong, } from "oxalis/constants"; +import constants from "oxalis/constants"; +import type { OxalisState } from "oxalis/store"; +import { getSystemColorTheme } from "theme"; import type { 
APIAllowedMode, APIAnnotationType, APIAnnotationVisibility, } from "types/api_flow_types"; -import constants from "oxalis/constants"; -import { getSystemColorTheme } from "theme"; +import { defaultDatasetViewConfigurationWithoutNull } from "types/schemas/dataset_view_configuration.schema"; const defaultViewportRect = { top: 0, diff --git a/frontend/javascripts/oxalis/geometries/arbitrary_plane.ts b/frontend/javascripts/oxalis/geometries/arbitrary_plane.ts index b7006eb94d0..cc1ef5151ca 100644 --- a/frontend/javascripts/oxalis/geometries/arbitrary_plane.ts +++ b/frontend/javascripts/oxalis/geometries/arbitrary_plane.ts @@ -1,11 +1,11 @@ -import * as THREE from "three"; import _ from "lodash"; -import { getZoomedMatrix } from "oxalis/model/accessors/flycam_accessor"; -import PlaneMaterialFactory from "oxalis/geometries/materials/plane_material_factory"; -import Store from "oxalis/store"; import constants, { OrthoViews } from "oxalis/constants"; import getSceneController from "oxalis/controller/scene_controller_provider"; +import PlaneMaterialFactory from "oxalis/geometries/materials/plane_material_factory"; +import { getZoomedMatrix } from "oxalis/model/accessors/flycam_accessor"; import shaderEditor from "oxalis/model/helpers/shader_editor"; +import Store from "oxalis/store"; +import * as THREE from "three"; // Let's set up our trianglesplane. // It serves as a "canvas" where the brain images // are drawn. 
diff --git a/frontend/javascripts/oxalis/geometries/crosshair.ts b/frontend/javascripts/oxalis/geometries/crosshair.ts index d01ae01b6ed..27825d58b76 100644 --- a/frontend/javascripts/oxalis/geometries/crosshair.ts +++ b/frontend/javascripts/oxalis/geometries/crosshair.ts @@ -1,6 +1,6 @@ -import * as THREE from "three"; import { getZoomedMatrix } from "oxalis/model/accessors/flycam_accessor"; import Store from "oxalis/store"; +import * as THREE from "three"; class Crosshair { mesh: THREE.Group; diff --git a/frontend/javascripts/oxalis/geometries/cube.ts b/frontend/javascripts/oxalis/geometries/cube.ts index 75c7e9e4c38..a749060b909 100644 --- a/frontend/javascripts/oxalis/geometries/cube.ts +++ b/frontend/javascripts/oxalis/geometries/cube.ts @@ -1,12 +1,12 @@ -import * as THREE from "three"; +import app from "app"; import _ from "lodash"; -import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import type { OrthoView, OrthoViewWithoutTDMap, Vector3 } from "oxalis/constants"; import { OrthoViewValuesWithoutTDView, OrthoViews } from "oxalis/constants"; import { getPosition } from "oxalis/model/accessors/flycam_accessor"; -import Store from "oxalis/throttled_store"; -import app from "app"; import dimensions from "oxalis/model/dimensions"; +import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; +import Store from "oxalis/throttled_store"; +import * as THREE from "three"; type Properties = { min?: Vector3; max: Vector3; diff --git a/frontend/javascripts/oxalis/geometries/helper_geometries.ts b/frontend/javascripts/oxalis/geometries/helper_geometries.ts index 3631959f3da..8af50ca7079 100644 --- a/frontend/javascripts/oxalis/geometries/helper_geometries.ts +++ b/frontend/javascripts/oxalis/geometries/helper_geometries.ts @@ -1,11 +1,11 @@ -import * as THREE from "three"; -import { type OrthoView, OrthoViews, type Vector3 } from "oxalis/constants"; -import ResizableBuffer from "libs/resizable_buffer"; import app from "app"; 
import { V3 } from "libs/mjs"; -import Store from "oxalis/store"; +import ResizableBuffer from "libs/resizable_buffer"; +import { type OrthoView, OrthoViews, type Vector3 } from "oxalis/constants"; import Dimensions from "oxalis/model/dimensions"; import { getBaseVoxelInUnit } from "oxalis/model/scaleinfo"; +import Store from "oxalis/store"; +import * as THREE from "three"; export const CONTOUR_COLOR_NORMAL = new THREE.Color(0x0000ff); export const CONTOUR_COLOR_DELETE = new THREE.Color(0xff0000); diff --git a/frontend/javascripts/oxalis/geometries/materials/edge_shader.ts b/frontend/javascripts/oxalis/geometries/materials/edge_shader.ts index 5983bdc256a..ae6b471d830 100644 --- a/frontend/javascripts/oxalis/geometries/materials/edge_shader.ts +++ b/frontend/javascripts/oxalis/geometries/materials/edge_shader.ts @@ -1,17 +1,17 @@ -import * as THREE from "three"; +import { M4x4 } from "libs/mjs"; +import type TPS3D from "libs/thin_plate_spline"; +import _ from "lodash"; import { COLOR_TEXTURE_WIDTH_FIXED } from "oxalis/geometries/materials/node_shader"; import type { Uniforms } from "oxalis/geometries/materials/plane_material_factory"; +import { getTransformsForSkeletonLayer } from "oxalis/model/accessors/dataset_accessor"; import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import shaderEditor from "oxalis/model/helpers/shader_editor"; -import { Store } from "oxalis/singletons"; -import _ from "lodash"; -import { getTransformsForSkeletonLayer } from "oxalis/model/accessors/dataset_accessor"; -import { M4x4 } from "libs/mjs"; import { generateCalculateTpsOffsetFunction, generateTpsInitialization, } from "oxalis/shaders/thin_plate_spline.glsl"; -import type TPS3D from "libs/thin_plate_spline"; +import { Store } from "oxalis/singletons"; +import * as THREE from "three"; class EdgeShader { material: THREE.RawShaderMaterial; diff --git a/frontend/javascripts/oxalis/geometries/materials/node_shader.ts 
b/frontend/javascripts/oxalis/geometries/materials/node_shader.ts index 84cec0fe626..8364bdcf1f7 100644 --- a/frontend/javascripts/oxalis/geometries/materials/node_shader.ts +++ b/frontend/javascripts/oxalis/geometries/materials/node_shader.ts @@ -1,20 +1,20 @@ -import * as THREE from "three"; +import { M4x4 } from "libs/mjs"; +import type TPS3D from "libs/thin_plate_spline"; +import _ from "lodash"; import { ViewModeValues, ViewModeValuesIndices } from "oxalis/constants"; import type { Uniforms } from "oxalis/geometries/materials/plane_material_factory"; -import { getBaseVoxelInUnit } from "oxalis/model/scaleinfo"; +import { getTransformsForSkeletonLayer } from "oxalis/model/accessors/dataset_accessor"; import { getZoomValue } from "oxalis/model/accessors/flycam_accessor"; import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; -import { Store } from "oxalis/singletons"; import shaderEditor from "oxalis/model/helpers/shader_editor"; -import _ from "lodash"; -import { formatNumberAsGLSLFloat } from "oxalis/shaders/utils.glsl"; -import { getTransformsForSkeletonLayer } from "oxalis/model/accessors/dataset_accessor"; -import { M4x4 } from "libs/mjs"; +import { getBaseVoxelInUnit } from "oxalis/model/scaleinfo"; import { generateCalculateTpsOffsetFunction, generateTpsInitialization, } from "oxalis/shaders/thin_plate_spline.glsl"; -import type TPS3D from "libs/thin_plate_spline"; +import { formatNumberAsGLSLFloat } from "oxalis/shaders/utils.glsl"; +import { Store } from "oxalis/singletons"; +import * as THREE from "three"; export const NodeTypes = { INVALID: 0.0, diff --git a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.ts b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.ts index aabec1e2429..5d79b46e7e2 100644 --- a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.ts +++ b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.ts @@ -1,58 +1,58 @@ 
-import * as THREE from "three"; +import app from "app"; +import { CuckooTableVec3 } from "libs/cuckoo/cuckoo_table_vec3"; +import { V3 } from "libs/mjs"; +import type TPS3D from "libs/thin_plate_spline"; +import * as Utils from "libs/utils"; import _ from "lodash"; import { BLEND_MODES, Identity4x4, type OrthoView, type Vector3 } from "oxalis/constants"; import { - ViewModeValues, + AnnotationToolEnum, + MappingStatusEnum, OrthoViewValues, OrthoViews, - MappingStatusEnum, - AnnotationToolEnum, + ViewModeValues, } from "oxalis/constants"; -import { calculateGlobalPos, getViewportExtents } from "oxalis/model/accessors/view_mode_accessor"; -import { isBrushTool } from "oxalis/model/accessors/tool_accessor"; -import { - getActiveCellId, - getActiveSegmentationTracing, - getActiveSegmentPosition, - getBucketRetrievalSourceFn, - needsLocalHdf5Mapping, -} from "oxalis/model/accessors/volumetracing_accessor"; -import { getPackingDegree } from "oxalis/model/bucket_data_handling/data_rendering_logic"; import { + getByteCount, getColorLayers, getDataLayers, - getByteCount, - getElementClass, getDatasetBoundingBox, + getElementClass, getEnabledLayers, - getSegmentationLayerWithMappingSupport, - getMappingInfoForSupportedLayer, - getVisibleSegmentationLayer, getLayerByName, - invertAndTranspose, - getTransformsForLayer, - getMagInfoByLayer, getMagInfo, + getMagInfoByLayer, + getMappingInfoForSupportedLayer, + getSegmentationLayerWithMappingSupport, + getTransformsForLayer, getTransformsPerLayer, + getVisibleSegmentationLayer, + invertAndTranspose, } from "oxalis/model/accessors/dataset_accessor"; import { getActiveMagIndicesForLayers, getUnrenderableLayerInfosForCurrentZoom, getZoomValue, } from "oxalis/model/accessors/flycam_accessor"; +import { isBrushTool } from "oxalis/model/accessors/tool_accessor"; +import { calculateGlobalPos, getViewportExtents } from "oxalis/model/accessors/view_mode_accessor"; +import { + getActiveCellId, + getActiveSegmentPosition, + 
getActiveSegmentationTracing, + getBucketRetrievalSourceFn, + needsLocalHdf5Mapping, +} from "oxalis/model/accessors/volumetracing_accessor"; +import { getPackingDegree } from "oxalis/model/bucket_data_handling/data_rendering_logic"; +import { getGlobalLayerIndexForLayerName } from "oxalis/model/bucket_data_handling/layer_rendering_manager"; import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; +import shaderEditor from "oxalis/model/helpers/shader_editor"; +import getMainFragmentShader, { getMainVertexShader } from "oxalis/shaders/main_data_shaders.glsl"; import { Model } from "oxalis/singletons"; import type { DatasetLayerConfiguration } from "oxalis/store"; import Store from "oxalis/store"; -import * as Utils from "libs/utils"; -import app from "app"; -import getMainFragmentShader, { getMainVertexShader } from "oxalis/shaders/main_data_shaders.glsl"; -import shaderEditor from "oxalis/model/helpers/shader_editor"; +import * as THREE from "three"; import type { ElementClass } from "types/api_flow_types"; -import { CuckooTableVec3 } from "libs/cuckoo/cuckoo_table_vec3"; -import { getGlobalLayerIndexForLayerName } from "oxalis/model/bucket_data_handling/layer_rendering_manager"; -import { V3 } from "libs/mjs"; -import type TPS3D from "libs/thin_plate_spline"; type ShaderMaterialOptions = { polygonOffset?: boolean; diff --git a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory_helpers.ts b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory_helpers.ts index e835a529bb0..2b29b5f1478 100644 --- a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory_helpers.ts +++ b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory_helpers.ts @@ -1,5 +1,5 @@ -import * as THREE from "three"; import UpdatableTexture from "libs/UpdatableTexture"; +import * as THREE from "three"; function channelCountToFormat(channelCount: number, type: THREE.TextureDataType) { switch (channelCount) { 
diff --git a/frontend/javascripts/oxalis/geometries/plane.ts b/frontend/javascripts/oxalis/geometries/plane.ts index 78369e54852..6bef85c7377 100644 --- a/frontend/javascripts/oxalis/geometries/plane.ts +++ b/frontend/javascripts/oxalis/geometries/plane.ts @@ -1,9 +1,4 @@ -import * as THREE from "three"; import _ from "lodash"; -import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; -import Dimensions from "oxalis/model/dimensions"; -import PlaneMaterialFactory from "oxalis/geometries/materials/plane_material_factory"; -import Store from "oxalis/store"; import type { OrthoView, Vector3 } from "oxalis/constants"; import constants, { OrthoViewColors, @@ -11,6 +6,11 @@ import constants, { OrthoViewGrayCrosshairColor, OrthoViewValues, } from "oxalis/constants"; +import PlaneMaterialFactory from "oxalis/geometries/materials/plane_material_factory"; +import Dimensions from "oxalis/model/dimensions"; +import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; +import Store from "oxalis/store"; +import * as THREE from "three"; // A subdivision of 100 means that there will be 100 segments per axis // and thus 101 vertices per axis (i.e., the vertex shader is executed 101**2). 
diff --git a/frontend/javascripts/oxalis/geometries/skeleton.ts b/frontend/javascripts/oxalis/geometries/skeleton.ts index 77941c66f29..a97e2484506 100644 --- a/frontend/javascripts/oxalis/geometries/skeleton.ts +++ b/frontend/javascripts/oxalis/geometries/skeleton.ts @@ -1,19 +1,19 @@ -import * as THREE from "three"; -import _ from "lodash"; import type Maybe from "data.maybe"; -import type { Tree, Node, Edge, OxalisState, SkeletonTracing } from "oxalis/store"; +import * as Utils from "libs/utils"; +import _ from "lodash"; import type { Vector3, Vector4 } from "oxalis/constants"; -import { cachedDiffTrees } from "oxalis/model/sagas/skeletontracing_saga"; -import { getZoomValue } from "oxalis/model/accessors/flycam_accessor"; import EdgeShader from "oxalis/geometries/materials/edge_shader"; import NodeShader, { NodeTypes, COLOR_TEXTURE_WIDTH, } from "oxalis/geometries/materials/node_shader"; +import { getZoomValue } from "oxalis/model/accessors/flycam_accessor"; +import { cachedDiffTrees } from "oxalis/model/sagas/skeletontracing_saga"; +import type { CreateActionNode, UpdateActionNode } from "oxalis/model/sagas/update_actions"; +import type { Edge, Node, OxalisState, SkeletonTracing, Tree } from "oxalis/store"; import Store from "oxalis/throttled_store"; -import * as Utils from "libs/utils"; +import * as THREE from "three"; import type { AdditionalCoordinate } from "types/api_flow_types"; -import type { CreateActionNode, UpdateActionNode } from "oxalis/model/sagas/update_actions"; const MAX_CAPACITY = 1000; diff --git a/frontend/javascripts/oxalis/merger_mode.ts b/frontend/javascripts/oxalis/merger_mode.ts index 5a4176e4589..d185e0b66f4 100644 --- a/frontend/javascripts/oxalis/merger_mode.ts +++ b/frontend/javascripts/oxalis/merger_mode.ts @@ -1,12 +1,7 @@ import _ from "lodash"; -import type { - DeleteNodeUpdateAction, - NodeWithTreeId, - UpdateActionNode, -} from "oxalis/model/sagas/update_actions"; -import type { TreeMap, SkeletonTracing, OxalisState, StoreType 
} from "oxalis/store"; +import messages from "messages"; +import type { UnregisterHandler } from "oxalis/api/api_latest"; import type { Vector3 } from "oxalis/constants"; -import { cachedDiffTrees } from "oxalis/model/sagas/skeletontracing_saga"; import { getInverseSegmentationTransformer, getVisibleSegmentationLayer, @@ -16,13 +11,18 @@ import { getSkeletonTracing, transformNodePosition, } from "oxalis/model/accessors/skeletontracing_accessor"; -import Store from "oxalis/throttled_store"; -import { api } from "oxalis/singletons"; -import messages from "messages"; -import type { UnregisterHandler } from "oxalis/api/api_latest"; import type { Action } from "oxalis/model/actions/actions"; -import type { CreateNodeAction } from "./model/actions/skeletontracing_actions"; +import { cachedDiffTrees } from "oxalis/model/sagas/skeletontracing_saga"; +import type { + DeleteNodeUpdateAction, + NodeWithTreeId, + UpdateActionNode, +} from "oxalis/model/sagas/update_actions"; +import { api } from "oxalis/singletons"; +import type { OxalisState, SkeletonTracing, StoreType, TreeMap } from "oxalis/store"; +import Store from "oxalis/throttled_store"; import type { AdditionalCoordinate } from "types/api_flow_types"; +import type { CreateNodeAction } from "./model/actions/skeletontracing_actions"; type MergerModeState = { treeIdToRepresentativeSegmentId: Record; diff --git a/frontend/javascripts/oxalis/model.ts b/frontend/javascripts/oxalis/model.ts index 07f84588d31..4aea86a0b8c 100644 --- a/frontend/javascripts/oxalis/model.ts +++ b/frontend/javascripts/oxalis/model.ts @@ -1,22 +1,22 @@ +import { isDatasetAccessibleBySwitching } from "admin/admin_rest_api"; +import * as Utils from "libs/utils"; import _ from "lodash"; import type { Vector3 } from "oxalis/constants"; -import { getActiveSegmentationTracingLayer } from "oxalis/model/accessors/volumetracing_accessor"; -import { getActiveMagIndexForLayer } from "oxalis/model/accessors/flycam_accessor"; import { - 
getSegmentationLayerWithMappingSupport, getLayerByName, + getSegmentationLayerWithMappingSupport, isLayerVisible, } from "oxalis/model/accessors/dataset_accessor"; -import { getTotalSaveQueueLength } from "oxalis/model/reducers/save_reducer"; -import { isDatasetAccessibleBySwitching } from "admin/admin_rest_api"; +import { getActiveMagIndexForLayer } from "oxalis/model/accessors/flycam_accessor"; +import { getActiveSegmentationTracingLayer } from "oxalis/model/accessors/volumetracing_accessor"; import { saveNowAction } from "oxalis/model/actions/save_actions"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; -import type DataLayer from "oxalis/model/data_layer"; import type LayerRenderingManager from "oxalis/model/bucket_data_handling/layer_rendering_manager"; import type PullQueue from "oxalis/model/bucket_data_handling/pullqueue"; +import type DataLayer from "oxalis/model/data_layer"; +import { getTotalSaveQueueLength } from "oxalis/model/reducers/save_reducer"; import type { TraceOrViewCommand } from "oxalis/store"; import Store from "oxalis/store"; -import * as Utils from "libs/utils"; import type { APICompoundType } from "types/api_flow_types"; import { initialize } from "./model_initialization"; diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts index 90fac26ea8f..d5fc3a89b8b 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts @@ -1,7 +1,28 @@ +import ErrorHandling from "libs/error_handling"; +import { formatExtentInUnitWithLength, formatNumberToLength } from "libs/format_utils"; +import { M4x4, type Matrix4x4, V3 } from "libs/mjs"; +import MultiKeyMap from "libs/multi_key_map"; +import { aggregateBoundingBox, maxValue } from "libs/utils"; import _ from "lodash"; import memoizeOne from "memoize-one"; +import messages from "messages"; +import { + 
IdentityTransform, + LongUnitToShortUnitMap, + type Vector3, + type Vector4, + type ViewMode, +} from "oxalis/constants"; +import constants, { ViewModeValues, Vector3Indicies, MappingStatusEnum } from "oxalis/constants"; +import type { + ActiveMappingInfo, + BoundingBoxObject, + DataLayerType, + DatasetConfiguration, + OxalisState, + Settings, +} from "oxalis/store"; import type { - AdditionalAxis, APIAllowedMode, APIDataLayer, APIDataset, @@ -9,39 +30,18 @@ import type { APIMaybeUnimportedDataset, APISegmentationLayer, APISkeletonLayer, + AdditionalAxis, ElementClass, } from "types/api_flow_types"; -import type { - Settings, - DataLayerType, - DatasetConfiguration, - BoundingBoxObject, - OxalisState, - ActiveMappingInfo, -} from "oxalis/store"; -import ErrorHandling from "libs/error_handling"; -import { - IdentityTransform, - LongUnitToShortUnitMap, - type Vector3, - type Vector4, - type ViewMode, -} from "oxalis/constants"; -import constants, { ViewModeValues, Vector3Indicies, MappingStatusEnum } from "oxalis/constants"; -import { aggregateBoundingBox, maxValue } from "libs/utils"; -import { formatExtentInUnitWithLength, formatNumberToLength } from "libs/format_utils"; -import messages from "messages"; import type { DataLayer } from "types/schemas/datasource.types"; import BoundingBox from "../bucket_data_handling/bounding_box"; -import { M4x4, type Matrix4x4, V3 } from "libs/mjs"; -import { convertToDenseMag, MagInfo } from "../helpers/mag_info"; -import MultiKeyMap from "libs/multi_key_map"; +import { MagInfo, convertToDenseMag } from "../helpers/mag_info"; import { + type Transform, chainTransforms, createAffineTransformFromMatrix, createThinPlateSplineTransform, invertTransform, - type Transform, transformPointUnscaled, } from "../helpers/transformation_helpers"; diff --git a/frontend/javascripts/oxalis/model/accessors/flycam_accessor.ts b/frontend/javascripts/oxalis/model/accessors/flycam_accessor.ts index c3040f57e8f..d3c1ecd0e94 100644 --- 
a/frontend/javascripts/oxalis/model/accessors/flycam_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/flycam_accessor.ts @@ -1,22 +1,8 @@ -import * as THREE from "three"; -import _ from "lodash"; -import memoizeOne from "memoize-one"; -import type { DataLayerType, Flycam, LoadingStrategy, OxalisState } from "oxalis/store"; import type { Matrix4x4 } from "libs/mjs"; import { M4x4 } from "libs/mjs"; -import { getViewportRects } from "oxalis/model/accessors/view_mode_accessor"; -import { - getColorLayers, - getDataLayers, - getEnabledLayers, - getLayerByName, - getMaxZoomStep, - getMagInfo, - getTransformsForLayer, - invertAndTranspose, -} from "oxalis/model/accessors/dataset_accessor"; import { map3, mod } from "libs/utils"; -import Dimensions from "oxalis/model/dimensions"; +import _ from "lodash"; +import memoizeOne from "memoize-one"; import type { OrthoView, OrthoViewMap, @@ -27,16 +13,30 @@ import type { ViewMode, } from "oxalis/constants"; import constants, { OrthoViews } from "oxalis/constants"; +import { + getColorLayers, + getDataLayers, + getEnabledLayers, + getLayerByName, + getMagInfo, + getMaxZoomStep, + getTransformsForLayer, + invertAndTranspose, +} from "oxalis/model/accessors/dataset_accessor"; +import { getViewportRects } from "oxalis/model/accessors/view_mode_accessor"; import determineBucketsForFlight from "oxalis/model/bucket_data_handling/bucket_picker_strategies/flight_bucket_picker"; import determineBucketsForOblique from "oxalis/model/bucket_data_handling/bucket_picker_strategies/oblique_bucket_picker"; -import * as scaleInfo from "oxalis/model/scaleinfo"; -import { reuseInstanceOnEquality } from "./accessor_helpers"; -import { baseDatasetViewConfiguration } from "types/schemas/dataset_view_configuration.schema"; import { MAX_ZOOM_STEP_DIFF } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; -import { getMatrixScale, rotateOnAxis } from "../reducers/flycam_reducer"; -import type { SmallerOrHigherInfo } from 
"../helpers/mag_info"; +import Dimensions from "oxalis/model/dimensions"; +import * as scaleInfo from "oxalis/model/scaleinfo"; import { getBaseVoxelInUnit } from "oxalis/model/scaleinfo"; +import type { DataLayerType, Flycam, LoadingStrategy, OxalisState } from "oxalis/store"; +import * as THREE from "three"; import type { AdditionalCoordinate, VoxelSize } from "types/api_flow_types"; +import { baseDatasetViewConfiguration } from "types/schemas/dataset_view_configuration.schema"; +import type { SmallerOrHigherInfo } from "../helpers/mag_info"; +import { getMatrixScale, rotateOnAxis } from "../reducers/flycam_reducer"; +import { reuseInstanceOnEquality } from "./accessor_helpers"; export const ZOOM_STEP_INTERVAL = 1.1; diff --git a/frontend/javascripts/oxalis/model/accessors/organization_accessors.ts b/frontend/javascripts/oxalis/model/accessors/organization_accessors.ts index 823d5cc8f5b..7d3e6077f10 100644 --- a/frontend/javascripts/oxalis/model/accessors/organization_accessors.ts +++ b/frontend/javascripts/oxalis/model/accessors/organization_accessors.ts @@ -1,5 +1,5 @@ -import type { APIOrganization } from "types/api_flow_types"; import messages from "messages"; +import type { APIOrganization } from "types/api_flow_types"; export function enforceActiveOrganization( activeOrganization: APIOrganization | null, diff --git a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts index 44eb0c995ef..d1e55ea8e5d 100644 --- a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts @@ -1,34 +1,34 @@ import Maybe from "data.maybe"; import _ from "lodash"; -import { - type ServerTracing, - type ServerSkeletonTracing, - type APIAnnotation, - type AnnotationLayerDescriptor, - AnnotationLayerEnum, -} from "types/api_flow_types"; +import type { TreeType, Vector3 } from "oxalis/constants"; 
import type { - Tracing, + BranchPoint, + Node, + NumberLike, + OxalisState, SkeletonTracing, + Tracing, Tree, - TreeMap, - BranchPoint, TreeGroup, TreeGroupTypeFlat, - Node, - OxalisState, - NumberLike, + TreeMap, } from "oxalis/store"; import { - findGroup, MISSING_GROUP_ID, + findGroup, } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; -import type { TreeType, Vector3 } from "oxalis/constants"; +import { + type APIAnnotation, + type AnnotationLayerDescriptor, + AnnotationLayerEnum, + type ServerSkeletonTracing, + type ServerTracing, +} from "types/api_flow_types"; +import { invertTransform, transformPointUnscaled } from "../helpers/transformation_helpers"; import { getTransformsForSkeletonLayer, getTransformsForSkeletonLayerOrNull, } from "./dataset_accessor"; -import { invertTransform, transformPointUnscaled } from "../helpers/transformation_helpers"; export function getSkeletonTracing(tracing: Tracing): Maybe { if (tracing.skeleton != null) { diff --git a/frontend/javascripts/oxalis/model/accessors/tool_accessor.ts b/frontend/javascripts/oxalis/model/accessors/tool_accessor.ts index 35dfe4c2b6b..f7575ec5874 100644 --- a/frontend/javascripts/oxalis/model/accessors/tool_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/tool_accessor.ts @@ -1,7 +1,16 @@ +import { + PricingPlanEnum, + getFeatureNotAvailableInPlanMessage, + isFeatureAllowedByPricingPlan, +} from "admin/organization/pricing_plan_utils"; import memoizeOne from "memoize-one"; import { type AnnotationTool, IdentityTransform } from "oxalis/constants"; import { AnnotationToolEnum } from "oxalis/constants"; -import type { OxalisState } from "oxalis/store"; +import { + getTransformsPerLayer, + getVisibleSegmentationLayer, +} from "oxalis/model/accessors/dataset_accessor"; +import { isMagRestrictionViolated } from "oxalis/model/accessors/flycam_accessor"; import { type AgglomerateState, getActiveSegmentationTracing, @@ -9,19 +18,10 @@ import { hasAgglomerateMapping, 
isVolumeAnnotationDisallowedForZoom, } from "oxalis/model/accessors/volumetracing_accessor"; -import { - getTransformsPerLayer, - getVisibleSegmentationLayer, -} from "oxalis/model/accessors/dataset_accessor"; -import { isMagRestrictionViolated } from "oxalis/model/accessors/flycam_accessor"; +import type { OxalisState } from "oxalis/store"; import type { APIOrganization, APIUser } from "types/api_flow_types"; -import { - getFeatureNotAvailableInPlanMessage, - isFeatureAllowedByPricingPlan, - PricingPlanEnum, -} from "admin/organization/pricing_plan_utils"; -import { isSkeletonLayerTransformed } from "./skeletontracing_accessor"; import { reuseInstanceOnEquality } from "./accessor_helpers"; +import { isSkeletonLayerTransformed } from "./skeletontracing_accessor"; const zoomInToUseToolMessage = "Please zoom in further to use this tool. If you want to edit volume data on this zoom level, create an annotation with restricted magnifications from the extended annotation menu in the dashboard."; diff --git a/frontend/javascripts/oxalis/model/accessors/tracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/tracing_accessor.ts index 3cc0b15b089..60565bb4638 100644 --- a/frontend/javascripts/oxalis/model/accessors/tracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/tracing_accessor.ts @@ -1,3 +1,5 @@ +import type { Vector3 } from "oxalis/constants"; +import type { SaveQueueType } from "oxalis/model/actions/save_actions"; import type { EditableMapping, OxalisState, @@ -9,9 +11,7 @@ import type { } from "oxalis/store"; import type { ServerTracing, TracingType } from "types/api_flow_types"; import { TracingTypeEnum } from "types/api_flow_types"; -import type { SaveQueueType } from "oxalis/model/actions/save_actions"; import BoundingBox from "../bucket_data_handling/bounding_box"; -import type { Vector3 } from "oxalis/constants"; export function maybeGetSomeTracing( tracing: Tracing, diff --git 
a/frontend/javascripts/oxalis/model/accessors/user_accessor.ts b/frontend/javascripts/oxalis/model/accessors/user_accessor.ts index 9ce49874e65..d8fc963b6a5 100644 --- a/frontend/javascripts/oxalis/model/accessors/user_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/user_accessor.ts @@ -1,7 +1,7 @@ -import type { APIUser, APIUserBase } from "types/api_flow_types"; -import messages from "messages"; import _ from "lodash"; import memoizeOne from "memoize-one"; +import messages from "messages"; +import type { APIUser, APIUserBase } from "types/api_flow_types"; export function enforceActiveUser(activeUser: APIUser | null | undefined): APIUser { if (activeUser) { diff --git a/frontend/javascripts/oxalis/model/accessors/view_mode_accessor.ts b/frontend/javascripts/oxalis/model/accessors/view_mode_accessor.ts index ae3d4236d09..33d0a79f08e 100644 --- a/frontend/javascripts/oxalis/model/accessors/view_mode_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/view_mode_accessor.ts @@ -1,25 +1,25 @@ -import memoizeOne from "memoize-one"; +import { V3 } from "libs/mjs"; import _ from "lodash"; -import type { Flycam, OxalisState } from "oxalis/store"; +import memoizeOne from "memoize-one"; import type { - OrthoViewExtents, - Rect, - Viewport, OrthoView, + OrthoViewExtents, Point2, + Rect, + Vector2, Vector3, ViewMode, - Vector2, + Viewport, } from "oxalis/constants"; import constants, { ArbitraryViewport, OrthoViews, OrthoViewValuesWithoutTDView, } from "oxalis/constants"; -import { V3 } from "libs/mjs"; -import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; -import { getPosition } from "oxalis/model/accessors/flycam_accessor"; import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers"; +import { getPosition } from "oxalis/model/accessors/flycam_accessor"; +import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; +import type { Flycam, OxalisState } from "oxalis/store"; export function 
getTDViewportSize(state: OxalisState): [number, number] { const camera = state.viewModeData.plane.tdCamera; diff --git a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.ts index a3811acae5a..4534f4c33c1 100644 --- a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.ts @@ -1,26 +1,7 @@ +import { V3 } from "libs/mjs"; +import _ from "lodash"; import memoizeOne from "memoize-one"; -import type { - APIAnnotation, - APIAnnotationInfo, - APIDataLayer, - APIDataset, - APISegmentationLayer, - AdditionalCoordinate, - AnnotationLayerDescriptor, - ServerTracing, - ServerVolumeTracing, -} from "types/api_flow_types"; -import type { - ActiveMappingInfo, - HybridTracing, - LabelAction, - OxalisState, - Segment, - SegmentGroup, - SegmentMap, - Tracing, - VolumeTracing, -} from "oxalis/store"; +import messages from "messages"; import { type AnnotationTool, type ContourMode, @@ -29,36 +10,55 @@ import { type Vector4, } from "oxalis/constants"; import { AnnotationToolEnum, VolumeTools } from "oxalis/constants"; +import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers"; import { - getMappingInfo, + getDataLayers, + getLayerByName, getMagInfo, + getMappingInfo, getSegmentationLayerByName, getSegmentationLayers, - getVisibleSegmentationLayer, - getDataLayers, - getLayerByName, getVisibleOrLastSegmentationLayer, + getVisibleSegmentationLayer, } from "oxalis/model/accessors/dataset_accessor"; -import { MAX_ZOOM_STEP_DIFF } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; import { - getFlooredPosition, getActiveMagIndexForLayer, getAdditionalCoordinatesAsString, + getFlooredPosition, } from "oxalis/model/accessors/flycam_accessor"; -import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers"; -import { V3 } from "libs/mjs"; +import { 
MAX_ZOOM_STEP_DIFF } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; import { jsConvertCellIdToRGBA } from "oxalis/shaders/segmentation.glsl"; import { jsRgb2hsl } from "oxalis/shaders/utils.glsl"; -import { MagInfo } from "../helpers/mag_info"; -import messages from "messages"; +import { Store } from "oxalis/singletons"; +import type { + ActiveMappingInfo, + HybridTracing, + LabelAction, + OxalisState, + Segment, + SegmentGroup, + SegmentMap, + Tracing, + VolumeTracing, +} from "oxalis/store"; +import type { SegmentHierarchyNode } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; import { MISSING_GROUP_ID, getGroupByIdWithSubgroups, } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; -import { Store } from "oxalis/singletons"; +import type { + APIAnnotation, + APIAnnotationInfo, + APIDataLayer, + APIDataset, + APISegmentationLayer, + AdditionalCoordinate, + AnnotationLayerDescriptor, + ServerTracing, + ServerVolumeTracing, +} from "types/api_flow_types"; import { setSelectedSegmentsOrGroupAction } from "../actions/volumetracing_actions"; -import _ from "lodash"; -import type { SegmentHierarchyNode } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import { MagInfo } from "../helpers/mag_info"; export function getVolumeTracings(tracing: Tracing): Array { return tracing.volumes; diff --git a/frontend/javascripts/oxalis/model/actions/actions.ts b/frontend/javascripts/oxalis/model/actions/actions.ts index 96ff28c2297..9ce4cd51a06 100644 --- a/frontend/javascripts/oxalis/model/actions/actions.ts +++ b/frontend/javascripts/oxalis/model/actions/actions.ts @@ -1,8 +1,11 @@ import type { AnnotationActionTypes } from "oxalis/model/actions/annotation_actions"; +import type { ConnectomeAction } from "oxalis/model/actions/connectome_actions"; import type { DatasetAction } from "oxalis/model/actions/dataset_actions"; import type { FlycamAction } from "oxalis/model/actions/flycam_actions"; -import 
type { SegmentationAction } from "oxalis/model/actions/segmentation_actions"; +import type { OrganizationAction } from "oxalis/model/actions/organization_actions"; +import type { ProofreadAction } from "oxalis/model/actions/proofread_actions"; import type { SaveAction } from "oxalis/model/actions/save_actions"; +import type { SegmentationAction } from "oxalis/model/actions/segmentation_actions"; import type { SettingAction } from "oxalis/model/actions/settings_actions"; import type { SkeletonTracingAction } from "oxalis/model/actions/skeletontracing_actions"; import type { TaskAction } from "oxalis/model/actions/task_actions"; @@ -10,9 +13,6 @@ import type { UiAction } from "oxalis/model/actions/ui_actions"; import type { UserAction } from "oxalis/model/actions/user_actions"; import type { ViewModeAction } from "oxalis/model/actions/view_mode_actions"; import type { VolumeTracingAction } from "oxalis/model/actions/volumetracing_actions"; -import type { ConnectomeAction } from "oxalis/model/actions/connectome_actions"; -import type { ProofreadAction } from "oxalis/model/actions/proofread_actions"; -import type { OrganizationAction } from "oxalis/model/actions/organization_actions"; export type EscalateErrorAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts index 7879b2f17de..a0795d0b541 100644 --- a/frontend/javascripts/oxalis/model/actions/annotation_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/annotation_actions.ts @@ -1,11 +1,6 @@ -import type { - APIAnnotationVisibility, - APIDataLayer, - APIDataset, - APIMeshFile, - APIUserCompact, - EditableLayerProperties, -} from "types/api_flow_types"; +import Deferred from "libs/async/deferred"; +import _ from "lodash"; +import type { Vector3 } from "oxalis/constants"; import type { Annotation, MappingType, @@ -13,10 +8,15 @@ import type { UserBoundingBoxWithoutId, UserBoundingBoxWithoutIdMaybe, } 
from "oxalis/store"; -import type { Vector3 } from "oxalis/constants"; -import _ from "lodash"; import type { Dispatch } from "redux"; -import Deferred from "libs/async/deferred"; +import type { + APIAnnotationVisibility, + APIDataLayer, + APIDataset, + APIMeshFile, + APIUserCompact, + EditableLayerProperties, +} from "types/api_flow_types"; import type { AdditionalCoordinate } from "types/api_flow_types"; type InitializeAnnotationAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/flycam_actions.ts b/frontend/javascripts/oxalis/model/actions/flycam_actions.ts index adfb377204e..9dd78ad8a11 100644 --- a/frontend/javascripts/oxalis/model/actions/flycam_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/flycam_actions.ts @@ -1,4 +1,4 @@ -import type { Vector3, OrthoView } from "oxalis/constants"; +import type { OrthoView, Vector3 } from "oxalis/constants"; import type { AdditionalCoordinate } from "types/api_flow_types"; type ZoomInAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/proofread_actions.ts b/frontend/javascripts/oxalis/model/actions/proofread_actions.ts index 25cec83cca5..ba13535f793 100644 --- a/frontend/javascripts/oxalis/model/actions/proofread_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/proofread_actions.ts @@ -1,6 +1,6 @@ import type { Vector3 } from "oxalis/constants"; -import type { AdditionalCoordinate } from "types/api_flow_types"; import type { Tree } from "oxalis/store"; +import type { AdditionalCoordinate } from "types/api_flow_types"; export type ProofreadAtPositionAction = ReturnType; export type ClearProofreadingByProductsAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/save_actions.ts b/frontend/javascripts/oxalis/model/actions/save_actions.ts index 94ecb79d769..6ab21e5c3ce 100644 --- a/frontend/javascripts/oxalis/model/actions/save_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/save_actions.ts @@ -1,12 +1,12 @@ -import type { 
Dispatch } from "redux"; +import Deferred from "libs/async/deferred"; +import Date from "libs/date"; +import { getUid } from "libs/uid_generator"; import type { UpdateAction, - UpdateActionWithoutIsolationRequirement, UpdateActionWithIsolationRequirement, + UpdateActionWithoutIsolationRequirement, } from "oxalis/model/sagas/update_actions"; -import { getUid } from "libs/uid_generator"; -import Date from "libs/date"; -import Deferred from "libs/async/deferred"; +import type { Dispatch } from "redux"; export type SaveQueueType = "skeleton" | "volume" | "mapping"; export type PushSaveQueueTransaction = { diff --git a/frontend/javascripts/oxalis/model/actions/settings_actions.ts b/frontend/javascripts/oxalis/model/actions/settings_actions.ts index 6c2b87abea1..4b7387e4b41 100644 --- a/frontend/javascripts/oxalis/model/actions/settings_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/settings_actions.ts @@ -1,13 +1,13 @@ -import type { ViewMode, ControlMode } from "oxalis/constants"; +import Deferred from "libs/async/deferred"; +import type { ControlMode, ViewMode } from "oxalis/constants"; import type { - UserConfiguration, DatasetConfiguration, DatasetLayerConfiguration, - TemporaryConfiguration, Mapping, MappingType, + TemporaryConfiguration, + UserConfiguration, } from "oxalis/store"; -import Deferred from "libs/async/deferred"; import type { APIHistogramData } from "types/api_flow_types"; export type UpdateUserSettingAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/skeletontracing_actions.tsx b/frontend/javascripts/oxalis/model/actions/skeletontracing_actions.tsx index a5fd84d8b7c..5fc217b6581 100644 --- a/frontend/javascripts/oxalis/model/actions/skeletontracing_actions.tsx +++ b/frontend/javascripts/oxalis/model/actions/skeletontracing_actions.tsx @@ -13,7 +13,7 @@ import Store from "oxalis/store"; import RemoveTreeModal from "oxalis/view/remove_tree_modal"; import type { Key } from "react"; import { batchActions } from 
"redux-batched-actions"; -import type { ServerSkeletonTracing, MetadataEntryProto } from "types/api_flow_types"; +import type { MetadataEntryProto, ServerSkeletonTracing } from "types/api_flow_types"; import type { AdditionalCoordinate } from "types/api_flow_types"; export type InitializeSkeletonTracingAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/ui_actions.ts b/frontend/javascripts/oxalis/model/actions/ui_actions.ts index de17e37210c..6276d195e46 100644 --- a/frontend/javascripts/oxalis/model/actions/ui_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/ui_actions.ts @@ -1,5 +1,5 @@ import type { AnnotationTool, OrthoView, Vector3 } from "oxalis/constants"; -import type { OxalisState, BorderOpenStatus, Theme } from "oxalis/store"; +import type { BorderOpenStatus, OxalisState, Theme } from "oxalis/store"; import type { StartAIJobModalState } from "oxalis/view/action-bar/starting_job_modals"; type SetDropzoneModalVisibilityAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/view_mode_actions.ts b/frontend/javascripts/oxalis/model/actions/view_mode_actions.ts index aa9d65d459a..60129ba164d 100644 --- a/frontend/javascripts/oxalis/model/actions/view_mode_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/view_mode_actions.ts @@ -1,8 +1,8 @@ +import type { OrthoView, Point2, Rect, Viewport, ViewportRects } from "oxalis/constants"; +import constants from "oxalis/constants"; import { getTDViewportSize } from "oxalis/model/accessors/view_mode_accessor"; import type { PartialCameraData } from "oxalis/store"; import Store from "oxalis/store"; -import type { OrthoView, Point2, Rect, Viewport, ViewportRects } from "oxalis/constants"; -import constants from "oxalis/constants"; type SetViewportAction = ReturnType; type SetTDCameraAction = ReturnType; type CenterTDViewAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts 
b/frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts index 9860f2b89f2..36a3e38e7ff 100644 --- a/frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts @@ -1,12 +1,12 @@ -import type { ServerEditableMapping, ServerVolumeTracing } from "types/api_flow_types"; -import type { Vector2, Vector3, OrthoView, ContourMode, BucketAddress } from "oxalis/constants"; +import Deferred from "libs/async/deferred"; +import type { BucketAddress, ContourMode, OrthoView, Vector2, Vector3 } from "oxalis/constants"; +import type { QuickSelectGeometry } from "oxalis/geometries/helper_geometries"; +import { AllUserBoundingBoxActions } from "oxalis/model/actions/annotation_actions"; import type { BucketDataArray } from "oxalis/model/bucket_data_handling/bucket"; import type { NumberLike, Segment, SegmentGroup, SegmentMap } from "oxalis/store"; -import Deferred from "libs/async/deferred"; import type { Dispatch } from "redux"; -import { AllUserBoundingBoxActions } from "oxalis/model/actions/annotation_actions"; -import type { QuickSelectGeometry } from "oxalis/geometries/helper_geometries"; import { batchActions } from "redux-batched-actions"; +import type { ServerEditableMapping, ServerVolumeTracing } from "types/api_flow_types"; import type { AdditionalCoordinate } from "types/api_flow_types"; export type InitializeVolumeTracingAction = ReturnType; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bounding_box.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/bounding_box.ts index 9769bc95984..f5ae1e78fd0 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bounding_box.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bounding_box.ts @@ -1,10 +1,10 @@ -import _ from "lodash"; import { V3 } from "libs/mjs"; import { map3, mod } from "libs/utils"; +import _ from "lodash"; import type { BoundingBoxType, OrthoView, Vector2, Vector3, 
Vector4 } from "oxalis/constants"; import constants, { Vector3Indicies } from "oxalis/constants"; -import type { MagInfo } from "../helpers/mag_info"; import Dimensions from "../dimensions"; +import type { MagInfo } from "../helpers/mag_info"; class BoundingBox { min: Vector3; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts index 6bc025d7b67..63c7ed11288 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts @@ -1,21 +1,21 @@ -import { createNanoEvents, type Emitter } from "nanoevents"; -import * as THREE from "three"; -import _ from "lodash"; -import type { ElementClass } from "types/api_flow_types"; -import { PullQueueConstants } from "oxalis/model/bucket_data_handling/pullqueue"; -import type { MaybeUnmergedBucketLoadedPromise } from "oxalis/model/actions/volumetracing_actions"; -import { addBucketToUndoAction } from "oxalis/model/actions/volumetracing_actions"; -import { bucketPositionToGlobalAddress } from "oxalis/model/helpers/position_converter"; +import ErrorHandling from "libs/error_handling"; import { castForArrayType, mod } from "libs/utils"; +import window from "libs/window"; +import _ from "lodash"; +import { type Emitter, createNanoEvents } from "nanoevents"; import type { BoundingBoxType, BucketAddress, Vector3 } from "oxalis/constants"; import Constants from "oxalis/constants"; +import type { MaybeUnmergedBucketLoadedPromise } from "oxalis/model/actions/volumetracing_actions"; +import { addBucketToUndoAction } from "oxalis/model/actions/volumetracing_actions"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; -import ErrorHandling from "libs/error_handling"; -import Store from "oxalis/store"; +import { PullQueueConstants } from "oxalis/model/bucket_data_handling/pullqueue"; import type TemporalBucketManager from 
"oxalis/model/bucket_data_handling/temporal_bucket_manager"; -import window from "libs/window"; -import { getActiveMagIndexForLayer } from "../accessors/flycam_accessor"; +import { bucketPositionToGlobalAddress } from "oxalis/model/helpers/position_converter"; +import Store from "oxalis/store"; +import * as THREE from "three"; +import type { ElementClass } from "types/api_flow_types"; import type { AdditionalCoordinate } from "types/api_flow_types"; +import { getActiveMagIndexForLayer } from "../accessors/flycam_accessor"; export enum BucketStateEnum { UNREQUESTED = "UNREQUESTED", diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_picker_strategies/flight_bucket_picker.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_picker_strategies/flight_bucket_picker.ts index 7c2e0ad7d98..a3e9a2a4fce 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_picker_strategies/flight_bucket_picker.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_picker_strategies/flight_bucket_picker.ts @@ -1,14 +1,14 @@ -import type { EnqueueFunction } from "oxalis/model/bucket_data_handling/layer_rendering_manager"; import type { Matrix4x4 } from "libs/mjs"; import { M4x4, V3 } from "libs/mjs"; +import { map3, map4, mod } from "libs/utils"; +import type { BucketAddress, Vector3, Vector4 } from "oxalis/constants"; +import constants from "oxalis/constants"; +import type { EnqueueFunction } from "oxalis/model/bucket_data_handling/layer_rendering_manager"; import { globalPositionToBucketPosition, globalPositionToBucketPositionFloat, zoomedAddressToAnotherZoomStep, } from "oxalis/model/helpers/position_converter"; -import type { BucketAddress, Vector3, Vector4 } from "oxalis/constants"; -import constants from "oxalis/constants"; -import { map3, map4, mod } from "libs/utils"; const aggregatePerDimension = ( aggregateFn: (...args: number[]) => number, diff --git 
a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_picker_strategies/oblique_bucket_picker.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_picker_strategies/oblique_bucket_picker.ts index 7585718a1a0..b1051ed5cba 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_picker_strategies/oblique_bucket_picker.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_picker_strategies/oblique_bucket_picker.ts @@ -1,13 +1,13 @@ -import _ from "lodash"; -import type { EnqueueFunction } from "oxalis/model/bucket_data_handling/layer_rendering_manager"; +import ThreeDMap from "libs/ThreeDMap"; import type { Matrix4x4 } from "libs/mjs"; import { M4x4, V3 } from "libs/mjs"; -import { chunk2 } from "oxalis/model/helpers/chunk"; -import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; -import ThreeDMap from "libs/ThreeDMap"; +import _ from "lodash"; import type { OrthoViewWithoutTD, Vector2, Vector3, Vector4, ViewMode } from "oxalis/constants"; import constants from "oxalis/constants"; import traverse from "oxalis/model/bucket_data_handling/bucket_traversals"; +import type { EnqueueFunction } from "oxalis/model/bucket_data_handling/layer_rendering_manager"; +import { chunk2 } from "oxalis/model/helpers/chunk"; +import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; import type { LoadingStrategy, PlaneRects } from "oxalis/store"; import { MAX_ZOOM_STEP_DIFF, getPriorityWeightForZoomStepDiff } from "../loading_strategy_logic"; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_traversals.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_traversals.ts index 1a623cd89e5..11eb211e1d8 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_traversals.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_traversals.ts @@ -1,10 +1,10 @@ import { V3 } from "libs/mjs"; +import { 
mod } from "libs/utils"; // Attention: Note that the implemented paper uses the term "voxel" for the unit import type { Vector3 } from "oxalis/constants"; import { - globalPositionToBucketPosition, getBucketExtent, + globalPositionToBucketPosition, } from "oxalis/model/helpers/position_converter"; -import { mod } from "libs/utils"; // Attention: Note that the implemented paper uses the term "voxel" for the unit // we usually refer to as bucket. This is reflected in comments as well as variable naming. // This module implements the algorithm presented in this paper: // "A Fast Voxel Traversal Algorithm for Ray Tracing" (http://www.cse.yorku.ca/~amana/research/grid.pdf) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts index 1df073770d6..722eacac8e1 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts @@ -1,40 +1,40 @@ -import _ from "lodash"; -import { createNanoEvents, type Emitter } from "nanoevents"; -import type { Bucket, BucketDataArray } from "oxalis/model/bucket_data_handling/bucket"; -import { DataBucket, NULL_BUCKET, NullBucket } from "oxalis/model/bucket_data_handling/bucket"; -import type { AdditionalAxis, ElementClass } from "types/api_flow_types"; -import type { ProgressCallback } from "libs/progress_callback"; +import ErrorHandling from "libs/error_handling"; import { V3 } from "libs/mjs"; -import { VoxelNeighborQueue2D, VoxelNeighborQueue3D } from "oxalis/model/volumetracing/volumelayer"; +import type { ProgressCallback } from "libs/progress_callback"; +import Toast from "libs/toast"; import { areBoundingBoxesOverlappingOrTouching, castForArrayType, isNumberMap, union, } from "libs/utils"; +import _ from "lodash"; +import { type Emitter, createNanoEvents } from "nanoevents"; +import type { + BoundingBoxType, + BucketAddress, + LabelMasksByBucketAndW, + 
Vector3, +} from "oxalis/constants"; +import constants, { MappingStatusEnum } from "oxalis/constants"; import { getMappingInfo } from "oxalis/model/accessors/dataset_accessor"; import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; -import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; -import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; -import type { DimensionMap } from "oxalis/model/dimensions"; -import Dimensions from "oxalis/model/dimensions"; -import ErrorHandling from "libs/error_handling"; +import type { Bucket, BucketDataArray } from "oxalis/model/bucket_data_handling/bucket"; +import { DataBucket, NULL_BUCKET, NullBucket } from "oxalis/model/bucket_data_handling/bucket"; import type PullQueue from "oxalis/model/bucket_data_handling/pullqueue"; import type PushQueue from "oxalis/model/bucket_data_handling/pushqueue"; +import TemporalBucketManager from "oxalis/model/bucket_data_handling/temporal_bucket_manager"; +import type { DimensionMap } from "oxalis/model/dimensions"; +import Dimensions from "oxalis/model/dimensions"; +import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; +import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; +import { VoxelNeighborQueue2D, VoxelNeighborQueue3D } from "oxalis/model/volumetracing/volumelayer"; import type { Mapping } from "oxalis/store"; import Store from "oxalis/store"; -import TemporalBucketManager from "oxalis/model/bucket_data_handling/temporal_bucket_manager"; -import Toast from "libs/toast"; -import type { - Vector3, - BoundingBoxType, - LabelMasksByBucketAndW, - BucketAddress, -} from "oxalis/constants"; -import constants, { MappingStatusEnum } from "oxalis/constants"; -import type { MagInfo } from "../helpers/mag_info"; +import type { AdditionalAxis, ElementClass } from "types/api_flow_types"; 
import type { AdditionalCoordinate } from "types/api_flow_types"; +import type { MagInfo } from "../helpers/mag_info"; const warnAboutTooManyAllocations = _.once(() => { const msg = diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_rendering_logic.tsx b/frontend/javascripts/oxalis/model/bucket_data_handling/data_rendering_logic.tsx index 2b09d17f8fb..dcfe615b1cf 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_rendering_logic.tsx +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_rendering_logic.tsx @@ -1,9 +1,9 @@ -import _ from "lodash"; +import ErrorHandling from "libs/error_handling"; +import Toast from "libs/toast"; import { document } from "libs/window"; +import _ from "lodash"; import constants from "oxalis/constants"; import type { ElementClass } from "types/api_flow_types"; -import Toast from "libs/toast"; -import ErrorHandling from "libs/error_handling"; type GpuSpecs = { supportedTextureSize: number; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts index 31ba39ac814..9059c348930 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts @@ -1,37 +1,37 @@ -import type * as THREE from "three"; -import _ from "lodash"; -import memoizeOne from "memoize-one"; -import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket"; +import app from "app"; +import type UpdatableTexture from "libs/UpdatableTexture"; +import LatestTaskExecutor, { SKIPPED_TASK_REASON } from "libs/async/latest_task_executor"; +import { CuckooTableVec3 } from "libs/cuckoo/cuckoo_table_vec3"; +import { CuckooTableVec5 } from "libs/cuckoo/cuckoo_table_vec5"; +import DiffableMap from "libs/diffable_map"; import { M4x4, type Matrix4x4 } from "libs/mjs"; -import { 
createWorker } from "oxalis/workers/comlink_wrapper"; import { map3 } from "libs/utils"; +import _ from "lodash"; +import memoizeOne from "memoize-one"; +import type { BucketAddress, Vector3, Vector4, ViewMode } from "oxalis/constants"; import { getByteCount, getElementClass, - isLayerVisible, getLayerByName, getMagInfo, - invertAndTranspose, getTransformsForLayer, + invertAndTranspose, + isLayerVisible, } from "oxalis/model/accessors/dataset_accessor"; -import AsyncBucketPickerWorker from "oxalis/workers/async_bucket_picker.worker"; +import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; -import LatestTaskExecutor, { SKIPPED_TASK_REASON } from "libs/async/latest_task_executor"; import type PullQueue from "oxalis/model/bucket_data_handling/pullqueue"; -import Store, { type PlaneRects, type SegmentMap } from "oxalis/store"; import TextureBucketManager from "oxalis/model/bucket_data_handling/texture_bucket_manager"; -import type UpdatableTexture from "libs/UpdatableTexture"; -import type { ViewMode, Vector3, Vector4, BucketAddress } from "oxalis/constants"; import shaderEditor from "oxalis/model/helpers/shader_editor"; -import DiffableMap from "libs/diffable_map"; -import { CuckooTableVec3 } from "libs/cuckoo/cuckoo_table_vec3"; -import { CuckooTableVec5 } from "libs/cuckoo/cuckoo_table_vec5"; +import Store, { type PlaneRects, type SegmentMap } from "oxalis/store"; +import AsyncBucketPickerWorker from "oxalis/workers/async_bucket_picker.worker"; +import { createWorker } from "oxalis/workers/comlink_wrapper"; +import type * as THREE from "three"; +import type { AdditionalCoordinate } from "types/api_flow_types"; +import { getViewportRects } from "../accessors/view_mode_accessor"; +import { getSegmentsForLayer } from "../accessors/volumetracing_accessor"; import { listenToStoreProperty } from "../helpers/listener_helpers"; import { cachedDiffSegmentLists } from 
"../sagas/volumetracing_saga"; -import { getSegmentsForLayer } from "../accessors/volumetracing_accessor"; -import { getViewportRects } from "../accessors/view_mode_accessor"; -import type { AdditionalCoordinate } from "types/api_flow_types"; -import app from "app"; const CUSTOM_COLORS_TEXTURE_WIDTH = 512; // 256**2 (entries) * 0.25 (load capacity) / 8 (layers) == 2048 buckets/layer diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/mappings.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/mappings.ts index 52a1cfdc9e5..cb131766365 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/mappings.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/mappings.ts @@ -1,20 +1,20 @@ +import { message } from "antd"; +import type UpdatableTexture from "libs/UpdatableTexture"; +import { CuckooTableUint32 } from "libs/cuckoo/cuckoo_table_uint32"; +import { CuckooTableUint64 } from "libs/cuckoo/cuckoo_table_uint64"; +import Toast from "libs/toast"; +import { diffMaps } from "libs/utils"; import _ from "lodash"; +import memoizeOne from "memoize-one"; import { - getMappings, - getMappingInfo, getElementClass, + getMappingInfo, + getMappings, } from "oxalis/model/accessors/dataset_accessor"; -import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import { finishMappingInitializationAction } from "oxalis/model/actions/settings_actions"; +import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import type { Mapping, NumberLike } from "oxalis/store"; import Store from "oxalis/store"; -import type UpdatableTexture from "libs/UpdatableTexture"; -import { CuckooTableUint64 } from "libs/cuckoo/cuckoo_table_uint64"; -import { CuckooTableUint32 } from "libs/cuckoo/cuckoo_table_uint32"; -import { message } from "antd"; -import { diffMaps } from "libs/utils"; -import memoizeOne from "memoize-one"; -import Toast from "libs/toast"; // With the default load factor of 0.9, this suffices for 
mapping // ~15M uint32 ids. diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.ts index dc7c6230e45..f9fcd70245a 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.ts @@ -1,12 +1,12 @@ -import { AbstractPrefetchStrategy } from "oxalis/model/bucket_data_handling/prefetch_strategy_plane"; -import type { BoundingBoxType, Vector3 } from "oxalis/constants"; import type { Matrix4x4 } from "libs/mjs"; import { M4x4, V3 } from "libs/mjs"; +import type { BoundingBoxType, Vector3 } from "oxalis/constants"; +import PolyhedronRasterizer from "oxalis/model/bucket_data_handling/polyhedron_rasterizer"; +import { AbstractPrefetchStrategy } from "oxalis/model/bucket_data_handling/prefetch_strategy_plane"; import type { PullQueueItem } from "oxalis/model/bucket_data_handling/pullqueue"; import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; -import PolyhedronRasterizer from "oxalis/model/bucket_data_handling/polyhedron_rasterizer"; -import type { MagInfo } from "../helpers/mag_info"; import type { AdditionalCoordinate } from "types/api_flow_types"; +import type { MagInfo } from "../helpers/mag_info"; export class PrefetchStrategyArbitrary extends AbstractPrefetchStrategy { velocityRangeStart = 0; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.ts index 52f78220e64..8b1244e88ad 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.ts @@ -1,15 +1,15 @@ import _ from "lodash"; +import type { OrthoView, OrthoViewMap, Vector3, Vector4 } 
from "oxalis/constants"; +import constants, { OrthoViewValuesWithoutTDView } from "oxalis/constants"; import type { Area } from "oxalis/model/accessors/flycam_accessor"; -import type { PullQueueItem } from "oxalis/model/bucket_data_handling/pullqueue"; -import { zoomedAddressToAnotherZoomStep } from "oxalis/model/helpers/position_converter"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; +import { getPriorityWeightForPrefetch } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; +import type { PullQueueItem } from "oxalis/model/bucket_data_handling/pullqueue"; import type { DimensionIndices } from "oxalis/model/dimensions"; import Dimensions from "oxalis/model/dimensions"; -import type { OrthoView, OrthoViewMap, Vector3, Vector4 } from "oxalis/constants"; -import constants, { OrthoViewValuesWithoutTDView } from "oxalis/constants"; -import { getPriorityWeightForPrefetch } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; -import type { MagInfo } from "../helpers/mag_info"; +import { zoomedAddressToAnotherZoomStep } from "oxalis/model/helpers/position_converter"; import type { AdditionalCoordinate } from "types/api_flow_types"; +import type { MagInfo } from "../helpers/mag_info"; const { MAX_ZOOM_STEP_DIFF_PREFETCH } = constants; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/pullqueue.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/pullqueue.ts index d9c87af9613..7303a1e8191 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/pullqueue.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/pullqueue.ts @@ -1,11 +1,11 @@ import PriorityQueue from "js-priority-queue"; -import { getLayerByName } from "oxalis/model/accessors/dataset_accessor"; -import { requestWithFallback } from "oxalis/model/bucket_data_handling/wkstore_adapter"; +import { asAbortable, sleep } from "libs/utils"; import type { BucketAddress } from "oxalis/constants"; +import { getLayerByName 
} from "oxalis/model/accessors/dataset_accessor"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; +import { requestWithFallback } from "oxalis/model/bucket_data_handling/wkstore_adapter"; import type { DataStoreInfo } from "oxalis/store"; import Store from "oxalis/store"; -import { asAbortable, sleep } from "libs/utils"; export type PullQueueItem = { priority: number; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts index 4827ced6328..bf550b162b1 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/pushqueue.ts @@ -1,13 +1,13 @@ +import { AsyncFifoResolver } from "libs/async/async_fifo_resolver"; +import { createDebouncedAbortableParameterlessCallable } from "libs/async/debounced_abortable_saga"; import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket"; -import { createCompressedUpdateBucketActions } from "oxalis/model/bucket_data_handling/wkstore_adapter"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; -import { createDebouncedAbortableParameterlessCallable } from "libs/async/debounced_abortable_saga"; -import { call } from "redux-saga/effects"; +import { createCompressedUpdateBucketActions } from "oxalis/model/bucket_data_handling/wkstore_adapter"; import Store from "oxalis/store"; +import { call } from "redux-saga/effects"; +import { escalateErrorAction } from "../actions/actions"; import { pushSaveQueueTransaction } from "../actions/save_actions"; import type { UpdateActionWithoutIsolationRequirement } from "../sagas/update_actions"; -import { AsyncFifoResolver } from "libs/async/async_fifo_resolver"; -import { escalateErrorAction } from "../actions/actions"; // Only process the PushQueue after there was no user interaction (or bucket modification due to // downsampling) for PUSH_DEBOUNCE_TIME 
milliseconds. diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/texture_bucket_manager.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/texture_bucket_manager.ts index f2cfe80c03d..ecf87c69198 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/texture_bucket_manager.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/texture_bucket_manager.ts @@ -1,21 +1,21 @@ -import * as THREE from "three"; +import app from "app"; +import type UpdatableTexture from "libs/UpdatableTexture"; +import type { CuckooTableVec5 } from "libs/cuckoo/cuckoo_table_vec5"; +import { waitForCondition } from "libs/utils"; +import window from "libs/window"; import _ from "lodash"; -import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket"; +import { WkDevFlags } from "oxalis/api/wk_dev"; +import constants from "oxalis/constants"; +import { getRenderer } from "oxalis/controller/renderer"; import { createUpdatableTexture } from "oxalis/geometries/materials/plane_material_factory_helpers"; +import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket"; import { getBucketCapacity, - getPackingDegree, getChannelCount, + getPackingDegree, } from "oxalis/model/bucket_data_handling/data_rendering_logic"; -import { getRenderer } from "oxalis/controller/renderer"; -import { waitForCondition } from "libs/utils"; -import type UpdatableTexture from "libs/UpdatableTexture"; -import constants from "oxalis/constants"; -import window from "libs/window"; +import * as THREE from "three"; import type { ElementClass } from "types/api_flow_types"; -import type { CuckooTableVec5 } from "libs/cuckoo/cuckoo_table_vec5"; -import app from "app"; -import { WkDevFlags } from "oxalis/api/wk_dev"; // A TextureBucketManager instance is responsible for making buckets available // to the GPU. 
diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index c1e4662b854..fb6aa0f98b9 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -1,34 +1,34 @@ -import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket"; -import { bucketPositionToGlobalAddress } from "oxalis/model/helpers/position_converter"; -import { createWorker } from "oxalis/workers/comlink_wrapper"; import { doWithToken } from "admin/admin_rest_api"; +import ErrorHandling from "libs/error_handling"; +import Request from "libs/request"; +import { parseMaybe } from "libs/utils"; +import WebworkerPool from "libs/webworker_pool"; +import window from "libs/window"; +import _ from "lodash"; +import type { BucketAddress, Vector3 } from "oxalis/constants"; +import constants, { MappingStatusEnum } from "oxalis/constants"; import { - isSegmentationLayer, getByteCountFromLayer, getMagInfo, getMappingInfo, + isSegmentationLayer, } from "oxalis/model/accessors/dataset_accessor"; import { getVolumeTracingById, needsLocalHdf5Mapping, } from "oxalis/model/accessors/volumetracing_accessor"; -import { parseMaybe } from "libs/utils"; +import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket"; +import { bucketPositionToGlobalAddress } from "oxalis/model/helpers/position_converter"; import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; import { updateBucket } from "oxalis/model/sagas/update_actions"; -import ByteArraysToLz4Base64Worker from "oxalis/workers/byte_arrays_to_lz4_base64.worker"; -import DecodeFourBitWorker from "oxalis/workers/decode_four_bit.worker"; -import ErrorHandling from "libs/error_handling"; -import Request from "libs/request"; import type { DataLayerType, VolumeTracing } from "oxalis/store"; import 
Store from "oxalis/store"; -import WebworkerPool from "libs/webworker_pool"; -import type { BucketAddress, Vector3 } from "oxalis/constants"; -import constants, { MappingStatusEnum } from "oxalis/constants"; -import window from "libs/window"; +import ByteArraysToLz4Base64Worker from "oxalis/workers/byte_arrays_to_lz4_base64.worker"; +import { createWorker } from "oxalis/workers/comlink_wrapper"; +import DecodeFourBitWorker from "oxalis/workers/decode_four_bit.worker"; +import type { AdditionalCoordinate } from "types/api_flow_types"; import { getGlobalDataConnectionInfo } from "../data_connection_info"; import type { MagInfo } from "../helpers/mag_info"; -import type { AdditionalCoordinate } from "types/api_flow_types"; -import _ from "lodash"; const decodeFourBit = createWorker(DecodeFourBitWorker); diff --git a/frontend/javascripts/oxalis/model/data_connection_info.ts b/frontend/javascripts/oxalis/model/data_connection_info.ts index 5591260fee8..f61be1718a1 100644 --- a/frontend/javascripts/oxalis/model/data_connection_info.ts +++ b/frontend/javascripts/oxalis/model/data_connection_info.ts @@ -1,5 +1,5 @@ -import _ from "lodash"; import window from "libs/window"; +import _ from "lodash"; const CONSIDERED_TIMESPAN_IN_STATS = 5000; diff --git a/frontend/javascripts/oxalis/model/data_layer.ts b/frontend/javascripts/oxalis/model/data_layer.ts index c5fb2fe6c39..08a05c39683 100644 --- a/frontend/javascripts/oxalis/model/data_layer.ts +++ b/frontend/javascripts/oxalis/model/data_layer.ts @@ -1,7 +1,7 @@ +import ErrorHandling from "libs/error_handling"; import type { Vector3 } from "oxalis/constants"; import { getLayerBoundingBox, getMagInfo } from "oxalis/model/accessors/dataset_accessor"; import DataCube from "oxalis/model/bucket_data_handling/data_cube"; -import ErrorHandling from "libs/error_handling"; import LayerRenderingManager from "oxalis/model/bucket_data_handling/layer_rendering_manager"; import Mappings from "oxalis/model/bucket_data_handling/mappings"; 
import PullQueue from "oxalis/model/bucket_data_handling/pullqueue"; diff --git a/frontend/javascripts/oxalis/model/edge_collection.ts b/frontend/javascripts/oxalis/model/edge_collection.ts index d64c0550a54..0316fa6c458 100644 --- a/frontend/javascripts/oxalis/model/edge_collection.ts +++ b/frontend/javascripts/oxalis/model/edge_collection.ts @@ -1,7 +1,7 @@ -import _ from "lodash"; -import type { Edge } from "oxalis/store"; import DiffableMap, { diffDiffableMaps } from "libs/diffable_map"; import * as Utils from "libs/utils"; +import _ from "lodash"; +import type { Edge } from "oxalis/store"; type EdgeMap = DiffableMap>; export default class EdgeCollection { // Edge map keyed by the source id of the edges (outgoing) diff --git a/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts b/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts index 45b027af15c..6b4a0eb417d 100644 --- a/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts +++ b/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts @@ -1,7 +1,7 @@ import _ from "lodash"; -import type { Dispatch } from "redux"; -import type { Action } from "oxalis/model/actions/actions"; import { WkDevFlags } from "oxalis/api/wk_dev"; +import type { Action } from "oxalis/model/actions/actions"; +import type { Dispatch } from "redux"; const MAX_ACTION_LOG_LENGTH = 250; let actionLog: string[] = []; diff --git a/frontend/javascripts/oxalis/model/helpers/bucket_compression.ts b/frontend/javascripts/oxalis/model/helpers/bucket_compression.ts index acbbe32f3d1..d0ea0da39f6 100644 --- a/frontend/javascripts/oxalis/model/helpers/bucket_compression.ts +++ b/frontend/javascripts/oxalis/model/helpers/bucket_compression.ts @@ -1,7 +1,7 @@ import type { BucketDataArray } from "oxalis/model/bucket_data_handling/bucket"; import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket"; -import { createWorker } from "oxalis/workers/comlink_wrapper"; import 
compressLz4Block from "oxalis/workers/byte_array_lz4_compression.worker"; +import { createWorker } from "oxalis/workers/comlink_wrapper"; const _byteArrayToLz4Array = createWorker(compressLz4Block); diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts index 63efbb9106d..09f71aabc80 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_toggle_actions.ts @@ -4,16 +4,16 @@ // appropriate. // See compactToggleActions for the high-level logic of the compaction. import _ from "lodash"; -import type { SkeletonTracing, Tree, TreeGroup, TreeMap, VolumeTracing } from "oxalis/store"; import type { UpdateActionWithoutIsolationRequirement, UpdateTreeVisibilityUpdateAction, } from "oxalis/model/sagas/update_actions"; import { updateTreeGroupVisibility, updateTreeVisibility } from "oxalis/model/sagas/update_actions"; +import type { SkeletonTracing, Tree, TreeGroup, TreeMap, VolumeTracing } from "oxalis/store"; import { + MISSING_GROUP_ID, createGroupToTreesMap, getGroupByIdWithSubgroups, - MISSING_GROUP_ID, } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; type GroupNode = { children: GroupNode[]; diff --git a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts index 674c28c256c..81664049840 100644 --- a/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts +++ b/frontend/javascripts/oxalis/model/helpers/compaction/compact_update_actions.ts @@ -1,5 +1,6 @@ +import { withoutValues } from "libs/utils"; import _ from "lodash"; -import type { SkeletonTracing, VolumeTracing } from "oxalis/store"; +import compactToggleActions from "oxalis/model/helpers/compaction/compact_toggle_actions"; import type { 
CreateEdgeUpdateAction, CreateNodeUpdateAction, @@ -9,8 +10,7 @@ import type { UpdateActionWithoutIsolationRequirement, } from "oxalis/model/sagas/update_actions"; import { moveTreeComponent } from "oxalis/model/sagas/update_actions"; -import compactToggleActions from "oxalis/model/helpers/compaction/compact_toggle_actions"; -import { withoutValues } from "libs/utils"; +import type { SkeletonTracing, VolumeTracing } from "oxalis/store"; // The Cantor pairing function assigns one natural number to each pair of natural numbers function cantor(a: number, b: number): number { diff --git a/frontend/javascripts/oxalis/model/helpers/deep_update_test.ts b/frontend/javascripts/oxalis/model/helpers/deep_update_test.ts index d871d45d776..b83036bbe59 100644 --- a/frontend/javascripts/oxalis/model/helpers/deep_update_test.ts +++ b/frontend/javascripts/oxalis/model/helpers/deep_update_test.ts @@ -5,8 +5,8 @@ // - perform multiple invalid calls which TS should catch (--> ts-expect-error) // - perform one valid call -import type { OxalisState } from "oxalis/store"; import { updateKey, updateKey2, updateKey3, updateKey4 } from "oxalis/model/helpers/deep_update"; +import type { OxalisState } from "oxalis/store"; export function test1(state: OxalisState) { // @ts-expect-error diff --git a/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts b/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts index 4312fb8c941..1229dad3267 100644 --- a/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/nml_helpers.ts @@ -1,44 +1,44 @@ -// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'saxo... 
Remove this comment to see the full error message -import Saxophone from "saxophone"; +import Date from "libs/date"; +import DiffableMap from "libs/diffable_map"; +import * as Utils from "libs/utils"; +import { coalesce } from "libs/utils"; +import { location } from "libs/window"; import _ from "lodash"; -import type { APIBuildInfo, MetadataEntryProto } from "types/api_flow_types"; +import messages from "messages"; +import { + type BoundingBoxType, + IdentityTransform, + type TreeType, + TreeTypeEnum, + type Vector3, +} from "oxalis/constants"; +import Constants from "oxalis/constants"; +import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; +import EdgeCollection from "oxalis/model/edge_collection"; import { getMaximumGroupId, getMaximumTreeId, } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; -import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; -import Date from "libs/date"; -import DiffableMap from "libs/diffable_map"; -import EdgeCollection from "oxalis/model/edge_collection"; import type { - UserBoundingBox, + BoundingBoxObject, + MutableNode, + MutableTree, + MutableTreeMap, NodeMap, OxalisState, SkeletonTracing, - MutableTreeMap, Tracing, Tree, - MutableTree, TreeGroup, - BoundingBoxObject, - MutableNode, + UserBoundingBox, } from "oxalis/store"; import { findGroup } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; -import messages from "messages"; -import * as Utils from "libs/utils"; -import { - type BoundingBoxType, - IdentityTransform, - type TreeType, - TreeTypeEnum, - type Vector3, -} from "oxalis/constants"; -import Constants from "oxalis/constants"; -import { location } from "libs/window"; -import { coalesce } from "libs/utils"; +// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'saxo... 
Remove this comment to see the full error message +import Saxophone from "saxophone"; +import type { APIBuildInfo, MetadataEntryProto } from "types/api_flow_types"; import type { AdditionalCoordinate } from "types/api_flow_types"; -import { getNodePosition } from "../accessors/skeletontracing_accessor"; import { getTransformsForSkeletonLayer } from "../accessors/dataset_accessor"; +import { getNodePosition } from "../accessors/skeletontracing_accessor"; // NML Defaults const DEFAULT_COLOR: Vector3 = [1, 0, 0]; diff --git a/frontend/javascripts/oxalis/model/helpers/overwrite_action_middleware.ts b/frontend/javascripts/oxalis/model/helpers/overwrite_action_middleware.ts index c51d2debc60..a8938eda548 100644 --- a/frontend/javascripts/oxalis/model/helpers/overwrite_action_middleware.ts +++ b/frontend/javascripts/oxalis/model/helpers/overwrite_action_middleware.ts @@ -1,5 +1,5 @@ -import type { Dispatch, MiddlewareAPI } from "redux"; import type { Action } from "oxalis/model/actions/actions"; +import type { Dispatch, MiddlewareAPI } from "redux"; type OverwriteFunction = (store: S, next: (action: A) => void, action: A) => A | Promise; const overwrites: Record> = {}; diff --git a/frontend/javascripts/oxalis/model/helpers/position_converter.ts b/frontend/javascripts/oxalis/model/helpers/position_converter.ts index 15cf9fb465d..c8f751bacaa 100644 --- a/frontend/javascripts/oxalis/model/helpers/position_converter.ts +++ b/frontend/javascripts/oxalis/model/helpers/position_converter.ts @@ -1,4 +1,4 @@ -import type { Vector3, Vector4, BucketAddress } from "oxalis/constants"; +import type { BucketAddress, Vector3, Vector4 } from "oxalis/constants"; import constants from "oxalis/constants"; import type { AdditionalCoordinate } from "types/api_flow_types"; import type { MagInfo } from "./mag_info"; diff --git a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts index 64cd2456e71..7059d477f99 100644 --- 
a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts @@ -1,14 +1,14 @@ -import { Root } from "protobufjs/light"; -import type { APITracingStoreAnnotation, ServerTracing } from "types/api_flow_types"; -// @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'SkeletonTracing.proto' or its ... Remove this comment to see the full error message -import SkeletonTracingProto from "SkeletonTracing.proto"; -// @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'VolumeTracing.proto' or its co... Remove this comment to see the full error message -import VolumeTracingProto from "VolumeTracing.proto"; // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'AnnotationProto.proto' or its co... Remove this comment to see the full error message import AnnotationProto from "Annotation.proto"; // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'ListOfLong.proto' or its co... Remove this comment to see the full error message import ListOfLongProto from "ListOfLong.proto"; +// @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'SkeletonTracing.proto' or its ... Remove this comment to see the full error message +import SkeletonTracingProto from "SkeletonTracing.proto"; +// @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'VolumeTracing.proto' or its co... 
Remove this comment to see the full error message +import VolumeTracingProto from "VolumeTracing.proto"; import { isBigInt } from "libs/utils"; +import { Root } from "protobufjs/light"; +import type { APITracingStoreAnnotation, ServerTracing } from "types/api_flow_types"; const PROTO_FILES = { skeleton: SkeletonTracingProto, diff --git a/frontend/javascripts/oxalis/model/helpers/reduce_reducers.ts b/frontend/javascripts/oxalis/model/helpers/reduce_reducers.ts index d9e6171753b..53ddb38150e 100644 --- a/frontend/javascripts/oxalis/model/helpers/reduce_reducers.ts +++ b/frontend/javascripts/oxalis/model/helpers/reduce_reducers.ts @@ -1,6 +1,6 @@ -import _ from "lodash"; // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'deep... Remove this comment to see the full error message import deepFreezeLib from "deep-freeze"; +import _ from "lodash"; // Do not use the deep-freeze library in production // process.env.NODE_ENV is being substituted by webpack let deepFreeze = deepFreezeLib; diff --git a/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts b/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts index 11ea81b47e3..725a23fcb69 100644 --- a/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/annotation_reducer.ts @@ -1,16 +1,16 @@ import update from "immutability-helper"; -import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState, UserBoundingBox, MeshInformation } from "oxalis/store"; import { V3 } from "libs/mjs"; -import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; -import { maybeGetSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import * as Utils from "libs/utils"; -import { getDisplayedDataExtentInPlaneMode } from "oxalis/model/accessors/view_mode_accessor"; import _ from "lodash"; -import { getAdditionalCoordinatesAsString } from "../accessors/flycam_accessor"; -import { 
getMeshesForAdditionalCoordinates } from "../accessors/volumetracing_accessor"; +import { maybeGetSomeTracing } from "oxalis/model/accessors/tracing_accessor"; +import { getDisplayedDataExtentInPlaneMode } from "oxalis/model/accessors/view_mode_accessor"; +import type { Action } from "oxalis/model/actions/actions"; +import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; +import type { MeshInformation, OxalisState, UserBoundingBox } from "oxalis/store"; import type { AdditionalCoordinate } from "types/api_flow_types"; import { getDatasetBoundingBox } from "../accessors/dataset_accessor"; +import { getAdditionalCoordinatesAsString } from "../accessors/flycam_accessor"; +import { getMeshesForAdditionalCoordinates } from "../accessors/volumetracing_accessor"; import BoundingBox from "../bucket_data_handling/bounding_box"; const updateTracing = (state: OxalisState, shape: Partial): OxalisState => diff --git a/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts b/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts index e013de440f4..a41e0ab8531 100644 --- a/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/connectome_reducer.ts @@ -1,14 +1,14 @@ import Maybe from "data.maybe"; -import _ from "lodash"; import update from "immutability-helper"; +import _ from "lodash"; +import Constants from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState, SkeletonTracing, TreeMap } from "oxalis/store"; +import { updateKey3 } from "oxalis/model/helpers/deep_update"; import { addTreesAndGroups, getMaximumNodeId, } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; -import { updateKey3 } from "oxalis/model/helpers/deep_update"; -import Constants from "oxalis/constants"; +import type { OxalisState, SkeletonTracing, TreeMap } from "oxalis/store"; function getSkeletonTracingForConnectome( state: OxalisState, diff --git 
a/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts b/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts index 295a6d411ad..6c7972ff25b 100644 --- a/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts @@ -1,10 +1,10 @@ -import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState } from "oxalis/store"; -import { updateKey2 } from "oxalis/model/helpers/deep_update"; -import { getSegmentationLayers } from "oxalis/model/accessors/dataset_accessor"; import DiffableMap from "libs/diffable_map"; -import { MappingStatusEnum } from "oxalis/constants"; import { deepIterate } from "libs/utils"; +import { MappingStatusEnum } from "oxalis/constants"; +import { getSegmentationLayers } from "oxalis/model/accessors/dataset_accessor"; +import type { Action } from "oxalis/model/actions/actions"; +import { updateKey2 } from "oxalis/model/helpers/deep_update"; +import type { OxalisState } from "oxalis/store"; function createDictWithKeysAndValue( keys: Array, diff --git a/frontend/javascripts/oxalis/model/reducers/flycam_reducer.ts b/frontend/javascripts/oxalis/model/reducers/flycam_reducer.ts index 3569f26a462..54c5ac95991 100644 --- a/frontend/javascripts/oxalis/model/reducers/flycam_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/flycam_reducer.ts @@ -1,17 +1,17 @@ -import _ from "lodash"; import update from "immutability-helper"; -import type { Action } from "oxalis/model/actions/actions"; import type { Matrix4x4 } from "libs/mjs"; import { M4x4 } from "libs/mjs"; -import type { OxalisState } from "oxalis/store"; +import * as Utils from "libs/utils"; +import _ from "lodash"; import type { Vector3 } from "oxalis/constants"; -import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; import { - getValidZoomRangeForUser, ZOOM_STEP_INTERVAL, + getValidZoomRangeForUser, } from "oxalis/model/accessors/flycam_accessor"; +import type { Action 
} from "oxalis/model/actions/actions"; import Dimensions from "oxalis/model/dimensions"; -import * as Utils from "libs/utils"; +import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; +import type { OxalisState } from "oxalis/store"; import { getUnifiedAdditionalCoordinates } from "../accessors/dataset_accessor"; function cloneMatrix(m: Matrix4x4): Matrix4x4 { diff --git a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts index 9f4d556f8ee..29bcdbc048e 100644 --- a/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/reducer_helpers.ts @@ -1,27 +1,27 @@ import Maybe from "data.maybe"; +import * as Utils from "libs/utils"; +import { AnnotationToolEnum } from "oxalis/constants"; +import type { AnnotationTool, BoundingBoxType } from "oxalis/constants"; +import { getDisabledInfoForTools } from "oxalis/model/accessors/tool_accessor"; +import { + isVolumeAnnotationDisallowedForZoom, + isVolumeTool, +} from "oxalis/model/accessors/volumetracing_accessor"; import { updateKey } from "oxalis/model/helpers/deep_update"; -import type { - AdditionalAxis, - APIAnnotation, - ServerAdditionalAxis, - ServerBoundingBox, - UserBoundingBoxFromServer, -} from "types/api_flow_types"; import type { Annotation, BoundingBoxObject, + OxalisState, UserBoundingBox, UserBoundingBoxToServer, - OxalisState, } from "oxalis/store"; -import { AnnotationToolEnum } from "oxalis/constants"; -import type { BoundingBoxType, AnnotationTool } from "oxalis/constants"; -import * as Utils from "libs/utils"; -import { getDisabledInfoForTools } from "oxalis/model/accessors/tool_accessor"; -import { - isVolumeTool, - isVolumeAnnotationDisallowedForZoom, -} from "oxalis/model/accessors/volumetracing_accessor"; +import type { + APIAnnotation, + AdditionalAxis, + ServerAdditionalAxis, + ServerBoundingBox, + UserBoundingBoxFromServer, +} from "types/api_flow_types"; 
export function convertServerBoundingBoxToBoundingBox( boundingBox: ServerBoundingBox, diff --git a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts index 39c9c72edad..34102ddcf02 100644 --- a/frontend/javascripts/oxalis/model/reducers/save_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/save_reducer.ts @@ -1,12 +1,12 @@ -import _ from "lodash"; import update from "immutability-helper"; +import Date from "libs/date"; +import _ from "lodash"; +import { type TracingStats, getStats } from "oxalis/model/accessors/annotation_accessor"; import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState, SaveState } from "oxalis/store"; import { getActionLog } from "oxalis/model/helpers/action_logger_middleware"; -import { type TracingStats, getStats } from "oxalis/model/accessors/annotation_accessor"; -import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; -import Date from "libs/date"; +import { MAXIMUM_ACTION_COUNT_PER_BATCH } from "oxalis/model/sagas/save_saga_constants"; +import type { OxalisState, SaveState } from "oxalis/store"; // These update actions are not idempotent. Having them // twice in the save queue causes a corruption of the current annotation. 
diff --git a/frontend/javascripts/oxalis/model/reducers/settings_reducer.ts b/frontend/javascripts/oxalis/model/reducers/settings_reducer.ts index fb681beb5a1..328fd689f8c 100644 --- a/frontend/javascripts/oxalis/model/reducers/settings_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/settings_reducer.ts @@ -1,19 +1,19 @@ -import type { Action } from "oxalis/model/actions/actions"; -import { MappingStatusEnum } from "oxalis/constants"; -import type { OxalisState, ActiveMappingInfo } from "oxalis/store"; import { clamp } from "libs/utils"; +import { MappingStatusEnum } from "oxalis/constants"; import { getLayerByName, + getMappingInfo, getSegmentationLayers, getVisibleSegmentationLayers, - getMappingInfo, } from "oxalis/model/accessors/dataset_accessor"; -import { updateKey, updateKey2, updateKey3 } from "oxalis/model/helpers/deep_update"; -import { userSettings } from "types/schemas/user_settings.schema"; import { hasEditableMapping, isMappingActivationAllowed, } from "oxalis/model/accessors/volumetracing_accessor"; +import type { Action } from "oxalis/model/actions/actions"; +import { updateKey, updateKey2, updateKey3 } from "oxalis/model/helpers/deep_update"; +import type { ActiveMappingInfo, OxalisState } from "oxalis/store"; +import { userSettings } from "types/schemas/user_settings.schema"; import { setRotationReducer } from "./flycam_reducer"; // diff --git a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts index 20ec6539289..1fb32974b5e 100644 --- a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer.ts @@ -1,53 +1,53 @@ import Maybe from "data.maybe"; -import _ from "lodash"; import update from "immutability-helper"; +import ColorGenerator from "libs/color_generator"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import _ from "lodash"; 
+import Constants, { AnnotationToolEnum, TreeTypeEnum } from "oxalis/constants"; +import { + findTreeByNodeId, + getNodeAndTree, + getSkeletonTracing, + getTree, + getTreesWithType, + isSkeletonLayerTransformed, +} from "oxalis/model/accessors/skeletontracing_accessor"; import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState, SkeletonTracing, Tree, TreeGroup } from "oxalis/store"; import { convertServerAdditionalAxesToFrontEnd, convertServerBoundingBoxToFrontend, convertUserBoundingBoxesFromServerToFrontend, } from "oxalis/model/reducers/reducer_helpers"; import { + addTreesAndGroups, createBranchPoint, - deleteBranchPoint, + createComment, createNode, createTree, - deleteTrees, - deleteNode, - deleteEdge, - shuffleTreeColor, - setTreeColorIndex, - createComment, + createTreeMapFromTreeArray, + deleteBranchPoint, deleteComment, + deleteEdge, + deleteNode, + deleteTrees, + ensureTreeNames, + getOrCreateTree, mergeTrees, - toggleAllTreesReducer, - toggleTreeGroupReducer, - addTreesAndGroups, - createTreeMapFromTreeArray, removeMissingGroupsFromTrees, - getOrCreateTree, - ensureTreeNames, setExpandedTreeGroups, + setTreeColorIndex, + shuffleTreeColor, + toggleAllTreesReducer, + toggleTreeGroupReducer, } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; -import { - getSkeletonTracing, - findTreeByNodeId, - getTree, - getTreesWithType, - getNodeAndTree, - isSkeletonLayerTransformed, -} from "oxalis/model/accessors/skeletontracing_accessor"; -import ColorGenerator from "libs/color_generator"; -import Constants, { AnnotationToolEnum, TreeTypeEnum } from "oxalis/constants"; -import Toast from "libs/toast"; -import * as Utils from "libs/utils"; -import { userSettings } from "types/schemas/user_settings.schema"; +import type { OxalisState, SkeletonTracing, Tree, TreeGroup } from "oxalis/store"; import { GroupTypeEnum, getNodeKey, } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; import type { MetadataEntryProto } 
from "types/api_flow_types"; +import { userSettings } from "types/schemas/user_settings.schema"; function SkeletonTracingReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { diff --git a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer_helpers.ts index ab66187254f..ed1896c47a2 100644 --- a/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/skeletontracing_reducer_helpers.ts @@ -1,48 +1,48 @@ import Maybe from "data.maybe"; -import _ from "lodash"; import update from "immutability-helper"; +import ColorGenerator from "libs/color_generator"; +import DiffableMap from "libs/diffable_map"; +import { V3 } from "libs/mjs"; +import * as Utils from "libs/utils"; +import _ from "lodash"; +import { type TreeType, TreeTypeEnum, type Vector3 } from "oxalis/constants"; +import Constants, { NODE_ID_REF_REGEX } from "oxalis/constants"; +import { + findTreeByNodeId, + getActiveNodeFromTree, + getActiveTree, + getActiveTreeGroup, + getSkeletonTracing, + getTree, + mapGroups, + mapGroupsToGenerator, +} from "oxalis/model/accessors/skeletontracing_accessor"; +import EdgeCollection from "oxalis/model/edge_collection"; import type { - OxalisState, - SkeletonTracing, - Edge, - Node, - MutableNode, - Tree, - MutableTree, BranchPoint, + CommentType, + Edge, MutableBranchPoint, MutableCommentType, - TreeMap, + MutableNode, + MutableNodeMap, + MutableTree, + MutableTreeGroup, MutableTreeMap, - CommentType, - TreeGroup, + Node, + OxalisState, RestrictionsAndSettings, - MutableTreeGroup, - MutableNodeMap, + SkeletonTracing, + Tree, + TreeGroup, + TreeMap, } from "oxalis/store"; import type { - ServerSkeletonTracingTree, - ServerNode, - ServerBranchPoint, MetadataEntryProto, + ServerBranchPoint, + ServerNode, + ServerSkeletonTracingTree, } from "types/api_flow_types"; -import { - 
getSkeletonTracing, - getActiveNodeFromTree, - getTree, - getActiveTree, - getActiveTreeGroup, - findTreeByNodeId, - mapGroupsToGenerator, - mapGroups, -} from "oxalis/model/accessors/skeletontracing_accessor"; -import ColorGenerator from "libs/color_generator"; -import { type TreeType, TreeTypeEnum, type Vector3 } from "oxalis/constants"; -import Constants, { NODE_ID_REF_REGEX } from "oxalis/constants"; -import DiffableMap from "libs/diffable_map"; -import EdgeCollection from "oxalis/model/edge_collection"; -import * as Utils from "libs/utils"; -import { V3 } from "libs/mjs"; import type { AdditionalCoordinate } from "types/api_flow_types"; export function generateTreeName(state: OxalisState, timestamp: number, treeId: number) { diff --git a/frontend/javascripts/oxalis/model/reducers/ui_reducer.ts b/frontend/javascripts/oxalis/model/reducers/ui_reducer.ts index f2ff54b4d7d..a35e6b62e76 100644 --- a/frontend/javascripts/oxalis/model/reducers/ui_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/ui_reducer.ts @@ -1,12 +1,12 @@ import type { Action } from "oxalis/model/actions/actions"; -import type { OxalisState } from "oxalis/store"; import { updateKey, updateKey2 } from "oxalis/model/helpers/deep_update"; import { - setToolReducer, getNextTool, getPreviousTool, + setToolReducer, } from "oxalis/model/reducers/reducer_helpers"; import { hideBrushReducer } from "oxalis/model/reducers/volumetracing_reducer_helpers"; +import type { OxalisState } from "oxalis/store"; function UiReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { diff --git a/frontend/javascripts/oxalis/model/reducers/view_mode_reducer.ts b/frontend/javascripts/oxalis/model/reducers/view_mode_reducer.ts index 8912ac9cb57..271beed14be 100644 --- a/frontend/javascripts/oxalis/model/reducers/view_mode_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/view_mode_reducer.ts @@ -1,10 +1,10 @@ import update from "immutability-helper"; -import type { Action } from 
"oxalis/model/actions/actions"; import type { Point2, Rect, Viewport } from "oxalis/constants"; import { ArbitraryViewport } from "oxalis/constants"; -import type { OxalisState, PartialCameraData } from "oxalis/store"; import { getTDViewportSize } from "oxalis/model/accessors/view_mode_accessor"; +import type { Action } from "oxalis/model/actions/actions"; import { zoomReducer } from "oxalis/model/reducers/flycam_reducer"; +import type { OxalisState, PartialCameraData } from "oxalis/store"; function ViewModeReducer(state: OxalisState, action: Action): OxalisState { switch (action.type) { diff --git a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts index 46d3bcefe84..36ceb1c4ba7 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts @@ -1,63 +1,63 @@ import update from "immutability-helper"; +import DiffableMap from "libs/diffable_map"; +import * as Utils from "libs/utils"; import { ContourModeEnum } from "oxalis/constants"; +import { + getMappingInfo, + getMaximumSegmentIdForLayer, +} from "oxalis/model/accessors/dataset_accessor"; +import { + getRequestedOrVisibleSegmentationLayer, + getSegmentationLayerForTracing, + getVisibleSegments, + getVolumeTracingById, +} from "oxalis/model/accessors/volumetracing_accessor"; import type { - EditableMapping, - OxalisState, - Segment, - SegmentGroup, - SegmentMap, - VolumeTracing, -} from "oxalis/store"; + FinishMappingInitializationAction, + SetMappingAction, + SetMappingEnabledAction, + SetMappingNameAction, +} from "oxalis/model/actions/settings_actions"; import type { - VolumeTracingAction, - UpdateSegmentAction, - SetSegmentsAction, - RemoveSegmentAction, ClickSegmentAction, + RemoveSegmentAction, + SetSegmentsAction, + UpdateSegmentAction, + VolumeTracingAction, } from "oxalis/model/actions/volumetracing_actions"; +import { 
updateKey2 } from "oxalis/model/helpers/deep_update"; import { convertServerAdditionalAxesToFrontEnd, convertServerBoundingBoxToFrontend, convertUserBoundingBoxesFromServerToFrontend, } from "oxalis/model/reducers/reducer_helpers"; import { - getRequestedOrVisibleSegmentationLayer, - getSegmentationLayerForTracing, - getVisibleSegments, - getVolumeTracingById, -} from "oxalis/model/accessors/volumetracing_accessor"; -import { - setActiveCellReducer, - createCellReducer, - updateDirectionReducer, addToLayerReducer, - resetContourReducer, + createCellReducer, hideBrushReducer, + removeMissingGroupsFromSegments, + resetContourReducer, + setActiveCellReducer, setContourTracingModeReducer, setLargestSegmentIdReducer, - updateVolumeTracing, setMappingNameReducer, - removeMissingGroupsFromSegments, + updateDirectionReducer, + updateVolumeTracing, } from "oxalis/model/reducers/volumetracing_reducer_helpers"; -import { updateKey2 } from "oxalis/model/helpers/deep_update"; -import DiffableMap from "libs/diffable_map"; -import * as Utils from "libs/utils"; -import type { AdditionalCoordinate, ServerVolumeTracing } from "types/api_flow_types"; import type { - FinishMappingInitializationAction, - SetMappingAction, - SetMappingEnabledAction, - SetMappingNameAction, -} from "oxalis/model/actions/settings_actions"; -import { - getMappingInfo, - getMaximumSegmentIdForLayer, -} from "oxalis/model/accessors/dataset_accessor"; -import { mapGroups } from "../accessors/skeletontracing_accessor"; + EditableMapping, + OxalisState, + Segment, + SegmentGroup, + SegmentMap, + VolumeTracing, +} from "oxalis/store"; import { findParentIdForGroupId, getGroupNodeKey, } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; +import type { AdditionalCoordinate, ServerVolumeTracing } from "types/api_flow_types"; +import { mapGroups } from "../accessors/skeletontracing_accessor"; import { sanitizeMetadata } from "./skeletontracing_reducer"; type SegmentUpdateInfo = | { diff --git 
a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.ts index 96e6dc11da0..06e63b6b259 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.ts @@ -1,27 +1,27 @@ import update from "immutability-helper"; import { type ContourMode, - OrthoViews, type OrthoViewWithoutTD, + OrthoViews, type Vector3, } from "oxalis/constants"; +import { + getSegmentationLayerForTracing, + isVolumeAnnotationDisallowedForZoom, +} from "oxalis/model/accessors/volumetracing_accessor"; +import { updateKey } from "oxalis/model/helpers/deep_update"; +import { setDirectionReducer } from "oxalis/model/reducers/flycam_reducer"; import type { EditableMapping, - MappingType, LabelAction, + MappingType, OxalisState, - VolumeTracing, SegmentGroup, SegmentMap, + VolumeTracing, } from "oxalis/store"; -import { - getSegmentationLayerForTracing, - isVolumeAnnotationDisallowedForZoom, -} from "oxalis/model/accessors/volumetracing_accessor"; -import { setDirectionReducer } from "oxalis/model/reducers/flycam_reducer"; -import { updateKey } from "oxalis/model/helpers/deep_update"; -import { mapGroupsToGenerator } from "../accessors/skeletontracing_accessor"; import { getMaximumSegmentIdForLayer } from "../accessors/dataset_accessor"; +import { mapGroupsToGenerator } from "../accessors/skeletontracing_accessor"; export function updateVolumeTracing( state: OxalisState, diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index 255a47fbcfb..798675763db 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -1,54 +1,54 @@ -import React from "react"; +import type { EditableAnnotation } from "admin/admin_rest_api"; +import { 
acquireAnnotationMutex, editAnnotation } from "admin/admin_rest_api"; +import { Button } from "antd"; +import ErrorHandling from "libs/error_handling"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; import _ from "lodash"; +import messages from "messages"; +import constants, { MappingStatusEnum } from "oxalis/constants"; +import { getMappingInfo, is2dDataset } from "oxalis/model/accessors/dataset_accessor"; +import { getActiveMagIndexForLayer } from "oxalis/model/accessors/flycam_accessor"; import type { Action } from "oxalis/model/actions/actions"; import { type EditAnnotationLayerAction, - setAnnotationAllowUpdateAction, type SetAnnotationDescriptionAction, - setBlockedByUserAction, type SetOthersMayEditForAnnotationAction, + setAnnotationAllowUpdateAction, + setBlockedByUserAction, } from "oxalis/model/actions/annotation_actions"; -import * as Utils from "libs/utils"; -import type { EditableAnnotation } from "admin/admin_rest_api"; -import type { ActionPattern } from "redux-saga/effects"; -import { editAnnotation, acquireAnnotationMutex } from "admin/admin_rest_api"; +import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; import { SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, } from "oxalis/model/sagas/save_saga_constants"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { Model } from "oxalis/singletons"; +import Store from "oxalis/store"; +import { determineLayout } from "oxalis/view/layouting/default_layout_configs"; +import { is3dViewportMaximized } from "oxalis/view/layouting/flex_layout_helper"; +import { getLastActiveLayout, getLayoutConfig } from "oxalis/view/layouting/layout_persistence"; +import React from "react"; +import type { ActionPattern } from "redux-saga/effects"; import { - takeLatest, - take, - retry, - delay, call, - put, - fork, - 
takeEvery, cancel, cancelled, + delay, + fork, + put, + retry, + take, + takeEvery, + takeLatest, } from "typed-redux-saga"; -import { select } from "oxalis/model/sagas/effect-generators"; -import { getMappingInfo, is2dDataset } from "oxalis/model/accessors/dataset_accessor"; -import { getActiveMagIndexForLayer } from "oxalis/model/accessors/flycam_accessor"; -import { Model } from "oxalis/singletons"; -import Store from "oxalis/store"; -import Toast from "libs/toast"; -import constants, { MappingStatusEnum } from "oxalis/constants"; -import messages from "messages"; import type { APIUserCompact } from "types/api_flow_types"; -import { Button } from "antd"; -import ErrorHandling from "libs/error_handling"; import { mayEditAnnotationProperties } from "../accessors/annotation_accessor"; -import { determineLayout } from "oxalis/view/layouting/default_layout_configs"; -import { getLastActiveLayout, getLayoutConfig } from "oxalis/view/layouting/layout_persistence"; -import { is3dViewportMaximized } from "oxalis/view/layouting/flex_layout_helper"; import { needsLocalHdf5Mapping } from "../accessors/volumetracing_accessor"; import { pushSaveQueueTransaction } from "../actions/save_actions"; -import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./update_actions"; -import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import { ensureWkReady } from "./ready_sagas"; +import { updateAnnotationLayerName, updateMetadataOfAnnotation } from "./update_actions"; /* Note that this must stay in sync with the back-end constant MaxMagForAgglomerateMapping compare https://github.com/scalableminds/webknossos/issues/5223. 
diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts b/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts index 480df8f7071..adf50f50969 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/annotation_tool_saga.ts @@ -1,16 +1,16 @@ -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { select } from "oxalis/model/sagas/effect-generators"; -import { take, call, put } from "typed-redux-saga"; +import { AnnotationToolEnum, MeasurementTools } from "oxalis/constants"; +import { getToolClassForAnnotationTool } from "oxalis/controller/combinations/tool_controls"; +import getSceneController from "oxalis/controller/scene_controller_provider"; import { - type SetToolAction, type CycleToolAction, + type SetToolAction, hideMeasurementTooltipAction, setIsMeasuringAction, } from "oxalis/model/actions/ui_actions"; import { getNextTool } from "oxalis/model/reducers/reducer_helpers"; -import { getToolClassForAnnotationTool } from "oxalis/controller/combinations/tool_controls"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import { AnnotationToolEnum, MeasurementTools } from "oxalis/constants"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; +import { call, put, take } from "typed-redux-saga"; import { ensureWkReady } from "./ready_sagas"; export function* watchToolDeselection(): Saga { diff --git a/frontend/javascripts/oxalis/model/sagas/clip_histogram_saga.ts b/frontend/javascripts/oxalis/model/sagas/clip_histogram_saga.ts index b5fc6dc2c94..c1342a0da37 100644 --- a/frontend/javascripts/oxalis/model/sagas/clip_histogram_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/clip_histogram_saga.ts @@ -1,13 +1,13 @@ -import Store from "oxalis/store"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { takeEvery } from 
"typed-redux-saga"; -import type { ClipHistogramAction } from "oxalis/model/actions/settings_actions"; -import { updateLayerSettingAction } from "oxalis/model/actions/settings_actions"; import Toast from "libs/toast"; import { OrthoViews, type Vector3 } from "oxalis/constants"; -import { getConstructorForElementClass } from "oxalis/model/bucket_data_handling/bucket"; import { getLayerByName } from "oxalis/model/accessors/dataset_accessor"; +import type { ClipHistogramAction } from "oxalis/model/actions/settings_actions"; +import { updateLayerSettingAction } from "oxalis/model/actions/settings_actions"; +import { getConstructorForElementClass } from "oxalis/model/bucket_data_handling/bucket"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; import { api } from "oxalis/singletons"; +import Store from "oxalis/store"; +import { takeEvery } from "typed-redux-saga"; import { getActiveMagIndexForLayer } from "../accessors/flycam_accessor"; function onThresholdChange(layerName: string, [firstVal, secVal]: [number, number]) { diff --git a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts index 9ba779b6de3..037d7730c55 100644 --- a/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/dataset_saga.ts @@ -1,24 +1,24 @@ -import { call, put, takeEvery, takeLatest } from "typed-redux-saga"; +import { V3 } from "libs/mjs"; +import Toast from "libs/toast"; +import { sleep } from "libs/utils"; import { sum } from "lodash"; +import messages from "messages"; +import { Identity4x4 } from "oxalis/constants"; import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from "oxalis/model/sagas/effect-generators"; -import { sleep } from "libs/utils"; -import Toast from "libs/toast"; -import messages from "messages"; +import { hasSegmentIndex } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import { call, put, 
takeEvery, takeLatest } from "typed-redux-saga"; import { getEnabledLayers, getLayerByName, - getMaybeSegmentIndexAvailability, getMagInfo, + getMaybeSegmentIndexAvailability, getTransformsForLayer, invertAndTranspose, isLayerVisible, } from "../accessors/dataset_accessor"; import { getCurrentMag } from "../accessors/flycam_accessor"; import { getViewportExtents } from "../accessors/view_mode_accessor"; -import { V3 } from "libs/mjs"; -import { Identity4x4 } from "oxalis/constants"; -import { hasSegmentIndex } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; import { type EnsureSegmentIndexIsLoadedAction, setLayerHasSegmentIndexAction, diff --git a/frontend/javascripts/oxalis/model/sagas/effect-generators.ts b/frontend/javascripts/oxalis/model/sagas/effect-generators.ts index 60233bc273b..3835e1c1ea9 100644 --- a/frontend/javascripts/oxalis/model/sagas/effect-generators.ts +++ b/frontend/javascripts/oxalis/model/sagas/effect-generators.ts @@ -1,9 +1,9 @@ +import type { Action } from "oxalis/model/actions/actions"; // The typings are defined in effect-generators.js.flow. import type { OxalisState } from "oxalis/store"; -import type { Action } from "oxalis/model/actions/actions"; -import { select as _select, take as _take } from "typed-redux-saga"; import type { Channel } from "redux-saga"; import type { ActionPattern } from "redux-saga/effects"; +import { select as _select, take as _take } from "typed-redux-saga"; // Ensures that the type of state is known. 
Otherwise, // a statement such as diff --git a/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts b/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts index 9cee9a4ba36..1e583853a0a 100644 --- a/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/load_histogram_data_saga.ts @@ -1,15 +1,15 @@ -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { select } from "oxalis/model/sagas/effect-generators"; -import { call, takeEvery, put } from "typed-redux-saga"; +import { getHistogramForLayer } from "admin/admin_rest_api"; +import type { Vector2 } from "oxalis/constants"; import { + type ReloadHistogramAction, setHistogramDataForLayerAction, updateLayerSettingAction, - type ReloadHistogramAction, } from "oxalis/model/actions/settings_actions"; -import { getHistogramForLayer } from "admin/admin_rest_api"; import type DataLayer from "oxalis/model/data_layer"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; import { Model } from "oxalis/singletons"; -import type { Vector2 } from "oxalis/constants"; +import { call, put, takeEvery } from "typed-redux-saga"; import { ensureWkReady } from "./ready_sagas"; export default function* loadHistogramDataSaga(): Saga { diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index d26dabd183b..d173e1dbdf5 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -1,49 +1,51 @@ -import _ from "lodash"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { - all, - call, - cancel, - fork, - takeEvery, - takeLatest, - take, - put, - race, - actionChannel, - flush, -} from "typed-redux-saga"; -import { api } from "oxalis/singletons"; -import { buffers, eventChannel } from 
"redux-saga"; -import { select } from "oxalis/model/sagas/effect-generators"; -import { message } from "antd"; -import type { - OptionalMappingProperties, - SetMappingAction, -} from "oxalis/model/actions/settings_actions"; -import { - clearMappingAction, - finishMappingInitializationAction, - setMappingAction, -} from "oxalis/model/actions/settings_actions"; import { fetchMapping, - getMappingsForDatasetLayer, getAgglomeratesForDatasetLayer, getAgglomeratesForSegmentsFromDatastore, getAgglomeratesForSegmentsFromTracingstore, + getMappingsForDatasetLayer, } from "admin/admin_rest_api"; -import type { APIDataLayer, APIMapping } from "types/api_flow_types"; -import { - type EnsureLayerMappingsAreLoadedAction, - setLayerMappingsAction, -} from "oxalis/model/actions/dataset_actions"; +import { message } from "antd"; +import ErrorHandling from "libs/error_handling"; +import Toast from "libs/toast"; +import { fastDiffSetAndMap, sleep } from "libs/utils"; +import _ from "lodash"; +import { MappingStatusEnum } from "oxalis/constants"; import { getLayerByName, getMappingInfo, getSegmentationLayers, getVisibleSegmentationLayer, } from "oxalis/model/accessors/dataset_accessor"; +import { + type BucketRetrievalSource, + getBucketRetrievalSourceFn, + getEditableMappingForVolumeTracingId, + needsLocalHdf5Mapping as getNeedsLocalHdf5Mapping, + isMappingActivationAllowed, +} from "oxalis/model/accessors/volumetracing_accessor"; +import { + type EnsureLayerMappingsAreLoadedAction, + setLayerMappingsAction, +} from "oxalis/model/actions/dataset_actions"; +import type { + OptionalMappingProperties, + SetMappingAction, +} from "oxalis/model/actions/settings_actions"; +import { + clearMappingAction, + finishMappingInitializationAction, + setMappingAction, +} from "oxalis/model/actions/settings_actions"; +import { + MAPPING_MESSAGE_KEY, + setCacheResultForDiffMappings, +} from "oxalis/model/bucket_data_handling/mappings"; +import type { Saga } from 
"oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; +import { jsHsv2rgb } from "oxalis/shaders/utils.glsl"; +import { api } from "oxalis/singletons"; +import { Model } from "oxalis/singletons"; import type { ActiveMappingInfo, Mapping, @@ -51,27 +53,25 @@ import type { NumberLike, NumberLikeMap, } from "oxalis/store"; -import ErrorHandling from "libs/error_handling"; -import { - MAPPING_MESSAGE_KEY, - setCacheResultForDiffMappings, -} from "oxalis/model/bucket_data_handling/mappings"; -import { Model } from "oxalis/singletons"; +import { buffers, eventChannel } from "redux-saga"; +import type { ActionPattern } from "redux-saga/effects"; import { - isMappingActivationAllowed, - getEditableMappingForVolumeTracingId, - needsLocalHdf5Mapping as getNeedsLocalHdf5Mapping, - getBucketRetrievalSourceFn, - type BucketRetrievalSource, -} from "oxalis/model/accessors/volumetracing_accessor"; -import Toast from "libs/toast"; -import { jsHsv2rgb } from "oxalis/shaders/utils.glsl"; + actionChannel, + all, + call, + cancel, + flush, + fork, + put, + race, + take, + takeEvery, + takeLatest, +} from "typed-redux-saga"; +import type { APIDataLayer, APIMapping } from "types/api_flow_types"; +import type { Action } from "../actions/actions"; import { updateSegmentAction } from "../actions/volumetracing_actions"; -import { MappingStatusEnum } from "oxalis/constants"; import type DataCube from "../bucket_data_handling/data_cube"; -import { fastDiffSetAndMap, sleep } from "libs/utils"; -import type { Action } from "../actions/actions"; -import type { ActionPattern } from "redux-saga/effects"; import { listenToStoreProperty } from "../helpers/listener_helpers"; import { ensureWkReady } from "./ready_sagas"; diff --git a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts index bc56aad1fa3..bdb969edf50 100644 --- a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts +++ 
b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts @@ -1,87 +1,87 @@ import { saveAs } from "file-saver"; -import _ from "lodash"; -import { V3 } from "libs/mjs"; -import { areVec3AlmostEqual, chunkDynamically, sleep } from "libs/utils"; -import ErrorHandling from "libs/error_handling"; -import type { APIDataset, APIMeshFile, APISegmentationLayer } from "types/api_flow_types"; import { mergeBufferGeometries } from "libs/BufferGeometryUtils"; import Deferred from "libs/async/deferred"; +import ErrorHandling from "libs/error_handling"; +import { V3 } from "libs/mjs"; +import { areVec3AlmostEqual, chunkDynamically, sleep } from "libs/utils"; +import _ from "lodash"; import type { ActionPattern } from "redux-saga/effects"; +import type { APIDataset, APIMeshFile, APISegmentationLayer } from "types/api_flow_types"; -import Store from "oxalis/store"; +import { + computeAdHocMesh, + getBucketPositionsForAdHocMesh, + getMeshfilesForDatasetLayer, + meshApi, + sendAnalyticsEvent, +} from "admin/admin_rest_api"; +import ThreeDMap from "libs/ThreeDMap"; +import processTaskWithPool from "libs/async/task_pool"; +import { getDracoLoader } from "libs/draco"; +import exportToStl from "libs/stl_exporter"; +import Toast from "libs/toast"; +import Zip from "libs/zipjs_wrapper"; +import messages from "messages"; +import { WkDevFlags } from "oxalis/api/wk_dev"; +import type { Vector3 } from "oxalis/constants"; +import { AnnotationToolEnum, MappingStatusEnum } from "oxalis/constants"; +import getSceneController from "oxalis/controller/scene_controller_provider"; +import type { BufferGeometryWithInfo } from "oxalis/controller/segment_mesh_controller"; import { getMagInfo, getMappingInfo, - getVisibleSegmentationLayer, getSegmentationLayerByName, + getVisibleSegmentationLayer, } from "oxalis/model/accessors/dataset_accessor"; import { - type LoadAdHocMeshAction, - type LoadPrecomputedMeshAction, - type AdHocMeshInfo, - loadPrecomputedMeshAction, -} from 
"oxalis/model/actions/segmentation_actions"; + getActiveSegmentationTracing, + getEditableMappingForVolumeTracingId, + getMeshInfoForSegment, + getTracingForSegmentationLayer, +} from "oxalis/model/accessors/volumetracing_accessor"; import type { Action } from "oxalis/model/actions/actions"; -import type { Vector3 } from "oxalis/constants"; -import { AnnotationToolEnum, MappingStatusEnum } from "oxalis/constants"; import { - type UpdateMeshVisibilityAction, - type RemoveMeshAction, + type MaybeFetchMeshFilesAction, type RefreshMeshAction, + type RemoveMeshAction, type TriggerMeshDownloadAction, - type MaybeFetchMeshFilesAction, - updateMeshFileListAction, - updateCurrentMeshFileAction, - dispatchMaybeFetchMeshFilesAsync, - removeMeshAction, + type TriggerMeshesDownloadAction, + type UpdateMeshVisibilityAction, addAdHocMeshAction, addPrecomputedMeshAction, + dispatchMaybeFetchMeshFilesAsync, finishedLoadingMeshAction, + removeMeshAction, startedLoadingMeshAction, - type TriggerMeshesDownloadAction, + updateCurrentMeshFileAction, + updateMeshFileListAction, updateMeshVisibilityAction, } from "oxalis/model/actions/annotation_actions"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { select } from "oxalis/model/sagas/effect-generators"; -import { actionChannel, takeEvery, call, take, race, put, all } from "typed-redux-saga"; -import { stlMeshConstants } from "oxalis/view/right-border-tabs/segments_tab/segments_view"; +import { saveNowAction } from "oxalis/model/actions/save_actions"; import { - computeAdHocMesh, - sendAnalyticsEvent, - meshApi, - getMeshfilesForDatasetLayer, - getBucketPositionsForAdHocMesh, -} from "admin/admin_rest_api"; -import { zoomedAddressToAnotherZoomStepWithInfo } from "oxalis/model/helpers/position_converter"; + type AdHocMeshInfo, + type LoadAdHocMeshAction, + type LoadPrecomputedMeshAction, + loadPrecomputedMeshAction, +} from "oxalis/model/actions/segmentation_actions"; import type DataLayer from 
"oxalis/model/data_layer"; +import { zoomedAddressToAnotherZoomStepWithInfo } from "oxalis/model/helpers/position_converter"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; import { Model } from "oxalis/singletons"; -import ThreeDMap from "libs/ThreeDMap"; -import exportToStl from "libs/stl_exporter"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import { - getActiveSegmentationTracing, - getEditableMappingForVolumeTracingId, - getMeshInfoForSegment, - getTracingForSegmentationLayer, -} from "oxalis/model/accessors/volumetracing_accessor"; -import { saveNowAction } from "oxalis/model/actions/save_actions"; -import Toast from "libs/toast"; -import { getDracoLoader } from "libs/draco"; -import messages from "messages"; -import processTaskWithPool from "libs/async/task_pool"; +import Store from "oxalis/store"; +import { stlMeshConstants } from "oxalis/view/right-border-tabs/segments_tab/segments_view"; import { getBaseSegmentationName } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import { actionChannel, all, call, put, race, take, takeEvery } from "typed-redux-saga"; +import type { AdditionalCoordinate } from "types/api_flow_types"; +import { getAdditionalCoordinatesAsString } from "../accessors/flycam_accessor"; +import type { FlycamAction } from "../actions/flycam_actions"; import type { BatchUpdateGroupsAndSegmentsAction, RemoveSegmentAction, UpdateSegmentAction, } from "../actions/volumetracing_actions"; import type { MagInfo } from "../helpers/mag_info"; -import type { AdditionalCoordinate } from "types/api_flow_types"; -import Zip from "libs/zipjs_wrapper"; -import type { FlycamAction } from "../actions/flycam_actions"; -import { getAdditionalCoordinatesAsString } from "../accessors/flycam_accessor"; -import type { BufferGeometryWithInfo } from "oxalis/controller/segment_mesh_controller"; -import { WkDevFlags } from 
"oxalis/api/wk_dev"; import { ensureSceneControllerReady, ensureWkReady } from "./ready_sagas"; export const NO_LOD_MESH_INDEX = -1; diff --git a/frontend/javascripts/oxalis/model/sagas/min_cut_saga.ts b/frontend/javascripts/oxalis/model/sagas/min_cut_saga.ts index 3453390fdeb..d8f0b4e5d1b 100644 --- a/frontend/javascripts/oxalis/model/sagas/min_cut_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/min_cut_saga.ts @@ -1,28 +1,28 @@ +import { V3 } from "libs/mjs"; +import createProgressCallback from "libs/progress_callback"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import window from "libs/window"; import _ from "lodash"; -import type { Action } from "oxalis/model/actions/actions"; import type { BoundingBoxType, TypedArray, Vector3 } from "oxalis/constants"; -import type { MutableNode, Node } from "oxalis/store"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { call, put } from "typed-redux-saga"; -import { select } from "oxalis/model/sagas/effect-generators"; -import { V3 } from "libs/mjs"; -import { addUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; +import { getMagInfo } from "oxalis/model/accessors/dataset_accessor"; import { enforceActiveVolumeTracing, getActiveSegmentationTracingLayer, } from "oxalis/model/accessors/volumetracing_accessor"; +import type { Action } from "oxalis/model/actions/actions"; +import { addUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; import { finishAnnotationStrokeAction } from "oxalis/model/actions/volumetracing_actions"; -import { getMagInfo } from "oxalis/model/accessors/dataset_accessor"; -import { takeEveryUnlessBusy } from "oxalis/model/sagas/saga_helpers"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; -import Toast from "libs/toast"; -import * as Utils from "libs/utils"; -import createProgressCallback from "libs/progress_callback"; +import type { Saga } from 
"oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; +import { takeEveryUnlessBusy } from "oxalis/model/sagas/saga_helpers"; import { api } from "oxalis/singletons"; -import window from "libs/window"; +import type { MutableNode, Node } from "oxalis/store"; +import { call, put } from "typed-redux-saga"; import type { APISegmentationLayer } from "types/api_flow_types"; -import type { MagInfo } from "../helpers/mag_info"; import type { AdditionalCoordinate } from "types/api_flow_types"; +import type { MagInfo } from "../helpers/mag_info"; // By default, a new bounding box is created around // the seed nodes with a padding. Within the bounding box diff --git a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts index d06315bedd6..6308531473c 100644 --- a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.ts @@ -1,26 +1,26 @@ +import { WkDevFlags } from "oxalis/api/wk_dev"; +import type { Vector3 } from "oxalis/constants"; +import constants from "oxalis/constants"; +import { getMagInfo, isLayerVisible } from "oxalis/model/accessors/dataset_accessor"; +import { + getActiveMagIndexForLayer, + getAreasFromState, + getPosition, +} from "oxalis/model/accessors/flycam_accessor"; import { FlycamActions } from "oxalis/model/actions/flycam_actions"; -import type { OxalisState } from "oxalis/store"; import { PrefetchStrategyArbitrary } from "oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary"; import { + ContentTypes as PrefetchContentTypes, PrefetchStrategySkeleton, PrefetchStrategyVolume, - ContentTypes as PrefetchContentTypes, } from "oxalis/model/bucket_data_handling/prefetch_strategy_plane"; import { getGlobalDataConnectionInfo } from "oxalis/model/data_connection_info"; +import type DataLayer from "oxalis/model/data_layer"; import type { Saga } from 
"oxalis/model/sagas/effect-generators"; -import { throttle, call } from "typed-redux-saga"; import { select } from "oxalis/model/sagas/effect-generators"; -import { - getPosition, - getActiveMagIndexForLayer, - getAreasFromState, -} from "oxalis/model/accessors/flycam_accessor"; -import { isLayerVisible, getMagInfo } from "oxalis/model/accessors/dataset_accessor"; -import type DataLayer from "oxalis/model/data_layer"; import { Model } from "oxalis/singletons"; -import type { Vector3 } from "oxalis/constants"; -import constants from "oxalis/constants"; -import { WkDevFlags } from "oxalis/api/wk_dev"; +import type { OxalisState } from "oxalis/store"; +import { call, throttle } from "typed-redux-saga"; import { ensureWkReady } from "./ready_sagas"; const PREFETCH_THROTTLE_TIME = 50; diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index b4d744c2dc1..5d9ba7af1ef 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -1,31 +1,26 @@ -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { takeEvery, put, call, all, spawn } from "typed-redux-saga"; -import { select, take } from "oxalis/model/sagas/effect-generators"; +import { + type NeighborInfo, + getAgglomeratesForSegmentsFromTracingstore, + getEdgesForAgglomerateMinCut, + getNeighborsForAgglomerateNode, + getPositionForSegmentInAgglomerate, +} from "admin/admin_rest_api"; +import { V3 } from "libs/mjs"; +import Toast from "libs/toast"; +import { SoftError, isBigInt, isNumberMap } from "libs/utils"; +import _ from "lodash"; import { AnnotationToolEnum, MappingStatusEnum, TreeTypeEnum, type Vector3, } from "oxalis/constants"; -import Toast from "libs/toast"; -import { - type CreateNodeAction, - type DeleteNodeAction, - deleteEdgeAction, - setTreeNameAction, - type SetNodePositionAction, -} from 
"oxalis/model/actions/skeletontracing_actions"; +import { getSegmentIdForPositionAsync } from "oxalis/controller/combinations/volume_handlers"; import { - initializeEditableMappingAction, - removeSegmentAction, - setHasEditableMappingAction, - updateSegmentAction, -} from "oxalis/model/actions/volumetracing_actions"; -import type { - MinCutAgglomerateWithPositionAction, - ProofreadAtPositionAction, - ProofreadMergeAction, -} from "oxalis/model/actions/proofread_actions"; + getLayerByName, + getMagInfo, + getMappingInfo, +} from "oxalis/model/accessors/dataset_accessor"; import { enforceSkeletonTracing, findTreeByNodeId, @@ -33,53 +28,58 @@ import { getTreeNameForAgglomerateSkeleton, isSkeletonLayerTransformed, } from "oxalis/model/accessors/skeletontracing_accessor"; -import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; import { - splitAgglomerate, - mergeAgglomerate, - type UpdateActionWithoutIsolationRequirement, -} from "oxalis/model/sagas/update_actions"; -import { Model, api, Store } from "oxalis/singletons"; -import { - getActiveSegmentationTracingLayer, getActiveSegmentationTracing, - getSegmentsForLayer, - getMeshInfoForSegment, + getActiveSegmentationTracingLayer, getEditableMappingForVolumeTracingId, + getMeshInfoForSegment, getSegmentName, + getSegmentsForLayer, } from "oxalis/model/accessors/volumetracing_accessor"; import { - getLayerByName, - getMappingInfo, - getMagInfo, -} from "oxalis/model/accessors/dataset_accessor"; -import { - type NeighborInfo, - getAgglomeratesForSegmentsFromTracingstore, - getEdgesForAgglomerateMinCut, - getNeighborsForAgglomerateNode, - getPositionForSegmentInAgglomerate, -} from "admin/admin_rest_api"; -import { setMappingAction, setMappingNameAction } from "oxalis/model/actions/settings_actions"; -import { getSegmentIdForPositionAsync } from "oxalis/controller/combinations/volume_handlers"; + dispatchMaybeFetchMeshFilesAsync, + refreshMeshAction, + removeMeshAction, +} from 
"oxalis/model/actions/annotation_actions"; +import type { + MinCutAgglomerateWithPositionAction, + ProofreadAtPositionAction, + ProofreadMergeAction, +} from "oxalis/model/actions/proofread_actions"; +import { pushSaveQueueTransaction } from "oxalis/model/actions/save_actions"; import { loadAdHocMeshAction, loadPrecomputedMeshAction, } from "oxalis/model/actions/segmentation_actions"; -import { V3 } from "libs/mjs"; +import { setMappingAction, setMappingNameAction } from "oxalis/model/actions/settings_actions"; import { - dispatchMaybeFetchMeshFilesAsync, - refreshMeshAction, - removeMeshAction, -} from "oxalis/model/actions/annotation_actions"; + type CreateNodeAction, + type DeleteNodeAction, + type SetNodePositionAction, + deleteEdgeAction, + setTreeNameAction, +} from "oxalis/model/actions/skeletontracing_actions"; +import { + initializeEditableMappingAction, + removeSegmentAction, + setHasEditableMappingAction, + updateSegmentAction, +} from "oxalis/model/actions/volumetracing_actions"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select, take } from "oxalis/model/sagas/effect-generators"; +import { + type UpdateActionWithoutIsolationRequirement, + mergeAgglomerate, + splitAgglomerate, +} from "oxalis/model/sagas/update_actions"; +import { Model, Store, api } from "oxalis/singletons"; import type { ActiveMappingInfo, Mapping, NumberLikeMap, Tree, VolumeTracing } from "oxalis/store"; -import _ from "lodash"; +import { all, call, put, spawn, takeEvery } from "typed-redux-saga"; import type { AdditionalCoordinate, ServerEditableMapping } from "types/api_flow_types"; -import { takeEveryUnlessBusy } from "./saga_helpers"; -import type { Action } from "../actions/actions"; -import { isBigInt, isNumberMap, SoftError } from "libs/utils"; import { getCurrentMag } from "../accessors/flycam_accessor"; +import type { Action } from "../actions/actions"; import { ensureWkReady } from "./ready_sagas"; +import { takeEveryUnlessBusy } from 
"./saga_helpers"; function runSagaAndCatchSoftError(saga: (...args: any[]) => Saga) { return function* (...args: any[]) { diff --git a/frontend/javascripts/oxalis/model/sagas/quick_select_heuristic_saga.ts b/frontend/javascripts/oxalis/model/sagas/quick_select_heuristic_saga.ts index 8a6956f8cdb..b8bbddc5217 100644 --- a/frontend/javascripts/oxalis/model/sagas/quick_select_heuristic_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/quick_select_heuristic_saga.ts @@ -1,6 +1,7 @@ +import PriorityQueue from "js-priority-queue"; import _ from "lodash"; -import ops from "ndarray-ops"; import moments from "ndarray-moments"; +import ops from "ndarray-ops"; import { ContourModeEnum, type OrthoView, @@ -10,12 +11,14 @@ import { type Vector2, type Vector3, } from "oxalis/constants"; -import PriorityQueue from "js-priority-queue"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { call, put, race, take } from "typed-redux-saga"; -import { select } from "oxalis/model/sagas/effect-generators"; +import { sendAnalyticsEvent } from "admin/admin_rest_api"; +import morphology from "ball-morphology"; import { V2, V3 } from "libs/mjs"; +import Toast from "libs/toast"; +import { clamp, map3, take2 } from "libs/utils"; +import ndarray from "ndarray"; +import type { QuickSelectGeometry } from "oxalis/geometries/helper_geometries"; import { getActiveSegmentationTracing, getSegmentationLayerForTracing, @@ -31,27 +34,17 @@ import { updateSegmentAction, } from "oxalis/model/actions/volumetracing_actions"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; import { api } from "oxalis/singletons"; -import ndarray from "ndarray"; -import morphology from "ball-morphology"; -import Toast from "libs/toast"; import type { DatasetLayerConfiguration, OxalisState, QuickSelectConfig, VolumeTracing, } from "oxalis/store"; 
-import type { QuickSelectGeometry } from "oxalis/geometries/helper_geometries"; -import { clamp, map3, take2 } from "libs/utils"; +import { call, put, race, take } from "typed-redux-saga"; import type { APIDataLayer, APIDataset } from "types/api_flow_types"; -import { sendAnalyticsEvent } from "admin/admin_rest_api"; -import { copyNdArray } from "./volume/volume_interpolation_saga"; -import { createVolumeLayer, labelWithVoxelBuffer2D } from "./volume/helpers"; -import { - type EnterAction, - type EscapeAction, - showQuickSelectSettingsAction, -} from "../actions/ui_actions"; import { getDefaultValueRangeOfLayer, getEnabledColorLayers, @@ -59,9 +52,16 @@ import { getMagInfo, getTransformsForLayer, } from "../accessors/dataset_accessor"; -import Dimensions, { type DimensionIndices } from "../dimensions"; import { getActiveMagIndexForLayer } from "../accessors/flycam_accessor"; import { updateUserSettingAction } from "../actions/settings_actions"; +import { + type EnterAction, + type EscapeAction, + showQuickSelectSettingsAction, +} from "../actions/ui_actions"; +import Dimensions, { type DimensionIndices } from "../dimensions"; +import { createVolumeLayer, labelWithVoxelBuffer2D } from "./volume/helpers"; +import { copyNdArray } from "./volume/volume_interpolation_saga"; const TOAST_KEY = "QUICKSELECT_PREVIEW_MESSAGE"; diff --git a/frontend/javascripts/oxalis/model/sagas/quick_select_ml_saga.ts b/frontend/javascripts/oxalis/model/sagas/quick_select_ml_saga.ts index cbc1db4181c..b4cd20318f3 100644 --- a/frontend/javascripts/oxalis/model/sagas/quick_select_ml_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/quick_select_ml_saga.ts @@ -1,26 +1,26 @@ +import { getSamMask, sendAnalyticsEvent } from "admin/admin_rest_api"; +import { estimateBBoxInMask } from "libs/find_bounding_box_in_nd"; +import { V3 } from "libs/mjs"; +import Toast from "libs/toast"; +import { map3, sleep } from "libs/utils"; import _ from "lodash"; import ndarray, { type NdArray } from "ndarray"; 
+import { WkDevFlags } from "oxalis/api/wk_dev"; import type { OrthoView, TypedArrayWithoutBigInt, Vector2, Vector3 } from "oxalis/constants"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { call, cancel, fork, put } from "typed-redux-saga"; -import { select } from "oxalis/model/sagas/effect-generators"; -import { V3 } from "libs/mjs"; import type { ComputeQuickSelectForPointAction, ComputeQuickSelectForRectAction, } from "oxalis/model/actions/volumetracing_actions"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; -import Toast from "libs/toast"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; import type { OxalisState } from "oxalis/store"; -import { map3, sleep } from "libs/utils"; -import type { AdditionalCoordinate, APIDataset } from "types/api_flow_types"; -import { getSamMask, sendAnalyticsEvent } from "admin/admin_rest_api"; +import { call, cancel, fork, put } from "typed-redux-saga"; +import type { APIDataset, AdditionalCoordinate } from "types/api_flow_types"; +import { getPlaneExtentInVoxelFromStore } from "../accessors/view_mode_accessor"; +import { setGlobalProgressAction } from "../actions/ui_actions"; import Dimensions from "../dimensions"; import { finalizeQuickSelectForSlice, prepareQuickSelect } from "./quick_select_heuristic_saga"; -import { setGlobalProgressAction } from "../actions/ui_actions"; -import { estimateBBoxInMask } from "libs/find_bounding_box_in_nd"; -import { getPlaneExtentInVoxelFromStore } from "../accessors/view_mode_accessor"; -import { WkDevFlags } from "oxalis/api/wk_dev"; const MAXIMUM_MASK_BASE = 1024; diff --git a/frontend/javascripts/oxalis/model/sagas/quick_select_saga.ts b/frontend/javascripts/oxalis/model/sagas/quick_select_saga.ts index 791d51e9211..7b668a0e6b3 100644 --- a/frontend/javascripts/oxalis/model/sagas/quick_select_saga.ts +++ 
b/frontend/javascripts/oxalis/model/sagas/quick_select_saga.ts @@ -1,20 +1,20 @@ import ErrorHandling from "libs/error_handling"; -import { type Saga, select } from "oxalis/model/sagas/effect-generators"; -import { call, put, takeEvery } from "typed-redux-saga"; +import features from "features"; +import Toast from "libs/toast"; import type { ComputeQuickSelectForPointAction, ComputeQuickSelectForRectAction, } from "oxalis/model/actions/volumetracing_actions"; -import Toast from "libs/toast"; -import features from "features"; +import { type Saga, select } from "oxalis/model/sagas/effect-generators"; +import { call, put, takeEvery } from "typed-redux-saga"; +import getSceneController from "oxalis/controller/scene_controller_provider"; +import type { VolumeTracing } from "oxalis/store"; +import { getActiveSegmentationTracing } from "../accessors/volumetracing_accessor"; import { setBusyBlockingInfoAction, setQuickSelectStateAction } from "../actions/ui_actions"; import performQuickSelectHeuristic from "./quick_select_heuristic_saga"; import performQuickSelectML from "./quick_select_ml_saga"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import { getActiveSegmentationTracing } from "../accessors/volumetracing_accessor"; -import type { VolumeTracing } from "oxalis/store"; import { requestBucketModificationInVolumeTracing } from "./saga_helpers"; function* shouldUseHeuristic() { diff --git a/frontend/javascripts/oxalis/model/sagas/root_saga.ts b/frontend/javascripts/oxalis/model/sagas/root_saga.ts index e42faad6d62..e38d681e0e8 100644 --- a/frontend/javascripts/oxalis/model/sagas/root_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/root_saga.ts @@ -1,25 +1,25 @@ -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { all, call, cancel, fork, take, takeEvery } from "typed-redux-saga"; +import ErrorHandling from "libs/error_handling"; import { alert } from "libs/window"; -import VolumetracingSagas from 
"oxalis/model/sagas/volumetracing_saga"; -import SaveSagas, { toggleErrorHighlighting } from "oxalis/model/sagas/save_saga"; -import UndoSaga from "oxalis/model/sagas/undo_saga"; import AnnotationSagas from "oxalis/model/sagas/annotation_saga"; -import { watchDataRelevantChanges } from "oxalis/model/sagas/prefetch_saga"; -import SkeletontracingSagas from "oxalis/model/sagas/skeletontracing_saga"; -import ErrorHandling from "libs/error_handling"; -import meshSaga, { handleAdditionalCoordinateUpdate } from "oxalis/model/sagas/mesh_saga"; -import DatasetSagas from "oxalis/model/sagas/dataset_saga"; import { watchToolDeselection, watchToolReset } from "oxalis/model/sagas/annotation_tool_saga"; -import SettingsSaga from "oxalis/model/sagas/settings_saga"; -import watchTasksAsync, { warnAboutMagRestriction } from "oxalis/model/sagas/task_saga"; -import loadHistogramDataSaga from "oxalis/model/sagas/load_histogram_data_saga"; import listenToClipHistogramSaga from "oxalis/model/sagas/clip_histogram_saga"; +import DatasetSagas from "oxalis/model/sagas/dataset_saga"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import loadHistogramDataSaga from "oxalis/model/sagas/load_histogram_data_saga"; import MappingSaga from "oxalis/model/sagas/mapping_saga"; +import meshSaga, { handleAdditionalCoordinateUpdate } from "oxalis/model/sagas/mesh_saga"; +import { watchDataRelevantChanges } from "oxalis/model/sagas/prefetch_saga"; import ProofreadSaga from "oxalis/model/sagas/proofread_saga"; import ReadySagas, { setWkReadyToFalse } from "oxalis/model/sagas/ready_sagas"; -import { warnIfEmailIsUnverified } from "./user_saga"; +import SaveSagas, { toggleErrorHighlighting } from "oxalis/model/sagas/save_saga"; +import SettingsSaga from "oxalis/model/sagas/settings_saga"; +import SkeletontracingSagas from "oxalis/model/sagas/skeletontracing_saga"; +import watchTasksAsync, { warnAboutMagRestriction } from "oxalis/model/sagas/task_saga"; +import UndoSaga from 
"oxalis/model/sagas/undo_saga"; +import VolumetracingSagas from "oxalis/model/sagas/volumetracing_saga"; +import { all, call, cancel, fork, take, takeEvery } from "typed-redux-saga"; import type { EscalateErrorAction } from "../actions/actions"; +import { warnIfEmailIsUnverified } from "./user_saga"; let rootSagaCrashed = false; export default function* rootSaga(): Saga { diff --git a/frontend/javascripts/oxalis/model/sagas/saga_helpers.ts b/frontend/javascripts/oxalis/model/sagas/saga_helpers.ts index abf7b049b1e..08a958bb996 100644 --- a/frontend/javascripts/oxalis/model/sagas/saga_helpers.ts +++ b/frontend/javascripts/oxalis/model/sagas/saga_helpers.ts @@ -1,19 +1,19 @@ +import type { ActionPattern } from "@redux-saga/types"; import { Modal } from "antd"; +import Toast from "libs/toast"; import messages from "messages"; +import { MappingStatusEnum } from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; import { setBusyBlockingInfoAction } from "oxalis/model/actions/ui_actions"; import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from "oxalis/model/sagas/effect-generators"; +import { Store } from "oxalis/singletons"; import type { ActiveMappingInfo, VolumeTracing } from "oxalis/store"; import { call, put, takeEvery } from "typed-redux-saga"; -import Toast from "libs/toast"; -import { Store } from "oxalis/singletons"; -import type { ActionPattern } from "@redux-saga/types"; import { setMappingIsLockedAction, setVolumeBucketDataHasChangedAction, } from "../actions/volumetracing_actions"; -import { MappingStatusEnum } from "oxalis/constants"; export function* takeEveryUnlessBusy

( actionDescriptor: P, diff --git a/frontend/javascripts/oxalis/model/sagas/saga_selectors.ts b/frontend/javascripts/oxalis/model/sagas/saga_selectors.ts index c1d04d807e2..b75cc400777 100644 --- a/frontend/javascripts/oxalis/model/sagas/saga_selectors.ts +++ b/frontend/javascripts/oxalis/model/sagas/saga_selectors.ts @@ -1,11 +1,11 @@ -import type { OxalisState } from "oxalis/store"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { select } from "oxalis/model/sagas/effect-generators"; import { V2 } from "libs/mjs"; -import type { Vector2, OrthoView } from "oxalis/constants"; -import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; +import type { OrthoView, Vector2 } from "oxalis/constants"; import { getPlaneExtentInVoxelFromStore } from "oxalis/model/accessors/view_mode_accessor"; import Dimensions from "oxalis/model/dimensions"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; +import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; +import type { OxalisState } from "oxalis/store"; export function* getHalfViewportExtentsInVx(activeViewport: OrthoView): Saga { const zoom = yield* select((state) => state.flycam.zoomStep); diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.ts b/frontend/javascripts/oxalis/model/sagas/save_saga.ts index 984f815934d..a8e45bcb45c 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.ts @@ -31,6 +31,7 @@ import compactUpdateActions from "oxalis/model/helpers/compaction/compact_update import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from "oxalis/model/sagas/effect-generators"; +import { ensureWkReady } from "oxalis/model/sagas/ready_sagas"; import { MAXIMUM_ACTION_COUNT_PER_SAVE, 
MAX_SAVE_RETRY_WAITING_TIME, @@ -39,11 +40,10 @@ import { } from "oxalis/model/sagas/save_saga_constants"; import { diffSkeletonTracing } from "oxalis/model/sagas/skeletontracing_saga"; import { - updateTdCamera, type UpdateActionWithoutIsolationRequirement, + updateTdCamera, } from "oxalis/model/sagas/update_actions"; import { diffVolumeTracing } from "oxalis/model/sagas/volumetracing_saga"; -import { ensureWkReady } from "oxalis/model/sagas/ready_sagas"; import { Model } from "oxalis/singletons"; import type { CameraData, diff --git a/frontend/javascripts/oxalis/model/sagas/settings_saga.ts b/frontend/javascripts/oxalis/model/sagas/settings_saga.ts index 454c4389f11..156f46cd3f8 100644 --- a/frontend/javascripts/oxalis/model/sagas/settings_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/settings_saga.ts @@ -1,15 +1,15 @@ +import { updateDatasetConfiguration, updateUserConfiguration } from "admin/admin_rest_api"; +import ErrorHandling from "libs/error_handling"; +import Toast from "libs/toast"; +import messages from "messages"; +import type { UpdateUserSettingAction } from "oxalis/model/actions/settings_actions"; +import { type Saga, select, take } from "oxalis/model/sagas/effect-generators"; import { SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, } from "oxalis/model/sagas/save_saga_constants"; -import { type Saga, take, select } from "oxalis/model/sagas/effect-generators"; -import { all, takeEvery, debounce, call, retry } from "typed-redux-saga"; -import type { UpdateUserSettingAction } from "oxalis/model/actions/settings_actions"; -import { updateUserConfiguration, updateDatasetConfiguration } from "admin/admin_rest_api"; -import ErrorHandling from "libs/error_handling"; -import Toast from "libs/toast"; -import messages from "messages"; import type { DatasetConfiguration, DatasetLayerConfiguration } from "oxalis/store"; +import { all, call, debounce, retry, takeEvery } from "typed-redux-saga"; function* pushUserSettingsAsync(): Saga { const activeUser = 
yield* select((state) => state.activeUser); diff --git a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts index f532b4d0021..53911bbdea6 100644 --- a/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/skeletontracing_saga.ts @@ -1,21 +1,52 @@ +import { getAgglomerateSkeleton, getEditableAgglomerateSkeleton } from "admin/admin_rest_api"; import { Modal } from "antd"; +import DiffableMap, { diffDiffableMaps } from "libs/diffable_map"; +import ErrorHandling from "libs/error_handling"; +import { V3 } from "libs/mjs"; +import createProgressCallback from "libs/progress_callback"; +import type { Message } from "libs/toast"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; import _ from "lodash"; +import memoizeOne from "memoize-one"; +import messages from "messages"; +import { TreeTypeEnum } from "oxalis/constants"; +import { getLayerByName } from "oxalis/model/accessors/dataset_accessor"; +import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; +import { + enforceSkeletonTracing, + findTreeByName, + getActiveNode, + getBranchPoints, + getNodePosition, + getTreeNameForAgglomerateSkeleton, + getTreesWithType, +} from "oxalis/model/accessors/skeletontracing_accessor"; import type { Action } from "oxalis/model/actions/actions"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; import { - actionChannel, - take, - takeEvery, - throttle, - all, - call, - fork, - put, - race, -} from "typed-redux-saga"; + addConnectomeTreesAction, + deleteConnectomeTreesAction, +} from "oxalis/model/actions/connectome_actions"; +import { + setAdditionalCoordinatesAction, + setPositionAction, + setRotationAction, +} from "oxalis/model/actions/flycam_actions"; +import type { LoadAgglomerateSkeletonAction } from "oxalis/model/actions/skeletontracing_actions"; +import { + addTreesAndGroupsAction, 
+ deleteBranchPointAction, + setTreeNameAction, +} from "oxalis/model/actions/skeletontracing_actions"; +import EdgeCollection, { diffEdgeCollections } from "oxalis/model/edge_collection"; +import { parseProtoTracing } from "oxalis/model/helpers/proto_helpers"; +import { + createMutableTreeMapFromTreeArray, + generateTreeName, +} from "oxalis/model/reducers/skeletontracing_reducer_helpers"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from "oxalis/model/sagas/effect-generators"; import type { UpdateActionWithoutIsolationRequirement } from "oxalis/model/sagas/update_actions"; -import { TreeTypeEnum } from "oxalis/constants"; import { createEdge, createNode, @@ -23,43 +54,15 @@ import { deleteEdge, deleteNode, deleteTree, - updateTreeVisibility, - updateTreeEdgesVisibility, updateNode, updateSkeletonTracing, - updateUserBoundingBoxesInSkeletonTracing, updateTree, + updateTreeEdgesVisibility, updateTreeGroups, + updateTreeVisibility, + updateUserBoundingBoxesInSkeletonTracing, } from "oxalis/model/sagas/update_actions"; -import { V3 } from "libs/mjs"; -import type { LoadAgglomerateSkeletonAction } from "oxalis/model/actions/skeletontracing_actions"; -import { - deleteBranchPointAction, - setTreeNameAction, - addTreesAndGroupsAction, -} from "oxalis/model/actions/skeletontracing_actions"; -import { - generateTreeName, - createMutableTreeMapFromTreeArray, -} from "oxalis/model/reducers/skeletontracing_reducer_helpers"; -import { - getActiveNode, - getBranchPoints, - enforceSkeletonTracing, - findTreeByName, - getTreeNameForAgglomerateSkeleton, - getTreesWithType, - getNodePosition, -} from "oxalis/model/accessors/skeletontracing_accessor"; -import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; -import { - setAdditionalCoordinatesAction, - setPositionAction, - setRotationAction, -} from "oxalis/model/actions/flycam_actions"; -import DiffableMap, { diffDiffableMaps } from "libs/diffable_map"; -import 
EdgeCollection, { diffEdgeCollections } from "oxalis/model/edge_collection"; -import ErrorHandling from "libs/error_handling"; +import { api } from "oxalis/singletons"; import type { Flycam, Node, @@ -70,21 +73,18 @@ import type { TreeMap, } from "oxalis/store"; import Store from "oxalis/store"; -import type { Message } from "libs/toast"; -import Toast from "libs/toast"; -import * as Utils from "libs/utils"; -import { api } from "oxalis/singletons"; -import messages from "messages"; -import { getLayerByName } from "oxalis/model/accessors/dataset_accessor"; -import { getAgglomerateSkeleton, getEditableAgglomerateSkeleton } from "admin/admin_rest_api"; -import { parseProtoTracing } from "oxalis/model/helpers/proto_helpers"; -import createProgressCallback from "libs/progress_callback"; import { - addConnectomeTreesAction, - deleteConnectomeTreesAction, -} from "oxalis/model/actions/connectome_actions"; + actionChannel, + all, + call, + fork, + put, + race, + take, + takeEvery, + throttle, +} from "typed-redux-saga"; import type { ServerSkeletonTracing } from "types/api_flow_types"; -import memoizeOne from "memoize-one"; import { ensureWkReady } from "./ready_sagas"; function* centerActiveNode(action: Action): Saga { diff --git a/frontend/javascripts/oxalis/model/sagas/task_saga.tsx b/frontend/javascripts/oxalis/model/sagas/task_saga.tsx index 57d39051852..2557c6fb239 100644 --- a/frontend/javascripts/oxalis/model/sagas/task_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/task_saga.tsx @@ -1,31 +1,31 @@ -import React from "react"; -import _ from "lodash"; +import { updateLastTaskTypeIdOfUser } from "admin/admin_rest_api"; import { Button } from "antd"; -import type { APITaskType } from "types/api_flow_types"; -import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { select } from "oxalis/model/sagas/effect-generators"; -import { call, put, delay, take } from "typed-redux-saga"; +import renderIndependently from "libs/render_independently"; 
+import Toast from "libs/toast"; import { clamp } from "libs/utils"; +import _ from "lodash"; +import messages from "messages"; +import { getSegmentationLayers } from "oxalis/model/accessors/dataset_accessor"; import { getValidTaskZoomRange, isMagRestrictionViolated, } from "oxalis/model/accessors/flycam_accessor"; -import { getSegmentationLayers } from "oxalis/model/accessors/dataset_accessor"; -import { setActiveUserAction } from "oxalis/model/actions/user_actions"; -import { setMergerModeEnabledAction } from "oxalis/model/actions/skeletontracing_actions"; import { setZoomStepAction } from "oxalis/model/actions/flycam_actions"; import { updateDatasetSettingAction, - updateUserSettingAction, updateLayerSettingAction, + updateUserSettingAction, } from "oxalis/model/actions/settings_actions"; -import { updateLastTaskTypeIdOfUser } from "admin/admin_rest_api"; +import { setMergerModeEnabledAction } from "oxalis/model/actions/skeletontracing_actions"; +import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import type { Saga } from "oxalis/model/sagas/effect-generators"; +import { select } from "oxalis/model/sagas/effect-generators"; +import Store, { type RecommendedConfiguration } from "oxalis/store"; import NewTaskDescriptionModal from "oxalis/view/new_task_description_modal"; import RecommendedConfigurationModal from "oxalis/view/recommended_configuration_modal"; -import Store, { type RecommendedConfiguration } from "oxalis/store"; -import Toast from "libs/toast"; -import messages from "messages"; -import renderIndependently from "libs/render_independently"; +import React from "react"; +import { call, delay, put, take } from "typed-redux-saga"; +import type { APITaskType } from "types/api_flow_types"; import { ensureWkReady } from "./ready_sagas"; function* maybeShowNewTaskTypeModal(taskType: APITaskType): Saga { diff --git a/frontend/javascripts/oxalis/model/sagas/undo_saga.ts b/frontend/javascripts/oxalis/model/sagas/undo_saga.ts index 
89fdeaef6a7..a5408abf720 100644 --- a/frontend/javascripts/oxalis/model/sagas/undo_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/undo_saga.ts @@ -18,23 +18,23 @@ import { import type { RedoAction, UndoAction } from "oxalis/model/actions/save_actions"; import type { SkeletonTracingAction } from "oxalis/model/actions/skeletontracing_actions"; import { + SkeletonTracingSaveRelevantActions, centerActiveNodeAction, setTracingAction, - SkeletonTracingSaveRelevantActions, } from "oxalis/model/actions/skeletontracing_actions"; import { setBusyBlockingInfoAction } from "oxalis/model/actions/ui_actions"; import { type AddBucketToUndoAction, type BatchUpdateGroupsAndSegmentsAction, - cancelQuickSelectAction, type FinishAnnotationStrokeAction, type ImportVolumeTracingAction, type MaybeUnmergedBucketLoadedPromise, type RemoveSegmentAction, - setSegmentGroupsAction, type SetSegmentGroupsAction, - setSegmentsAction, type UpdateSegmentAction, + cancelQuickSelectAction, + setSegmentGroupsAction, + setSegmentsAction, } from "oxalis/model/actions/volumetracing_actions"; import type { BucketDataArray } from "oxalis/model/bucket_data_handling/bucket"; import { diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 108313abd7e..7d6d351466d 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -1,18 +1,18 @@ -import type { SendBucketInfo } from "oxalis/model/bucket_data_handling/wkstore_adapter"; import type { Vector3 } from "oxalis/constants"; +import type { SendBucketInfo } from "oxalis/model/bucket_data_handling/wkstore_adapter"; +import { convertUserBoundingBoxesFromFrontendToServer } from "oxalis/model/reducers/reducer_helpers"; import type { - VolumeTracing, - Tree, Node, + NumberLike, + SegmentGroup, + Tree, TreeGroup, UserBoundingBox, - SegmentGroup, - NumberLike, + VolumeTracing, } from "oxalis/store"; 
-import { convertUserBoundingBoxesFromFrontendToServer } from "oxalis/model/reducers/reducer_helpers"; import type { - AdditionalCoordinate, APIMagRestrictions, + AdditionalCoordinate, MetadataEntryProto, } from "types/api_flow_types"; diff --git a/frontend/javascripts/oxalis/model/sagas/user_saga.ts b/frontend/javascripts/oxalis/model/sagas/user_saga.ts index f1d3b191da6..a8fe5b4d41d 100644 --- a/frontend/javascripts/oxalis/model/sagas/user_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/user_saga.ts @@ -1,6 +1,6 @@ -import type { SetActiveUser } from "../actions/user_actions"; import { showVerificationReminderToast } from "admin/auth/verify_email_view"; import { takeEvery } from "typed-redux-saga"; +import type { SetActiveUser } from "../actions/user_actions"; export function* warnIfEmailIsUnverified() { yield* takeEvery("SET_ACTIVE_USER", function handler(action: SetActiveUser) { diff --git a/frontend/javascripts/oxalis/model/sagas/volume/floodfill_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volume/floodfill_saga.tsx index dd1d17b692e..9274d616b9b 100644 --- a/frontend/javascripts/oxalis/model/sagas/volume/floodfill_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volume/floodfill_saga.tsx @@ -4,22 +4,23 @@ import Toast from "libs/toast"; import * as Utils from "libs/utils"; import type { BoundingBoxType, + FillMode, LabeledVoxelsMap, OrthoView, Vector2, Vector3, - FillMode, } from "oxalis/constants"; import Constants, { FillModeEnum, Unicode } from "oxalis/constants"; +import _ from "lodash"; import { getDatasetBoundingBox, getMagInfo } from "oxalis/model/accessors/dataset_accessor"; import { getActiveMagIndexForLayer } from "oxalis/model/accessors/flycam_accessor"; import { enforceActiveVolumeTracing } from "oxalis/model/accessors/volumetracing_accessor"; import { addUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; import { setBusyBlockingInfoAction } from "oxalis/model/actions/ui_actions"; import { - 
finishAnnotationStrokeAction, type FloodFillAction, + finishAnnotationStrokeAction, updateSegmentAction, } from "oxalis/model/actions/volumetracing_actions"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; @@ -31,7 +32,6 @@ import { Model } from "oxalis/singletons"; import { call, put, takeEvery } from "typed-redux-saga"; import { getUserBoundingBoxesThatContainPosition } from "../../accessors/tracing_accessor"; import { applyLabeledVoxelMapToAllMissingMags } from "./helpers"; -import _ from "lodash"; const NO_FLOODFILL_BBOX_TOAST_KEY = "NO_FLOODFILL_BBOX"; const NO_SUCCESS_MSG_WHEN_WITHIN_MS = 500; diff --git a/frontend/javascripts/oxalis/model/sagas/volume/helpers.ts b/frontend/javascripts/oxalis/model/sagas/volume/helpers.ts index 63e6fe197ed..0af435082ff 100644 --- a/frontend/javascripts/oxalis/model/sagas/volume/helpers.ts +++ b/frontend/javascripts/oxalis/model/sagas/volume/helpers.ts @@ -10,23 +10,23 @@ import Constants, { type Vector3, } from "oxalis/constants"; import { getDatasetBoundingBox, getMagInfo } from "oxalis/model/accessors/dataset_accessor"; +import { getFlooredPosition } from "oxalis/model/accessors/flycam_accessor"; +import { enforceActiveVolumeTracing } from "oxalis/model/accessors/volumetracing_accessor"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; +import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; +import Dimensions, { type DimensionMap } from "oxalis/model/dimensions"; +import type { MagInfo } from "oxalis/model/helpers/mag_info"; +import { zoomedPositionToZoomedAddress } from "oxalis/model/helpers/position_converter"; import type { Saga } from "oxalis/model/sagas/effect-generators"; import { select } from "oxalis/model/sagas/effect-generators"; import { getHalfViewportExtentsInVx } from "oxalis/model/sagas/saga_selectors"; -import { call } from "typed-redux-saga"; import sampleVoxelMapToMagnification, { applyVoxelMap, } from 
"oxalis/model/volumetracing/volume_annotation_sampling"; -import Dimensions, { type DimensionMap } from "oxalis/model/dimensions"; -import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; -import { Model } from "oxalis/singletons"; import VolumeLayer, { type VoxelBuffer2D } from "oxalis/model/volumetracing/volumelayer"; -import { enforceActiveVolumeTracing } from "oxalis/model/accessors/volumetracing_accessor"; +import { Model } from "oxalis/singletons"; import type { BoundingBoxObject, VolumeTracing } from "oxalis/store"; -import { getFlooredPosition } from "oxalis/model/accessors/flycam_accessor"; -import { zoomedPositionToZoomedAddress } from "oxalis/model/helpers/position_converter"; -import type { MagInfo } from "oxalis/model/helpers/mag_info"; +import { call } from "typed-redux-saga"; function* pairwise(arr: Array): Generator<[T, T], any, any> { for (let i = 0; i < arr.length - 1; i++) { diff --git a/frontend/javascripts/oxalis/model/sagas/volume/volume_interpolation_saga.ts b/frontend/javascripts/oxalis/model/sagas/volume/volume_interpolation_saga.ts index 3d1690b9363..712c016589b 100644 --- a/frontend/javascripts/oxalis/model/sagas/volume/volume_interpolation_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/volume/volume_interpolation_saga.ts @@ -37,8 +37,8 @@ import type { VoxelBuffer2D } from "oxalis/model/volumetracing/volumelayer"; import { Model, api } from "oxalis/singletons"; import type { OxalisState } from "oxalis/store"; import { call, put } from "typed-redux-saga"; -import { createVolumeLayer, getBoundingBoxForViewport, labelWithVoxelBuffer2D } from "./helpers"; import { requestBucketModificationInVolumeTracing } from "../saga_helpers"; +import { createVolumeLayer, getBoundingBoxForViewport, labelWithVoxelBuffer2D } from "./helpers"; /* * This saga is capable of doing segment interpolation between two slices. 
diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index d7b17288db7..34ac2975e4f 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -69,11 +69,11 @@ import { takeEveryUnlessBusy, } from "oxalis/model/sagas/saga_helpers"; import { + type UpdateActionWithoutIsolationRequirement, createSegmentVolumeAction, deleteSegmentDataVolumeAction, deleteSegmentVolumeAction, removeFallbackLayer, - type UpdateActionWithoutIsolationRequirement, updateMappingName, updateSegmentGroups, updateSegmentVolumeAction, @@ -84,12 +84,12 @@ import type VolumeLayer from "oxalis/model/volumetracing/volumelayer"; import { Model, api } from "oxalis/singletons"; import type { Flycam, SegmentMap, VolumeTracing } from "oxalis/store"; import type { ActionPattern } from "redux-saga/effects"; -import { ensureWkReady } from "./ready_sagas"; import { actionChannel, call, fork, put, takeEvery, takeLatest } from "typed-redux-saga"; import { pushSaveQueueTransaction } from "../actions/save_actions"; -import { createVolumeLayer, labelWithVoxelBuffer2D, type BooleanBox } from "./volume/helpers"; -import maybeInterpolateSegmentationLayer from "./volume/volume_interpolation_saga"; +import { ensureWkReady } from "./ready_sagas"; import { floodFill } from "./volume/floodfill_saga"; +import { type BooleanBox, createVolumeLayer, labelWithVoxelBuffer2D } from "./volume/helpers"; +import maybeInterpolateSegmentationLayer from "./volume/volume_interpolation_saga"; const OVERWRITE_EMPTY_WARNING_KEY = "OVERWRITE-EMPTY-WARNING"; diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.ts b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.ts index 89cf8ec610f..c2fa996d638 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.ts +++ 
b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.ts @@ -1,10 +1,10 @@ +import { map3 } from "libs/utils"; import _ from "lodash"; -import type { Vector3, LabeledVoxelsMap, BucketAddress } from "oxalis/constants"; +import messages from "messages"; +import type { BucketAddress, LabeledVoxelsMap, Vector3 } from "oxalis/constants"; import constants from "oxalis/constants"; -import { map3 } from "libs/utils"; -import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; import type { Bucket } from "oxalis/model/bucket_data_handling/bucket"; -import messages from "messages"; +import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; import type { DimensionMap } from "oxalis/model/dimensions"; function upsampleVoxelMap( diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.ts b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.ts index 81d4ffaff79..9f823003d95 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.ts +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.ts @@ -1,21 +1,21 @@ -import _ from "lodash"; +import Drawing from "libs/drawing"; import { V2, V3 } from "libs/mjs"; -import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; -import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; +import Toast from "libs/toast"; +import _ from "lodash"; +import messages from "messages"; +import type { AnnotationTool, OrthoView, Vector2, Vector3 } from "oxalis/constants"; +import Constants, { OrthoViews, Vector3Indicies, Vector2Indicies } from "oxalis/constants"; import { isBrushTool } from "oxalis/model/accessors/tool_accessor"; +import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; +import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; +import Dimensions from "oxalis/model/dimensions"; import { scaleGlobalPositionWithMagnification, 
scaleGlobalPositionWithMagnificationFloat, zoomedPositionToGlobalPosition, } from "oxalis/model/helpers/position_converter"; -import type { OrthoView, Vector2, Vector3, AnnotationTool } from "oxalis/constants"; -import Constants, { OrthoViews, Vector3Indicies, Vector2Indicies } from "oxalis/constants"; -import Dimensions from "oxalis/model/dimensions"; -import Drawing from "libs/drawing"; +import { getBaseVoxelFactorsInUnit } from "oxalis/model/scaleinfo"; import Store from "oxalis/store"; -import Toast from "libs/toast"; -import messages from "messages"; -import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; /* A VoxelBuffer2D instance holds a two dimensional slice diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 6a4ee496a58..e0a138c14fb 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -1,89 +1,91 @@ -import _ from "lodash"; -import type { - APIAnnotation, - APIDataset, - MutableAPIDataset, - APIDataLayer, - ServerVolumeTracing, - ServerTracing, - ServerEditableMapping, - APICompoundType, - APISegmentationLayer, - APITracingStoreAnnotation, -} from "types/api_flow_types"; import { - computeDataTexturesSetup, - getSupportedTextureSpecs, - validateMinimumRequirements, -} from "oxalis/model/bucket_data_handling/data_rendering_logic"; + getAnnotationCompoundInformation, + getAnnotationProto, + getDataset, + getDatasetViewConfiguration, + getEditableMappingInfo, + getEmptySandboxAnnotationInformation, + getSharingTokenFromUrlParameters, + getTracingsForAnnotation, + getUnversionedAnnotationInformation, + getUserConfiguration, +} from "admin/admin_rest_api"; +import { + PricingPlanEnum, + isFeatureAllowedByPricingPlan, +} from "admin/organization/pricing_plan_utils"; +import ErrorHandling from "libs/error_handling"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import _ from 
"lodash"; +import messages from "messages"; +import constants, { ControlModeEnum, AnnotationToolEnum, type Vector3 } from "oxalis/constants"; +import type { PartialUrlManagerState, UrlStateByLayer } from "oxalis/controller/url_manager"; +import UrlManager from "oxalis/controller/url_manager"; import { determineAllowedModes, getBitDepth, - getDatasetBoundingBox, getDataLayers, + getDatasetBoundingBox, getDatasetCenter, - hasSegmentation, - isElementClassSupported, - isSegmentationLayer, - getSegmentationLayers, getLayerByName, getSegmentationLayerByName, + getSegmentationLayers, getUnifiedAdditionalCoordinates, + hasSegmentation, + isElementClassSupported, + isSegmentationLayer, } from "oxalis/model/accessors/dataset_accessor"; import { getNullableSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; -import { getServerVolumeTracings } from "oxalis/model/accessors/volumetracing_accessor"; import { getSomeServerTracing } from "oxalis/model/accessors/tracing_accessor"; -import { - getTracingsForAnnotation, - getUnversionedAnnotationInformation, - getEmptySandboxAnnotationInformation, - getDataset, - getSharingTokenFromUrlParameters, - getUserConfiguration, - getDatasetViewConfiguration, - getEditableMappingInfo, - getAnnotationCompoundInformation, - getAnnotationProto, -} from "admin/admin_rest_api"; +import { getServerVolumeTracings } from "oxalis/model/accessors/volumetracing_accessor"; import { dispatchMaybeFetchMeshFilesAsync, initializeAnnotationAction, updateCurrentMeshFileAction, } from "oxalis/model/actions/annotation_actions"; import { - initializeSettingsAction, + setActiveConnectomeAgglomerateIdsAction, + updateCurrentConnectomeFileAction, +} from "oxalis/model/actions/connectome_actions"; +import { setDatasetAction } from "oxalis/model/actions/dataset_actions"; +import { + setAdditionalCoordinatesAction, + setPositionAction, + setRotationAction, + setZoomStepAction, +} from "oxalis/model/actions/flycam_actions"; +import { + 
loadAdHocMeshAction, + loadPrecomputedMeshAction, +} from "oxalis/model/actions/segmentation_actions"; +import { initializeGpuSetupAction, + initializeSettingsAction, setControlModeAction, - setViewModeAction, setMappingAction, - updateLayerSettingAction, setMappingEnabledAction, + setViewModeAction, + updateLayerSettingAction, } from "oxalis/model/actions/settings_actions"; import { - initializeEditableMappingAction, - initializeVolumeTracingAction, -} from "oxalis/model/actions/volumetracing_actions"; -import { - setActiveNodeAction, initializeSkeletonTracingAction, loadAgglomerateSkeletonAction, + setActiveNodeAction, setShowSkeletonsAction, } from "oxalis/model/actions/skeletontracing_actions"; -import { setDatasetAction } from "oxalis/model/actions/dataset_actions"; -import { - setPositionAction, - setZoomStepAction, - setRotationAction, - setAdditionalCoordinatesAction, -} from "oxalis/model/actions/flycam_actions"; import { setTaskAction } from "oxalis/model/actions/task_actions"; import { setToolAction } from "oxalis/model/actions/ui_actions"; import { - loadAdHocMeshAction, - loadPrecomputedMeshAction, -} from "oxalis/model/actions/segmentation_actions"; + initializeEditableMappingAction, + initializeVolumeTracingAction, +} from "oxalis/model/actions/volumetracing_actions"; +import { + computeDataTexturesSetup, + getSupportedTextureSpecs, + validateMinimumRequirements, +} from "oxalis/model/bucket_data_handling/data_rendering_logic"; import DataLayer from "oxalis/model/data_layer"; -import ErrorHandling from "libs/error_handling"; import type { DatasetConfiguration, DatasetLayerConfiguration, @@ -91,25 +93,23 @@ import type { UserConfiguration, } from "oxalis/store"; import Store from "oxalis/store"; -import Toast from "libs/toast"; -import type { PartialUrlManagerState, UrlStateByLayer } from "oxalis/controller/url_manager"; -import UrlManager from "oxalis/controller/url_manager"; -import * as Utils from "libs/utils"; -import constants, { ControlModeEnum, 
AnnotationToolEnum, type Vector3 } from "oxalis/constants"; -import messages from "messages"; -import { - setActiveConnectomeAgglomerateIdsAction, - updateCurrentConnectomeFileAction, -} from "oxalis/model/actions/connectome_actions"; -import { - PricingPlanEnum, - isFeatureAllowedByPricingPlan, -} from "admin/organization/pricing_plan_utils"; +import type { + APIAnnotation, + APICompoundType, + APIDataLayer, + APIDataset, + APISegmentationLayer, + APITracingStoreAnnotation, + MutableAPIDataset, + ServerEditableMapping, + ServerTracing, + ServerVolumeTracing, +} from "types/api_flow_types"; +import { setVersionNumberAction } from "./model/actions/save_actions"; import { convertServerAdditionalAxesToFrontEnd, convertServerAnnotationToFrontendAnnotation, } from "./model/reducers/reducer_helpers"; -import { setVersionNumberAction } from "./model/actions/save_actions"; export const HANDLED_ERROR = "error_was_handled"; type DataLayerCollection = Record; @@ -152,10 +152,10 @@ export async function initialize( // Only when the newest version is requested (version==null), // the stats are available in unversionedAnnotation. version == null - ? _.find( + ? (_.find( unversionedAnnotation.annotationLayers, (layer) => layer.tracingId === protoLayer.tracingId, - )?.stats ?? {} + )?.stats ?? 
{}) : {}, }; }); diff --git a/frontend/javascripts/oxalis/shaders/coords.glsl.ts b/frontend/javascripts/oxalis/shaders/coords.glsl.ts index dff7a25cdfe..66b044c8447 100644 --- a/frontend/javascripts/oxalis/shaders/coords.glsl.ts +++ b/frontend/javascripts/oxalis/shaders/coords.glsl.ts @@ -1,4 +1,4 @@ -import { isFlightMode, getW } from "oxalis/shaders/utils.glsl"; +import { getW, isFlightMode } from "oxalis/shaders/utils.glsl"; import type { ShaderModule } from "./shader_module_system"; export const getMagnification: ShaderModule = { code: ` diff --git a/frontend/javascripts/oxalis/shaders/main_data_shaders.glsl.ts b/frontend/javascripts/oxalis/shaders/main_data_shaders.glsl.ts index 68444fa1fdd..84f5d3d74ed 100644 --- a/frontend/javascripts/oxalis/shaders/main_data_shaders.glsl.ts +++ b/frontend/javascripts/oxalis/shaders/main_data_shaders.glsl.ts @@ -1,40 +1,40 @@ +import type TPS3D from "libs/thin_plate_spline"; import _ from "lodash"; -import { MAPPING_TEXTURE_WIDTH } from "oxalis/model/bucket_data_handling/mappings"; import type { Vector3 } from "oxalis/constants"; import constants, { ViewModeValuesIndices, OrthoViewIndices } from "oxalis/constants"; -import { - convertCellIdToRGB, - getBrushOverlay, - getCrossHairOverlay, - getSegmentationAlphaIncrement, - getSegmentId, -} from "./segmentation.glsl"; -import { getMaybeFilteredColorOrFallback } from "./filtering.glsl"; +import Constants from "oxalis/constants"; +import { PLANE_SUBDIVISION } from "oxalis/geometries/plane"; +import { MAX_ZOOM_STEP_DIFF } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; +import { MAPPING_TEXTURE_WIDTH } from "oxalis/model/bucket_data_handling/mappings"; +import { getBlendLayersAdditive, getBlendLayersCover } from "./blending.glsl"; import { getAbsoluteCoords, getMagnification, getWorldCoordUVW, isOutsideOfBoundingBox, } from "./coords.glsl"; +import { getMaybeFilteredColorOrFallback } from "./filtering.glsl"; import { - inverse, - div, - isNan, - transDim, - 
isFlightMode, - formatNumberAsGLSLFloat, - almostEq, -} from "./utils.glsl"; + convertCellIdToRGB, + getBrushOverlay, + getCrossHairOverlay, + getSegmentId, + getSegmentationAlphaIncrement, +} from "./segmentation.glsl"; import compileShader from "./shader_module_system"; -import Constants from "oxalis/constants"; -import { PLANE_SUBDIVISION } from "oxalis/geometries/plane"; -import { MAX_ZOOM_STEP_DIFF } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; -import { getBlendLayersAdditive, getBlendLayersCover } from "./blending.glsl"; -import type TPS3D from "libs/thin_plate_spline"; import { generateCalculateTpsOffsetFunction, generateTpsInitialization, } from "./thin_plate_spline.glsl"; +import { + almostEq, + div, + formatNumberAsGLSLFloat, + inverse, + isFlightMode, + isNan, + transDim, +} from "./utils.glsl"; type Params = { globalLayerCount: number; diff --git a/frontend/javascripts/oxalis/shaders/segmentation.glsl.ts b/frontend/javascripts/oxalis/shaders/segmentation.glsl.ts index f86e4bc6a49..8b4e3e0e8ce 100644 --- a/frontend/javascripts/oxalis/shaders/segmentation.glsl.ts +++ b/frontend/javascripts/oxalis/shaders/segmentation.glsl.ts @@ -1,17 +1,17 @@ +import type { Vector3, Vector4 } from "oxalis/constants"; import { - hsvToRgb, - jsRgb2hsv, - getElementOfPermutation, - jsGetElementOfPermutation, aaStep, colormapJet, + getElementOfPermutation, + hsvToRgb, jsColormapJet, + jsGetElementOfPermutation, + jsRgb2hsv, } from "oxalis/shaders/utils.glsl"; -import type { Vector3, Vector4 } from "oxalis/constants"; -import type { ShaderModule } from "./shader_module_system"; -import { getRgbaAtIndex } from "./texture_access.glsl"; import { hashCombine } from "./hashing.glsl"; import { attemptMappingLookUp } from "./mappings.glsl"; +import type { ShaderModule } from "./shader_module_system"; +import { getRgbaAtIndex } from "./texture_access.glsl"; export const convertCellIdToRGB: ShaderModule = { requirements: [ diff --git 
a/frontend/javascripts/oxalis/shaders/texture_access.glsl.ts b/frontend/javascripts/oxalis/shaders/texture_access.glsl.ts index 8e15185537e..ddbc6acfd97 100644 --- a/frontend/javascripts/oxalis/shaders/texture_access.glsl.ts +++ b/frontend/javascripts/oxalis/shaders/texture_access.glsl.ts @@ -1,5 +1,5 @@ import { MAX_ZOOM_STEP_DIFF } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; -import { getMagnificationFactors, getAbsoluteCoords } from "oxalis/shaders/coords.glsl"; +import { getAbsoluteCoords, getMagnificationFactors } from "oxalis/shaders/coords.glsl"; import { hashCombine } from "./hashing.glsl"; import type { ShaderModule } from "./shader_module_system"; import { transDim } from "./utils.glsl"; diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 2aac7e435bb..e877315beaa 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -1,4 +1,45 @@ -import { createStore, applyMiddleware, type Middleware } from "redux"; +import type DiffableMap from "libs/diffable_map"; +import type { Matrix4x4 } from "libs/mjs"; +import type { + AnnotationTool, + BoundingBoxType, + ContourMode, + ControlMode, + FillMode, + InterpolationMode, + MappingStatus, + OrthoView, + OrthoViewWithoutTD, + OverwriteMode, + Rect, + TDViewDisplayMode, + TreeType, + Vector2, + Vector3, + ViewMode, +} from "oxalis/constants"; +import type { BLEND_MODES, ControlModeEnum } from "oxalis/constants"; +import defaultState from "oxalis/default_state"; +import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; +import type { Action } from "oxalis/model/actions/actions"; +import type EdgeCollection from "oxalis/model/edge_collection"; +import actionLoggerMiddleware from "oxalis/model/helpers/action_logger_middleware"; +import overwriteActionMiddleware from "oxalis/model/helpers/overwrite_action_middleware"; +import reduceReducers from "oxalis/model/helpers/reduce_reducers"; +import 
AnnotationReducer from "oxalis/model/reducers/annotation_reducer"; +import ConnectomeReducer from "oxalis/model/reducers/connectome_reducer"; +import DatasetReducer from "oxalis/model/reducers/dataset_reducer"; +import FlycamReducer from "oxalis/model/reducers/flycam_reducer"; +import SaveReducer from "oxalis/model/reducers/save_reducer"; +import SettingsReducer from "oxalis/model/reducers/settings_reducer"; +import SkeletonTracingReducer from "oxalis/model/reducers/skeletontracing_reducer"; +import TaskReducer from "oxalis/model/reducers/task_reducer"; +import UiReducer from "oxalis/model/reducers/ui_reducer"; +import UserReducer from "oxalis/model/reducers/user_reducer"; +import ViewModeReducer from "oxalis/model/reducers/view_mode_reducer"; +import VolumeTracingReducer from "oxalis/model/reducers/volumetracing_reducer"; +import type { UpdateAction } from "oxalis/model/sagas/update_actions"; +import { type Middleware, applyMiddleware, createStore } from "redux"; import { enableBatching } from "redux-batched-actions"; import createSagaMiddleware, { type Saga } from "redux-saga"; import type { @@ -7,10 +48,12 @@ import type { APIAnnotationVisibility, APIConnectomeFile, APIDataLayer, + APIDataSourceId, APIDataStore, APIDataset, - APIDataSourceId, APIHistogramData, + APIMeshFile, + APIOrganization, APIRestrictions, APIScript, APISettings, @@ -18,57 +61,14 @@ import type { APITracingStore, APIUser, APIUserBase, - AnnotationLayerDescriptor, - TracingType, - APIMeshFile, - ServerEditableMapping, - APIOrganization, APIUserCompact, - AdditionalCoordinate, AdditionalAxis, + AdditionalCoordinate, + AnnotationLayerDescriptor, MetadataEntryProto, + ServerEditableMapping, + TracingType, } from "types/api_flow_types"; -import type { TracingStats } from "oxalis/model/accessors/annotation_accessor"; -import type { Action } from "oxalis/model/actions/actions"; -import type { - BoundingBoxType, - ContourMode, - OverwriteMode, - FillMode, - ControlMode, - TDViewDisplayMode, - 
ViewMode, - OrthoView, - Rect, - Vector2, - Vector3, - AnnotationTool, - MappingStatus, - OrthoViewWithoutTD, - InterpolationMode, - TreeType, -} from "oxalis/constants"; -import type { BLEND_MODES, ControlModeEnum } from "oxalis/constants"; -import type { Matrix4x4 } from "libs/mjs"; -import type { UpdateAction } from "oxalis/model/sagas/update_actions"; -import AnnotationReducer from "oxalis/model/reducers/annotation_reducer"; -import DatasetReducer from "oxalis/model/reducers/dataset_reducer"; -import type DiffableMap from "libs/diffable_map"; -import type EdgeCollection from "oxalis/model/edge_collection"; -import FlycamReducer from "oxalis/model/reducers/flycam_reducer"; -import SaveReducer from "oxalis/model/reducers/save_reducer"; -import SettingsReducer from "oxalis/model/reducers/settings_reducer"; -import SkeletonTracingReducer from "oxalis/model/reducers/skeletontracing_reducer"; -import TaskReducer from "oxalis/model/reducers/task_reducer"; -import UiReducer from "oxalis/model/reducers/ui_reducer"; -import UserReducer from "oxalis/model/reducers/user_reducer"; -import ViewModeReducer from "oxalis/model/reducers/view_mode_reducer"; -import VolumeTracingReducer from "oxalis/model/reducers/volumetracing_reducer"; -import actionLoggerMiddleware from "oxalis/model/helpers/action_logger_middleware"; -import defaultState from "oxalis/default_state"; -import overwriteActionMiddleware from "oxalis/model/helpers/overwrite_action_middleware"; -import reduceReducers from "oxalis/model/helpers/reduce_reducers"; -import ConnectomeReducer from "oxalis/model/reducers/connectome_reducer"; import OrganizationReducer from "./model/reducers/organization_reducer"; import type { StartAIJobModalState } from "./view/action-bar/starting_job_modals"; diff --git a/frontend/javascripts/oxalis/throttled_store.ts b/frontend/javascripts/oxalis/throttled_store.ts index ea33403cf14..3b5bb663ea8 100644 --- a/frontend/javascripts/oxalis/throttled_store.ts +++ 
b/frontend/javascripts/oxalis/throttled_store.ts @@ -1,9 +1,9 @@ -/* eslint no-await-in-loop: 0 */ -import type { Store as StoreType } from "redux"; import Deferred from "libs/async/deferred"; +import * as Utils from "libs/utils"; import type { OxalisState } from "oxalis/store"; import Store from "oxalis/store"; -import * as Utils from "libs/utils"; +/* eslint no-await-in-loop: 0 */ +import type { Store as StoreType } from "redux"; const MAXIMUM_STORE_UPDATE_DELAY = 10000; const listeners: Array<() => void> = []; let waitForUpdate = new Deferred(); diff --git a/frontend/javascripts/oxalis/view/action-bar/create_animation_modal.tsx b/frontend/javascripts/oxalis/view/action-bar/create_animation_modal.tsx index 5c19d7a00ae..180ecc1ccde 100644 --- a/frontend/javascripts/oxalis/view/action-bar/create_animation_modal.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/create_animation_modal.tsx @@ -1,11 +1,23 @@ import { Alert, Checkbox, Col, Divider, Modal, Radio, Row, Space, Tooltip } from "antd"; -import { useSelector } from "react-redux"; import React, { useState } from "react"; +import { useSelector } from "react-redux"; import { startRenderAnimationJob } from "admin/admin_rest_api"; import Toast from "libs/toast"; import Store, { type MeshInformation, type OxalisState, type UserBoundingBox } from "oxalis/store"; +import { InfoCircleOutlined } from "@ant-design/icons"; +import { + PricingPlanEnum, + isFeatureAllowedByPricingPlan, +} from "admin/organization/pricing_plan_utils"; +import { LayerSelection } from "components/layer_selection"; +import { PricingEnforcedSpan } from "components/pricing_enforcers"; +import { + computeBoundingBoxFromBoundingBoxObject, + computeBoundingBoxObjectFromBoundingBox, +} from "libs/utils"; +import type { Vector3 } from "oxalis/constants"; import { getColorLayers, getEffectiveIntensityRange, @@ -13,30 +25,18 @@ import { getMagInfo, is2dDataset, } from "oxalis/model/accessors/dataset_accessor"; -import { - 
computeBoundingBoxFromBoundingBoxObject, - computeBoundingBoxObjectFromBoundingBox, -} from "libs/utils"; +import { getAdditionalCoordinatesAsString } from "oxalis/model/accessors/flycam_accessor"; import { getUserBoundingBoxesFromState } from "oxalis/model/accessors/tracing_accessor"; +import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import { - CAMERA_POSITIONS, - type RenderAnimationOptions, - MOVIE_RESOLUTIONS, type APIDataLayer, APIJobType, type APISegmentationLayer, + CAMERA_POSITIONS, + MOVIE_RESOLUTIONS, + type RenderAnimationOptions, } from "types/api_flow_types"; -import { InfoCircleOutlined } from "@ant-design/icons"; -import { PricingEnforcedSpan } from "components/pricing_enforcers"; -import { - PricingPlanEnum, - isFeatureAllowedByPricingPlan, -} from "admin/organization/pricing_plan_utils"; -import type { Vector3 } from "oxalis/constants"; -import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import { BoundingBoxSelection } from "./starting_job_modals"; -import { LayerSelection } from "components/layer_selection"; -import { getAdditionalCoordinatesAsString } from "oxalis/model/accessors/flycam_accessor"; type Props = { isOpen: boolean; diff --git a/frontend/javascripts/oxalis/view/action-bar/dataset_position_view.tsx b/frontend/javascripts/oxalis/view/action-bar/dataset_position_view.tsx index d449c2c8ad1..69049585aca 100644 --- a/frontend/javascripts/oxalis/view/action-bar/dataset_position_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/dataset_position_view.tsx @@ -1,23 +1,23 @@ -import { Space } from "antd"; import { PushpinOutlined, ReloadOutlined } from "@ant-design/icons"; -import { connect } from "react-redux"; -import type React from "react"; -import { PureComponent } from "react"; -import type { APIDataset } from "types/api_flow_types"; +import { Space } from "antd"; +import FastTooltip from "components/fast_tooltip"; import { V3 } from "libs/mjs"; +import Toast from "libs/toast"; 
import { Vector3Input } from "libs/vector_input"; +import message from "messages"; +import type { Vector3, ViewMode } from "oxalis/constants"; +import constants from "oxalis/constants"; +import { getDatasetExtentInVoxel } from "oxalis/model/accessors/dataset_accessor"; import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; import { setPositionAction, setRotationAction } from "oxalis/model/actions/flycam_actions"; -import { getDatasetExtentInVoxel } from "oxalis/model/accessors/dataset_accessor"; -import ButtonComponent from "oxalis/view/components/button_component"; -import type { OxalisState, Flycam, Task } from "oxalis/store"; +import type { Flycam, OxalisState, Task } from "oxalis/store"; import Store from "oxalis/store"; -import Toast from "libs/toast"; -import type { ViewMode, Vector3 } from "oxalis/constants"; -import constants from "oxalis/constants"; -import message from "messages"; import { ShareButton } from "oxalis/view/action-bar/share_modal_view"; -import FastTooltip from "components/fast_tooltip"; +import ButtonComponent from "oxalis/view/components/button_component"; +import type React from "react"; +import { PureComponent } from "react"; +import { connect } from "react-redux"; +import type { APIDataset } from "types/api_flow_types"; type Props = { flycam: Flycam; diff --git a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx index 5d7083a5596..ae6bdf767bf 100644 --- a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx @@ -1,64 +1,64 @@ +import { CopyOutlined } from "@ant-design/icons"; import { + doWithToken, + downloadAnnotation, + downloadWithFilename, + getAuthToken, + startExportTiffJob, +} from "admin/admin_rest_api"; +import { useStartAndPollJob } from "admin/job/job_hooks"; +import { + Alert, + Button, + Checkbox, + Col, Divider, Modal, - 
Checkbox, + Radio, Row, - Col, Tabs, - Typography, - Button, - Radio, - Alert, - Tooltip, type TabsProps, + Tooltip, + Typography, } from "antd"; -import { CopyOutlined } from "@ant-design/icons"; -import type React from "react"; -import { useState } from "react"; +import { LayerSelection } from "components/layer_selection"; +import features from "features"; +import { formatCountToDataAmountUnit, formatScale } from "libs/format_utils"; import { makeComponentLazy, useFetch } from "libs/react_helpers"; -import { - APIJobType, - type VoxelSize, - type AdditionalAxis, - type APIDataLayer, - type APIDataset, -} from "types/api_flow_types"; import Toast from "libs/toast"; +import { + computeArrayFromBoundingBox, + computeBoundingBoxFromBoundingBoxObject, + computeShapeFromBoundingBox, +} from "libs/utils"; import messages from "messages"; -import { Model } from "oxalis/singletons"; -import features from "features"; +import type { BoundingBoxType, Vector3 } from "oxalis/constants"; import { - doWithToken, - downloadAnnotation, - downloadWithFilename, - getAuthToken, - startExportTiffJob, -} from "admin/admin_rest_api"; -import { BoundingBoxSelection, MagSlider } from "oxalis/view/action-bar/starting_job_modals"; + getByteCountFromLayer, + getDataLayers, + getLayerByName, + getMagInfo, +} from "oxalis/model/accessors/dataset_accessor"; +import { getAdditionalCoordinatesAsString } from "oxalis/model/accessors/flycam_accessor"; import { getUserBoundingBoxesFromState } from "oxalis/model/accessors/tracing_accessor"; import { getReadableNameOfVolumeLayer, getVolumeTracingById, hasVolumeTracings, } from "oxalis/model/accessors/volumetracing_accessor"; -import { - getByteCountFromLayer, - getDataLayers, - getLayerByName, - getMagInfo, -} from "oxalis/model/accessors/dataset_accessor"; -import { useSelector } from "react-redux"; +import { Model } from "oxalis/singletons"; import type { HybridTracing, OxalisState, UserBoundingBox } from "oxalis/store"; +import { 
BoundingBoxSelection, MagSlider } from "oxalis/view/action-bar/starting_job_modals"; +import type React from "react"; +import { useState } from "react"; +import { useSelector } from "react-redux"; import { - computeArrayFromBoundingBox, - computeBoundingBoxFromBoundingBoxObject, - computeShapeFromBoundingBox, -} from "libs/utils"; -import { formatCountToDataAmountUnit, formatScale } from "libs/format_utils"; -import type { BoundingBoxType, Vector3 } from "oxalis/constants"; -import { useStartAndPollJob } from "admin/job/job_hooks"; -import { LayerSelection } from "components/layer_selection"; -import { getAdditionalCoordinatesAsString } from "oxalis/model/accessors/flycam_accessor"; + type APIDataLayer, + type APIDataset, + APIJobType, + type AdditionalAxis, + type VoxelSize, +} from "types/api_flow_types"; const { Paragraph, Text } = Typography; type TabKeys = "download" | "export" | "python"; diff --git a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx index ad9e746d336..159e3a20568 100644 --- a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx @@ -1,28 +1,28 @@ -import { Alert, Modal, Button, Select, Form, Spin, Tooltip } from "antd"; -import { connect } from "react-redux"; -import type React from "react"; -import { PureComponent } from "react"; -import type { Dispatch } from "redux"; -import { type APIAnnotation, APIAnnotationTypeEnum } from "types/api_flow_types"; -import { addTreesAndGroupsAction } from "oxalis/model/actions/skeletontracing_actions"; -import { getSkeletonDescriptor } from "oxalis/model/accessors/skeletontracing_accessor"; -import { createMutableTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { - getUnversionedAnnotationInformation, getAnnotationCompoundInformation, getTracingForAnnotationType, + getUnversionedAnnotationInformation, 
} from "admin/admin_rest_api"; -import { location } from "libs/window"; -import InputComponent from "oxalis/view/components/input_component"; +import { Alert, Button, Form, Modal, Select, Spin, Tooltip } from "antd"; +import { makeComponentLazy } from "libs/react_helpers"; import Request from "libs/request"; -import Constants from "oxalis/constants"; -import type { OxalisState, MutableTreeMap, TreeGroup } from "oxalis/store"; -import Store from "oxalis/store"; import Toast from "libs/toast"; import * as Utils from "libs/utils"; -import { api } from "oxalis/singletons"; +import { location } from "libs/window"; import messages from "messages"; -import { makeComponentLazy } from "libs/react_helpers"; +import Constants from "oxalis/constants"; +import { getSkeletonDescriptor } from "oxalis/model/accessors/skeletontracing_accessor"; +import { addTreesAndGroupsAction } from "oxalis/model/actions/skeletontracing_actions"; +import { createMutableTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; +import { api } from "oxalis/singletons"; +import type { MutableTreeMap, OxalisState, TreeGroup } from "oxalis/store"; +import Store from "oxalis/store"; +import InputComponent from "oxalis/view/components/input_component"; +import type React from "react"; +import { PureComponent } from "react"; +import { connect } from "react-redux"; +import type { Dispatch } from "redux"; +import { type APIAnnotation, APIAnnotationTypeEnum } from "types/api_flow_types"; type ProjectInfo = { id: string; label: string; diff --git a/frontend/javascripts/oxalis/view/action-bar/private_links_view.tsx b/frontend/javascripts/oxalis/view/action-bar/private_links_view.tsx index bed6e40ac78..a40e754721f 100644 --- a/frontend/javascripts/oxalis/view/action-bar/private_links_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/private_links_view.tsx @@ -1,17 +1,24 @@ +import { + CopyOutlined, + DeleteOutlined, + DownOutlined, + EditOutlined, + InfoCircleOutlined, + 
PlusOutlined, +} from "@ant-design/icons"; +import { + useIsFetching, + useIsMutating, + useMutation, + useQuery, + useQueryClient, +} from "@tanstack/react-query"; import { createPrivateLink, deletePrivateLink, getPrivateLinksByAnnotation, updatePrivateLink, } from "admin/admin_rest_api"; -import { - useQuery, - useMutation, - useQueryClient, - useIsFetching, - useIsMutating, -} from "@tanstack/react-query"; -import Toast from "libs/toast"; import { Button, DatePicker, @@ -26,24 +33,17 @@ import { Table, Tooltip, } from "antd"; -import { - CopyOutlined, - DeleteOutlined, - DownOutlined, - EditOutlined, - InfoCircleOutlined, - PlusOutlined, -} from "@ant-design/icons"; -import type { ZarrPrivateLink } from "types/api_flow_types"; +import type { ColumnsType } from "antd/lib/table"; import { AsyncButton, AsyncIconButton } from "components/async_clickables"; -import dayjs from "dayjs"; import FormattedDate from "components/formatted_date"; -import type { ColumnsType } from "antd/lib/table"; +import dayjs from "dayjs"; import { makeComponentLazy } from "libs/react_helpers"; -import type { OxalisState } from "oxalis/store"; -import { useSelector } from "react-redux"; +import Toast from "libs/toast"; import { getDataLayers } from "oxalis/model/accessors/dataset_accessor"; import { getReadableNameByVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; +import type { OxalisState } from "oxalis/store"; +import { useSelector } from "react-redux"; +import type { ZarrPrivateLink } from "types/api_flow_types"; // TODO Remove explicit (error) type declaration when updating to tanstack/query >= 5 // https://github.com/TanStack/query/pull/4706 diff --git a/frontend/javascripts/oxalis/view/action-bar/quick_select_settings.tsx b/frontend/javascripts/oxalis/view/action-bar/quick_select_settings.tsx index 97187d8ca9f..dcf70e2b2e4 100644 --- a/frontend/javascripts/oxalis/view/action-bar/quick_select_settings.tsx +++ 
b/frontend/javascripts/oxalis/view/action-bar/quick_select_settings.tsx @@ -1,20 +1,20 @@ +import { QuestionCircleOutlined } from "@ant-design/icons"; +import { Radio, type RadioChangeEvent } from "antd"; +import FastTooltip from "components/fast_tooltip"; +import features from "features"; +import Shortcut from "libs/shortcut_component"; +import defaultState from "oxalis/default_state"; +import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; +import { showQuickSelectSettingsAction } from "oxalis/model/actions/ui_actions"; import { cancelQuickSelectAction, confirmQuickSelectAction, fineTuneQuickSelectAction, } from "oxalis/model/actions/volumetracing_actions"; -import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; -import { useDispatch, useSelector } from "react-redux"; import type { OxalisState } from "oxalis/store"; -import defaultState from "oxalis/default_state"; -import Shortcut from "libs/shortcut_component"; -import { Radio, type RadioChangeEvent } from "antd"; -import { NumberSliderSetting, SwitchSetting } from "../components/setting_input_views"; +import { useDispatch, useSelector } from "react-redux"; import ButtonComponent from "../components/button_component"; -import { showQuickSelectSettingsAction } from "oxalis/model/actions/ui_actions"; -import features from "features"; -import FastTooltip from "components/fast_tooltip"; -import { QuestionCircleOutlined } from "@ant-design/icons"; +import { NumberSliderSetting, SwitchSetting } from "../components/setting_input_views"; // The maximum depth of 16 also needs to be adapted in the back-end // (at the time of writing, in segmentAnythingMask in DatasetController.scala). 
diff --git a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx index e16722e51f9..602d8318e83 100644 --- a/frontend/javascripts/oxalis/view/action-bar/save_button.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/save_button.tsx @@ -1,21 +1,21 @@ -import { connect } from "react-redux"; -import React from "react"; -import _ from "lodash"; -import Store, { type SaveState } from "oxalis/store"; -import type { OxalisState } from "oxalis/store"; -import ButtonComponent from "oxalis/view/components/button_component"; -import { Model } from "oxalis/singletons"; -import window from "libs/window"; import { CheckOutlined, ExclamationCircleOutlined, HourglassOutlined, LoadingOutlined, } from "@ant-design/icons"; -import ErrorHandling from "libs/error_handling"; -import FastTooltip from "components/fast_tooltip"; import { Tooltip } from "antd"; +import FastTooltip from "components/fast_tooltip"; +import ErrorHandling from "libs/error_handling"; +import window from "libs/window"; +import _ from "lodash"; import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers"; +import { Model } from "oxalis/singletons"; +import Store, { type SaveState } from "oxalis/store"; +import type { OxalisState } from "oxalis/store"; +import ButtonComponent from "oxalis/view/components/button_component"; +import React from "react"; +import { connect } from "react-redux"; type OwnProps = { onClick: (arg0: React.MouseEvent) => Promise; diff --git a/frontend/javascripts/oxalis/view/action-bar/share_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/share_modal_view.tsx index d2c7f16d8bf..bea013e3e48 100644 --- a/frontend/javascripts/oxalis/view/action-bar/share_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/share_modal_view.tsx @@ -1,55 +1,55 @@ -import { - Alert, - Divider, - Radio, - Modal, - Input, - Button, - Row, - Col, - type RadioChangeEvent, - Tooltip, - Space, -} from 
"antd"; import { CompressOutlined, CopyOutlined, ShareAltOutlined } from "@ant-design/icons"; -import { useSelector } from "react-redux"; -import type React from "react"; -import { useState, useEffect } from "react"; -import type { - APIDataset, - APIAnnotationVisibility, - APIAnnotationType, - APITeam, -} from "types/api_flow_types"; import { + createShortLink, + editAnnotation, getDatasetSharingToken, + getSharingTokenFromUrlParameters, getTeamsForSharedAnnotation, - updateTeamsForSharedAnnotation, - editAnnotation, sendAnalyticsEvent, setOthersMayEditForAnnotation, - getSharingTokenFromUrlParameters, - createShortLink, + updateTeamsForSharedAnnotation, } from "admin/admin_rest_api"; +import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; +import { + Alert, + Button, + Col, + Divider, + Input, + Modal, + Radio, + type RadioChangeEvent, + Row, + Space, + Tooltip, +} from "antd"; +import { AsyncButton } from "components/async_clickables"; +import { PricingEnforcedBlur } from "components/pricing_enforcers"; import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; +import { makeComponentLazy } from "libs/react_helpers"; import Toast from "libs/toast"; import { location } from "libs/window"; import _ from "lodash"; import messages from "messages"; -import Store, { type OxalisState } from "oxalis/store"; +import { ControlModeEnum } from "oxalis/constants"; import UrlManager from "oxalis/controller/url_manager"; +import { mayEditAnnotationProperties } from "oxalis/model/accessors/annotation_accessor"; +import { formatUserName } from "oxalis/model/accessors/user_accessor"; import { setAnnotationVisibilityAction, setOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; import { setShareModalVisibilityAction } from "oxalis/model/actions/ui_actions"; -import { ControlModeEnum } from "oxalis/constants"; -import { makeComponentLazy } from "libs/react_helpers"; -import { AsyncButton } from 
"components/async_clickables"; -import { PricingEnforcedBlur } from "components/pricing_enforcers"; -import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; -import { mayEditAnnotationProperties } from "oxalis/model/accessors/annotation_accessor"; -import { formatUserName } from "oxalis/model/accessors/user_accessor"; +import Store, { type OxalisState } from "oxalis/store"; +import type React from "react"; +import { useEffect, useState } from "react"; +import { useSelector } from "react-redux"; +import type { + APIAnnotationType, + APIAnnotationVisibility, + APIDataset, + APITeam, +} from "types/api_flow_types"; const RadioGroup = Radio.Group; const sharingActiveNode = true; diff --git a/frontend/javascripts/oxalis/view/action-bar/share_view_dataset_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/share_view_dataset_modal_view.tsx index 4b6af0c9ffe..2291b9c99d5 100644 --- a/frontend/javascripts/oxalis/view/action-bar/share_view_dataset_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/share_view_dataset_modal_view.tsx @@ -1,11 +1,11 @@ import { CopyOutlined } from "@ant-design/icons"; -import { Modal, Input, Button, Row, Col, Dropdown, Space } from "antd"; -import { useSelector } from "react-redux"; +import { Button, Col, Dropdown, Input, Modal, Row, Space } from "antd"; import { makeComponentLazy } from "libs/react_helpers"; import messages from "messages"; import type { OxalisState } from "oxalis/store"; -import { useDatasetSharingToken, getUrl, CopyableSharingLink } from "./share_modal_view"; +import { useSelector } from "react-redux"; import { useZarrLinkMenu } from "./private_links_view"; +import { CopyableSharingLink, getUrl, useDatasetSharingToken } from "./share_modal_view"; const sharingActiveNode = false; diff --git a/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx b/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx index 3fa2fdb69fe..6bccdeb2029 100644 --- 
a/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx @@ -1,64 +1,64 @@ -import React, { useState } from "react"; -import type { APIJob, APIDataLayer } from "types/api_flow_types"; +import { InfoCircleOutlined } from "@ant-design/icons"; import { - Modal, - Select, + getAiModels, + runInferenceJob, + startAlignSectionsJob, + startMaterializingVolumeAnnotationJob, + startMitochondriaInferralJob, + startNeuronInferralJob, + startNucleiInferralJob, +} from "admin/admin_rest_api"; +import { DatasetNameFormItem } from "admin/dataset/dataset_components"; +import { + Alert, Button, + Card, + Checkbox, Form, + type FormInstance, + Modal, + Radio, Row, + Select, Space, - Radio, - Card, - Tooltip, - Alert, - Tabs, Switch, - type FormInstance, - Checkbox, + Tabs, + Tooltip, } from "antd"; -import { - startNucleiInferralJob, - startMaterializingVolumeAnnotationJob, - startNeuronInferralJob, - startMitochondriaInferralJob, - runInferenceJob, - startAlignSectionsJob, - getAiModels, -} from "admin/admin_rest_api"; -import { useDispatch, useSelector } from "react-redux"; -import { DatasetNameFormItem } from "admin/dataset/dataset_components"; -import { getColorLayers, getSegmentationLayers } from "oxalis/model/accessors/dataset_accessor"; -import { - getActiveSegmentationTracingLayer, - getReadableNameOfVolumeLayer, -} from "oxalis/model/accessors/volumetracing_accessor"; -import { getUserBoundingBoxesFromState } from "oxalis/model/accessors/tracing_accessor"; +import { LayerSelectionFormItem } from "components/layer_selection"; +import { Slider } from "components/slider"; +import features from "features"; +import { V3 } from "libs/mjs"; +import { useGuardedFetch } from "libs/react_helpers"; import Toast from "libs/toast"; -import type { OxalisState, UserBoundingBox } from "oxalis/store"; -import { ControlModeEnum, Unicode, type Vector3 } from "oxalis/constants"; -import { Model, Store } from 
"oxalis/singletons"; import { clamp, computeArrayFromBoundingBox, computeBoundingBoxFromBoundingBoxObject, rgbToHex, } from "libs/utils"; -import { getBaseSegmentationName } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; -import { V3 } from "libs/mjs"; -import type { MagInfo } from "oxalis/model/helpers/mag_info"; -import { isBoundingBoxExportable } from "./download_modal_view"; -import features from "features"; +import _ from "lodash"; +import { ControlModeEnum, Unicode, type Vector3 } from "oxalis/constants"; +import { getColorLayers, getSegmentationLayers } from "oxalis/model/accessors/dataset_accessor"; +import { getUserBoundingBoxesFromState } from "oxalis/model/accessors/tracing_accessor"; +import { + getActiveSegmentationTracingLayer, + getReadableNameOfVolumeLayer, +} from "oxalis/model/accessors/volumetracing_accessor"; import { setAIJobModalStateAction } from "oxalis/model/actions/ui_actions"; -import { InfoCircleOutlined } from "@ant-design/icons"; +import type { MagInfo } from "oxalis/model/helpers/mag_info"; +import { Model, Store } from "oxalis/singletons"; +import type { OxalisState, UserBoundingBox } from "oxalis/store"; +import { getBaseSegmentationName } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import React, { useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; +import type { APIDataLayer, APIJob } from "types/api_flow_types"; import { CollapsibleWorkflowYamlEditor, TrainAiModelFromAnnotationTab, } from "../jobs/train_ai_model"; -import { LayerSelectionFormItem } from "components/layer_selection"; -import { useGuardedFetch } from "libs/react_helpers"; -import _ from "lodash"; import DEFAULT_PREDICT_WORKFLOW from "./default-predict-workflow-template"; -import { Slider } from "components/slider"; +import { isBoundingBoxExportable } from "./download_modal_view"; const { ThinSpace } = Unicode; diff --git a/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx 
b/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx index a48140cc1b6..60916fd6485 100644 --- a/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx @@ -1,16 +1,3 @@ -import { - Radio, - Badge, - Space, - Popover, - type RadioChangeEvent, - Dropdown, - type MenuProps, - Col, - Row, - Divider, - Popconfirm, -} from "antd"; import { ClearOutlined, DownOutlined, @@ -18,22 +5,44 @@ import { InfoCircleOutlined, SettingOutlined, } from "@ant-design/icons"; -import { useSelector, useDispatch } from "react-redux"; +import { + Badge, + Col, + Divider, + Dropdown, + type MenuProps, + Popconfirm, + Popover, + Radio, + type RadioChangeEvent, + Row, + Space, +} from "antd"; import React, { useEffect, useCallback, useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; -import { showToastWarningForLargestSegmentIdMissing } from "oxalis/view/largest_segment_id_modal"; -import { LogSliderSetting } from "oxalis/view/components/setting_input_views"; -import { addUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; +import { useKeyPress, usePrevious } from "libs/react_hooks"; +import { document } from "libs/window"; import { - interpolateSegmentationLayerAction, - createCellAction, - setMousePositionAction, -} from "oxalis/model/actions/volumetracing_actions"; + type AnnotationTool, + AnnotationToolEnum, + FillModeEnum, + type InterpolationMode, + InterpolationModeEnum, + MappingStatusEnum, + MeasurementTools, + type OverwriteMode, + OverwriteModeEnum, + ToolsWithInterpolationCapabilities, + ToolsWithOverwriteCapabilities, + Unicode, + VolumeTools, +} from "oxalis/constants"; +import { getActiveTree } from "oxalis/model/accessors/skeletontracing_accessor"; import { - createTreeAction, - setMergerModeEnabledAction, -} from "oxalis/model/actions/skeletontracing_actions"; -import { document } from "libs/window"; + adaptActiveToolToShortcuts, + 
getDisabledInfoForTools, +} from "oxalis/model/accessors/tool_accessor"; import { getActiveSegmentationTracing, getMappingInfoForVolumeTracing, @@ -43,49 +52,40 @@ import { hasAgglomerateMapping, hasEditableMapping, } from "oxalis/model/accessors/volumetracing_accessor"; -import { getActiveTree } from "oxalis/model/accessors/skeletontracing_accessor"; +import { addUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions"; +import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; import { - getDisabledInfoForTools, - adaptActiveToolToShortcuts, -} from "oxalis/model/accessors/tool_accessor"; + createTreeAction, + setMergerModeEnabledAction, +} from "oxalis/model/actions/skeletontracing_actions"; import { setToolAction, showQuickSelectSettingsAction } from "oxalis/model/actions/ui_actions"; -import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; -import { usePrevious, useKeyPress } from "libs/react_hooks"; -import { userSettings } from "types/schemas/user_settings.schema"; -import ButtonComponent from "oxalis/view/components/button_component"; -import { MaterializeVolumeAnnotationModal } from "oxalis/view/action-bar/starting_job_modals"; import { - ToolsWithOverwriteCapabilities, - AnnotationToolEnum, - OverwriteModeEnum, - FillModeEnum, - VolumeTools, - MappingStatusEnum, - type AnnotationTool, - type OverwriteMode, - ToolsWithInterpolationCapabilities, - InterpolationModeEnum, - type InterpolationMode, - Unicode, - MeasurementTools, -} from "oxalis/constants"; + createCellAction, + interpolateSegmentationLayerAction, + setMousePositionAction, +} from "oxalis/model/actions/volumetracing_actions"; import { Model } from "oxalis/singletons"; import Store, { type BrushPresets, type OxalisState } from "oxalis/store"; +import { MaterializeVolumeAnnotationModal } from "oxalis/view/action-bar/starting_job_modals"; +import ButtonComponent from "oxalis/view/components/button_component"; +import { LogSliderSetting 
} from "oxalis/view/components/setting_input_views"; +import { showToastWarningForLargestSegmentIdMissing } from "oxalis/view/largest_segment_id_modal"; +import { userSettings } from "types/schemas/user_settings.schema"; +import { updateNovelUserExperienceInfos } from "admin/admin_rest_api"; +import FastTooltip from "components/fast_tooltip"; import features from "features"; +import { useIsActiveUserAdminOrManager } from "libs/react_helpers"; +import defaultState from "oxalis/default_state"; +import { getViewportExtents } from "oxalis/model/accessors/view_mode_accessor"; +import { ensureLayerMappingsAreLoadedAction } from "oxalis/model/actions/dataset_actions"; +import { clearProofreadingByProducts } from "oxalis/model/actions/proofread_actions"; +import { setActiveUserAction } from "oxalis/model/actions/user_actions"; import { getInterpolationInfo } from "oxalis/model/sagas/volume/volume_interpolation_saga"; import { rgbaToCSS } from "oxalis/shaders/utils.glsl"; -import { clearProofreadingByProducts } from "oxalis/model/actions/proofread_actions"; -import { QuickSelectControls } from "./quick_select_settings"; import type { MenuInfo } from "rc-menu/lib/interface"; -import { getViewportExtents } from "oxalis/model/accessors/view_mode_accessor"; -import { ensureLayerMappingsAreLoadedAction } from "oxalis/model/actions/dataset_actions"; import { APIJobType } from "types/api_flow_types"; -import { useIsActiveUserAdminOrManager } from "libs/react_helpers"; -import { updateNovelUserExperienceInfos } from "admin/admin_rest_api"; -import { setActiveUserAction } from "oxalis/model/actions/user_actions"; -import FastTooltip from "components/fast_tooltip"; -import defaultState from "oxalis/default_state"; +import { QuickSelectControls } from "./quick_select_settings"; const NARROW_BUTTON_STYLE = { paddingLeft: 10, diff --git a/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.tsx b/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.tsx index 
ef0fcf610f0..e93029c0ef4 100644 --- a/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.tsx @@ -1,84 +1,84 @@ -import { Button, Dropdown, Modal, Space, Tooltip } from "antd"; import { - HistoryOutlined, CheckCircleOutlined, CheckOutlined, CodeSandboxOutlined, CopyOutlined, DeleteOutlined, DisconnectOutlined, - DownloadOutlined, DownOutlined, + DownloadOutlined, FileAddOutlined, FolderOpenOutlined, + HistoryOutlined, InfoCircleOutlined, LayoutOutlined, LinkOutlined, + LockOutlined, PlusOutlined, RollbackOutlined, SaveOutlined, SettingOutlined, ShareAltOutlined, StopOutlined, + UnlockOutlined, VerticalLeftOutlined, VerticalRightOutlined, - UnlockOutlined, - LockOutlined, } from "@ant-design/icons"; -import { connect } from "react-redux"; -import * as React from "react"; -import type { APIAnnotationType, APIUser, APIUserBase } from "types/api_flow_types"; -import { APIAnnotationTypeEnum, TracingTypeEnum } from "types/api_flow_types"; -import { AsyncButton, type AsyncButtonProps } from "components/async_clickables"; -import type { LayoutKeys } from "oxalis/view/layouting/default_layout_configs"; -import { mapLayoutKeysToLanguage } from "oxalis/view/layouting/default_layout_configs"; import { + createExplorational, duplicateAnnotation, + editLockedState, finishAnnotation, reOpenAnnotation, - createExplorational, - editLockedState, } from "admin/admin_rest_api"; +import { withAuthentication } from "admin/auth/authentication_modal"; +import { Button, Dropdown, Modal, Space, Tooltip } from "antd"; +import type { ItemType, SubMenuType } from "antd/es/menu/interface"; +import { AsyncButton, type AsyncButtonProps } from "components/async_clickables"; +import features from "features"; +import Toast from "libs/toast"; +import UserLocalStorage from "libs/user_local_storage"; +import * as Utils from "libs/utils"; import { location } from "libs/window"; +import messages from "messages"; 
+import Constants, { ControlModeEnum } from "oxalis/constants"; +import UrlManager from "oxalis/controller/url_manager"; +import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; +import { getTracingType } from "oxalis/model/accessors/tracing_accessor"; +import { + disableSavingAction, + dispatchRedoAsync, + dispatchUndoAsync, +} from "oxalis/model/actions/save_actions"; +import { setTracingAction } from "oxalis/model/actions/skeletontracing_actions"; import { - setVersionRestoreVisibilityAction, setDownloadModalVisibilityAction, - setShareModalVisibilityAction, setRenderAnimationModalVisibilityAction, + setShareModalVisibilityAction, + setVersionRestoreVisibilityAction, } from "oxalis/model/actions/ui_actions"; -import { setTracingAction } from "oxalis/model/actions/skeletontracing_actions"; -import { enforceSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; +import { Model } from "oxalis/singletons"; +import { api } from "oxalis/singletons"; import type { BusyBlockingInfo, OxalisState, RestrictionsAndSettings, Task } from "oxalis/store"; import Store from "oxalis/store"; -import { - dispatchUndoAsync, - dispatchRedoAsync, - disableSavingAction, -} from "oxalis/model/actions/save_actions"; -import ButtonComponent from "oxalis/view/components/button_component"; -import Constants, { ControlModeEnum } from "oxalis/constants"; +import DownloadModalView from "oxalis/view/action-bar/download_modal_view"; import MergeModalView from "oxalis/view/action-bar/merge_modal_view"; -import { Model } from "oxalis/singletons"; import SaveButton from "oxalis/view/action-bar/save_button"; import ShareModalView from "oxalis/view/action-bar/share_modal_view"; -import DownloadModalView from "oxalis/view/action-bar/download_modal_view"; import UserScriptsModalView from "oxalis/view/action-bar/user_scripts_modal_view"; -import { api } from "oxalis/singletons"; -import messages from "messages"; import { - screenshotMenuItem, 
renderAnimationMenuItem, + screenshotMenuItem, } from "oxalis/view/action-bar/view_dataset_actions_view"; -import * as Utils from "libs/utils"; -import UserLocalStorage from "libs/user_local_storage"; -import features from "features"; -import { getTracingType } from "oxalis/model/accessors/tracing_accessor"; -import Toast from "libs/toast"; -import UrlManager from "oxalis/controller/url_manager"; -import { withAuthentication } from "admin/auth/authentication_modal"; -import { PrivateLinksModal } from "./private_links_view"; -import type { ItemType, SubMenuType } from "antd/es/menu/interface"; +import ButtonComponent from "oxalis/view/components/button_component"; +import type { LayoutKeys } from "oxalis/view/layouting/default_layout_configs"; +import { mapLayoutKeysToLanguage } from "oxalis/view/layouting/default_layout_configs"; +import * as React from "react"; +import { connect } from "react-redux"; +import type { APIAnnotationType, APIUser, APIUserBase } from "types/api_flow_types"; +import { APIAnnotationTypeEnum, TracingTypeEnum } from "types/api_flow_types"; import CreateAnimationModal from "./create_animation_modal"; +import { PrivateLinksModal } from "./private_links_view"; const AsyncButtonWithAuthentication = withAuthentication( AsyncButton, diff --git a/frontend/javascripts/oxalis/view/action-bar/user_scripts_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/user_scripts_modal_view.tsx index 27910cdbf73..73f9cb040ee 100644 --- a/frontend/javascripts/oxalis/view/action-bar/user_scripts_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/user_scripts_modal_view.tsx @@ -1,13 +1,13 @@ /* eslint-disable no-eval */ -import { Modal, Input, Select, Spin } from "antd"; -import * as React from "react"; -import type { Script } from "oxalis/store"; -import { alert } from "libs/window"; -import { fetchGistContent } from "libs/gist"; +import { Input, Modal, Select, Spin } from "antd"; import { handleGenericError } from "libs/error_handling"; 
+import { fetchGistContent } from "libs/gist"; +import { makeComponentLazy } from "libs/react_helpers"; import Request from "libs/request"; +import { alert } from "libs/window"; import messages from "messages"; -import { makeComponentLazy } from "libs/react_helpers"; +import type { Script } from "oxalis/store"; +import * as React from "react"; const { TextArea } = Input; diff --git a/frontend/javascripts/oxalis/view/action-bar/view_dataset_actions_view.tsx b/frontend/javascripts/oxalis/view/action-bar/view_dataset_actions_view.tsx index 74723d5c024..439ef88ff1d 100644 --- a/frontend/javascripts/oxalis/view/action-bar/view_dataset_actions_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/view_dataset_actions_view.tsx @@ -1,24 +1,24 @@ -import { useSelector } from "react-redux"; -import { Dropdown, type MenuProps } from "antd"; import { - ShareAltOutlined, - DownOutlined, - VideoCameraOutlined, CameraOutlined, + DownOutlined, DownloadOutlined, + ShareAltOutlined, + VideoCameraOutlined, } from "@ant-design/icons"; -import ButtonComponent from "oxalis/view/components/button_component"; -import ShareViewDatasetModalView from "oxalis/view/action-bar/share_view_dataset_modal_view"; -import { downloadScreenshot } from "oxalis/view/rendering_utils"; +import { Dropdown, type MenuProps } from "antd"; +import type { MenuItemType, SubMenuType } from "antd/es/menu/interface"; import { setPythonClientModalVisibilityAction, - setShareModalVisibilityAction, setRenderAnimationModalVisibilityAction, + setShareModalVisibilityAction, } from "oxalis/model/actions/ui_actions"; import Store, { type OxalisState } from "oxalis/store"; -import type { MenuItemType, SubMenuType } from "antd/es/menu/interface"; -import DownloadModalView from "./download_modal_view"; +import ShareViewDatasetModalView from "oxalis/view/action-bar/share_view_dataset_modal_view"; +import ButtonComponent from "oxalis/view/components/button_component"; +import { downloadScreenshot } from 
"oxalis/view/rendering_utils"; +import { useSelector } from "react-redux"; import CreateAnimationModal from "./create_animation_modal"; +import DownloadModalView from "./download_modal_view"; type Props = { layoutMenu: SubMenuType; diff --git a/frontend/javascripts/oxalis/view/action-bar/view_modes_view.tsx b/frontend/javascripts/oxalis/view/action-bar/view_modes_view.tsx index cb74404fe05..d8cea72cf5f 100644 --- a/frontend/javascripts/oxalis/view/action-bar/view_modes_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/view_modes_view.tsx @@ -1,16 +1,16 @@ import { Button, Dropdown, type MenuProps, Space } from "antd"; -import { connect } from "react-redux"; -import type { Dispatch } from "redux"; -import { PureComponent } from "react"; +import * as Utils from "libs/utils"; +import { type ViewMode, ViewModeValues } from "oxalis/constants"; +import constants from "oxalis/constants"; import { - setViewModeAction, setFlightmodeRecordingAction, + setViewModeAction, } from "oxalis/model/actions/settings_actions"; -import type { OxalisState, AllowedMode } from "oxalis/store"; +import type { AllowedMode, OxalisState } from "oxalis/store"; import Store from "oxalis/store"; -import * as Utils from "libs/utils"; -import { type ViewMode, ViewModeValues } from "oxalis/constants"; -import constants from "oxalis/constants"; +import { PureComponent } from "react"; +import { connect } from "react-redux"; +import type { Dispatch } from "redux"; import type { EmptyObject } from "types/globals"; type StateProps = { diff --git a/frontend/javascripts/oxalis/view/action_bar_view.tsx b/frontend/javascripts/oxalis/view/action_bar_view.tsx index d011c949e5a..8ef468bbbec 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.tsx +++ b/frontend/javascripts/oxalis/view/action_bar_view.tsx @@ -1,45 +1,45 @@ -import { Alert, Popover } from "antd"; -import { connect, useDispatch, useSelector } from "react-redux"; -import * as React from "react"; -import type { APIDataset, APIUser 
} from "types/api_flow_types"; import { createExplorational } from "admin/admin_rest_api"; -import { - layoutEmitter, - deleteLayout, - getLayoutConfig, - addNewLayout, -} from "oxalis/view/layouting/layout_persistence"; -import AddNewLayoutModal from "oxalis/view/action-bar/add_new_layout_modal"; import { withAuthentication } from "admin/auth/authentication_modal"; -import { type ViewMode, type ControlMode, MappingStatusEnum } from "oxalis/constants"; +import { Alert, Popover } from "antd"; +import { AsyncButton, type AsyncButtonProps } from "components/async_clickables"; +import { isUserAdminOrTeamManager } from "libs/utils"; +import { ArbitraryVectorInput } from "libs/vector_input"; +import { type ControlMode, MappingStatusEnum, type ViewMode } from "oxalis/constants"; import constants, { ControlModeEnum } from "oxalis/constants"; -import DatasetPositionView from "oxalis/view/action-bar/dataset_position_view"; +import { + doesSupportVolumeWithFallback, + getColorLayers, + getMappingInfoForSupportedLayer, + getUnifiedAdditionalCoordinates, + getVisibleSegmentationLayer, + is2dDataset, +} from "oxalis/model/accessors/dataset_accessor"; +import { setAdditionalCoordinatesAction } from "oxalis/model/actions/flycam_actions"; +import { setAIJobModalStateAction } from "oxalis/model/actions/ui_actions"; import type { OxalisState } from "oxalis/store"; import Store from "oxalis/store"; +import AddNewLayoutModal from "oxalis/view/action-bar/add_new_layout_modal"; +import DatasetPositionView from "oxalis/view/action-bar/dataset_position_view"; +import ToolbarView from "oxalis/view/action-bar/toolbar_view"; import TracingActionsView, { getLayoutMenu, type LayoutProps, } from "oxalis/view/action-bar/tracing_actions_view"; import ViewDatasetActionsView from "oxalis/view/action-bar/view_dataset_actions_view"; import ViewModesView from "oxalis/view/action-bar/view_modes_view"; -import ToolbarView from "oxalis/view/action-bar/toolbar_view"; import { - is2dDataset, - 
doesSupportVolumeWithFallback, - getVisibleSegmentationLayer, - getMappingInfoForSupportedLayer, - getUnifiedAdditionalCoordinates, - getColorLayers, -} from "oxalis/model/accessors/dataset_accessor"; -import { AsyncButton, type AsyncButtonProps } from "components/async_clickables"; -import { setAdditionalCoordinatesAction } from "oxalis/model/actions/flycam_actions"; -import { NumberSliderSetting } from "./components/setting_input_views"; -import { ArbitraryVectorInput } from "libs/vector_input"; + addNewLayout, + deleteLayout, + getLayoutConfig, + layoutEmitter, +} from "oxalis/view/layouting/layout_persistence"; +import * as React from "react"; +import { connect, useDispatch, useSelector } from "react-redux"; +import type { APIDataset, APIUser } from "types/api_flow_types"; import { APIJobType, type AdditionalCoordinate } from "types/api_flow_types"; +import { StartAIJobModal, type StartAIJobModalState } from "./action-bar/starting_job_modals"; import ButtonComponent from "./components/button_component"; -import { setAIJobModalStateAction } from "oxalis/model/actions/ui_actions"; -import { type StartAIJobModalState, StartAIJobModal } from "./action-bar/starting_job_modals"; -import { isUserAdminOrTeamManager } from "libs/utils"; +import { NumberSliderSetting } from "./components/setting_input_views"; const VersionRestoreWarning = ( void; diff --git a/frontend/javascripts/oxalis/view/components/border_toggle_button.tsx b/frontend/javascripts/oxalis/view/components/border_toggle_button.tsx index cbc605938e9..42c114318c8 100644 --- a/frontend/javascripts/oxalis/view/components/border_toggle_button.tsx +++ b/frontend/javascripts/oxalis/view/components/border_toggle_button.tsx @@ -1,9 +1,9 @@ -import * as React from "react"; import { Button } from "antd"; -import { connect } from "react-redux"; -import type { OxalisState, BorderOpenStatus } from "oxalis/store"; -import { V2 } from "libs/mjs"; import FastTooltip from "components/fast_tooltip"; +import { V2 } from 
"libs/mjs"; +import type { BorderOpenStatus, OxalisState } from "oxalis/store"; +import * as React from "react"; +import { connect } from "react-redux"; type OwnProps = { onClick: () => void; side: "left" | "right"; diff --git a/frontend/javascripts/oxalis/view/components/button_component.tsx b/frontend/javascripts/oxalis/view/components/button_component.tsx index 0d40c04f964..5683747854b 100644 --- a/frontend/javascripts/oxalis/view/components/button_component.tsx +++ b/frontend/javascripts/oxalis/view/components/button_component.tsx @@ -1,7 +1,7 @@ import { Button, type ButtonProps } from "antd"; -import * as React from "react"; -import _ from "lodash"; import FastTooltip, { type FastTooltipPlacement } from "components/fast_tooltip"; +import _ from "lodash"; +import * as React from "react"; type ButtonComponentProp = ButtonProps & { faIcon?: string; diff --git a/frontend/javascripts/oxalis/view/components/categorization_label.tsx b/frontend/javascripts/oxalis/view/components/categorization_label.tsx index a623a5b80b5..97c0df820ae 100644 --- a/frontend/javascripts/oxalis/view/components/categorization_label.tsx +++ b/frontend/javascripts/oxalis/view/components/categorization_label.tsx @@ -1,8 +1,8 @@ import { Tag, Tooltip } from "antd"; -import UserLocalStorage from "libs/user_local_storage"; -import { type MouseEventHandler, useEffect } from "react"; import { stringToColor } from "libs/format_utils"; import { useEffectOnlyOnce } from "libs/react_hooks"; +import UserLocalStorage from "libs/user_local_storage"; +import { type MouseEventHandler, useEffect } from "react"; type LabelProps = { tag: string; kind: string; diff --git a/frontend/javascripts/oxalis/view/components/checkbox_component.tsx b/frontend/javascripts/oxalis/view/components/checkbox_component.tsx index 1cc02fe803a..f945ff421ee 100644 --- a/frontend/javascripts/oxalis/view/components/checkbox_component.tsx +++ b/frontend/javascripts/oxalis/view/components/checkbox_component.tsx @@ -1,6 +1,6 @@ import 
{ Checkbox } from "antd"; -import * as React from "react"; import _ from "lodash"; +import * as React from "react"; type CheckboxComponentProp = { onClick?: (...args: Array) => any; }; diff --git a/frontend/javascripts/oxalis/view/components/editable_text_label.tsx b/frontend/javascripts/oxalis/view/components/editable_text_label.tsx index 112e28df557..084dd9d099c 100644 --- a/frontend/javascripts/oxalis/view/components/editable_text_label.tsx +++ b/frontend/javascripts/oxalis/view/components/editable_text_label.tsx @@ -1,11 +1,11 @@ -import { Input, type InputProps } from "antd"; import { CheckOutlined, EditOutlined } from "@ant-design/icons"; -import * as React from "react"; +import { Input, type InputProps } from "antd"; +import FastTooltip from "components/fast_tooltip"; import Markdown from "libs/markdown_adapter"; -import { MarkdownModal } from "oxalis/view/components/markdown_modal"; import Toast from "libs/toast"; +import { MarkdownModal } from "oxalis/view/components/markdown_modal"; +import * as React from "react"; import type { ValidationResult } from "../left-border-tabs/modals/add_volume_layer_modal"; -import FastTooltip from "components/fast_tooltip"; type Rule = { message?: string; diff --git a/frontend/javascripts/oxalis/view/components/input_component.tsx b/frontend/javascripts/oxalis/view/components/input_component.tsx index 6db361a48ce..be9cb1f8a0e 100644 --- a/frontend/javascripts/oxalis/view/components/input_component.tsx +++ b/frontend/javascripts/oxalis/view/components/input_component.tsx @@ -1,7 +1,7 @@ import { Input, type InputProps, type InputRef } from "antd"; -import * as React from "react"; -import _ from "lodash"; import FastTooltip from "components/fast_tooltip"; +import _ from "lodash"; +import * as React from "react"; type InputComponentState = { currentValue: React.InputHTMLAttributes["value"] | bigint; diff --git a/frontend/javascripts/oxalis/view/components/markdown_modal.tsx 
b/frontend/javascripts/oxalis/view/components/markdown_modal.tsx index 94997418cb0..e4034ac1e80 100644 --- a/frontend/javascripts/oxalis/view/components/markdown_modal.tsx +++ b/frontend/javascripts/oxalis/view/components/markdown_modal.tsx @@ -1,4 +1,4 @@ -import { Alert, Modal, Button, Row, Col, Input } from "antd"; +import { Alert, Button, Col, Input, Modal, Row } from "antd"; import Markdown from "libs/markdown_adapter"; import * as React from "react"; diff --git a/frontend/javascripts/oxalis/view/components/setting_input_views.tsx b/frontend/javascripts/oxalis/view/components/setting_input_views.tsx index 57c6424722e..61b20829b55 100644 --- a/frontend/javascripts/oxalis/view/components/setting_input_views.tsx +++ b/frontend/javascripts/oxalis/view/components/setting_input_views.tsx @@ -1,14 +1,3 @@ -import { - Row, - Col, - InputNumber, - Switch, - Input, - Select, - Popover, - type PopoverProps, - type MenuProps, -} from "antd"; import { BorderInnerOutlined, DeleteOutlined, @@ -18,19 +7,30 @@ import { InfoCircleOutlined, ScanOutlined, } from "@ant-design/icons"; -import * as React from "react"; -import _ from "lodash"; -import type { Vector3, Vector6 } from "oxalis/constants"; +import { + Col, + Input, + InputNumber, + type MenuProps, + Popover, + type PopoverProps, + Row, + Select, + Switch, +} from "antd"; +import FastTooltip from "components/fast_tooltip"; +import { Slider } from "components/slider"; +import Toast from "libs/toast"; import * as Utils from "libs/utils"; +import _ from "lodash"; import messages from "messages"; +import type { Vector3, Vector6 } from "oxalis/constants"; import { getVisibleSegmentationLayer } from "oxalis/model/accessors/dataset_accessor"; -import { connect } from "react-redux"; +import { api } from "oxalis/singletons"; import type { OxalisState } from "oxalis/store"; +import * as React from "react"; +import { connect } from "react-redux"; import type { APISegmentationLayer } from "types/api_flow_types"; -import { api } from 
"oxalis/singletons"; -import FastTooltip from "components/fast_tooltip"; -import Toast from "libs/toast"; -import { Slider } from "components/slider"; const ROW_GUTTER = 1; diff --git a/frontend/javascripts/oxalis/view/context_menu.tsx b/frontend/javascripts/oxalis/view/context_menu.tsx index 4b9e5a7cd90..1abc5a7ba74 100644 --- a/frontend/javascripts/oxalis/view/context_menu.tsx +++ b/frontend/javascripts/oxalis/view/context_menu.tsx @@ -1,71 +1,65 @@ import { CopyOutlined, PushpinOutlined, ReloadOutlined, WarningOutlined } from "@ant-design/icons"; -import type { Dispatch } from "redux"; +import { getSegmentBoundingBoxes, getSegmentVolumes } from "admin/admin_rest_api"; import { + ConfigProvider, Dropdown, Empty, - notification, - Popover, Input, type MenuProps, Modal, - ConfigProvider, + Popover, + notification, } from "antd"; -import { useSelector } from "react-redux"; -import React, { createContext, type MouseEvent, useContext, useEffect, useState } from "react"; import type { - APIConnectomeFile, - APIDataset, - APIDataLayer, - APIMeshFile, - VoxelSize, -} from "types/api_flow_types"; -import type { - ActiveMappingInfo, - MutableNode, - OxalisState, - SegmentMap, - SkeletonTracing, - Tree, - UserBoundingBox, - VolumeTracing, -} from "oxalis/store"; + ItemType, + MenuItemGroupType, + MenuItemType, + SubMenuType, +} from "antd/es/menu/interface"; +import { AsyncIconButton } from "components/async_clickables"; +import FastTooltip from "components/fast_tooltip"; +import { formatLengthAsVx, formatNumberToLength, formatNumberToVolume } from "libs/format_utils"; +import { V3 } from "libs/mjs"; +import { useFetch } from "libs/react_helpers"; +import Shortcut from "libs/shortcut_component"; +import Toast from "libs/toast"; +import { hexToRgb, rgbToHex, roundTo, truncateStringToLength } from "libs/utils"; +import messages from "messages"; import { + AltOrOptionKey, type AnnotationTool, - type Vector3, - type OrthoView, AnnotationToolEnum, - VolumeTools, - 
AltOrOptionKey, CtrlOrCmdKey, LongUnitToShortUnitMap, + type OrthoView, type UnitLong, + type Vector3, + VolumeTools, } from "oxalis/constants"; -import { V3 } from "libs/mjs"; import { - loadAdHocMeshAction, - loadPrecomputedMeshAction, -} from "oxalis/model/actions/segmentation_actions"; + loadAgglomerateSkeletonAtPosition, + loadSynapsesOfAgglomerateAtPosition, +} from "oxalis/controller/combinations/segmentation_handlers"; +import { handleCreateNodeFromGlobalPosition } from "oxalis/controller/combinations/skeleton_handlers"; import { - addUserBoundingBoxAction, - deleteUserBoundingBoxAction, - changeUserBoundingBoxAction, - maybeFetchMeshFilesAction, - removeMeshAction, - updateMeshVisibilityAction, - refreshMeshAction, -} from "oxalis/model/actions/annotation_actions"; + getSegmentIdForPosition, + getSegmentIdForPositionAsync, + handleFloodFillFromGlobalPosition, +} from "oxalis/controller/combinations/volume_handlers"; import { - deleteEdgeAction, - mergeTreesAction, - deleteNodeAsUserAction, - setActiveNodeAction, - createTreeAction, - setTreeVisibilityAction, - createBranchPointAction, - deleteBranchpointByIdAction, - addTreesAndGroupsAction, -} from "oxalis/model/actions/skeletontracing_actions"; -import { formatNumberToLength, formatLengthAsVx, formatNumberToVolume } from "libs/format_utils"; + getMagInfo, + getMappingInfo, + getMaybeSegmentIndexAvailability, + getVisibleSegmentationLayer, +} from "oxalis/model/accessors/dataset_accessor"; +import { + getNodeAndTree, + getNodeAndTreeOrNull, + getNodePosition, + isSkeletonLayerTransformed, +} from "oxalis/model/accessors/skeletontracing_accessor"; +import { getDisabledInfoForTools } from "oxalis/model/accessors/tool_accessor"; +import { maybeGetSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { getActiveCellId, getActiveSegmentationTracing, @@ -75,71 +69,77 @@ import { hasEditableMapping, } from "oxalis/model/accessors/volumetracing_accessor"; import { - getNodeAndTree, - 
getNodeAndTreeOrNull, - getNodePosition, - isSkeletonLayerTransformed, -} from "oxalis/model/accessors/skeletontracing_accessor"; + addUserBoundingBoxAction, + changeUserBoundingBoxAction, + deleteUserBoundingBoxAction, + maybeFetchMeshFilesAction, + refreshMeshAction, + removeMeshAction, + updateMeshVisibilityAction, +} from "oxalis/model/actions/annotation_actions"; import { - getSegmentIdForPosition, - getSegmentIdForPositionAsync, - handleFloodFillFromGlobalPosition, -} from "oxalis/controller/combinations/volume_handlers"; + ensureLayerMappingsAreLoadedAction, + ensureSegmentIndexIsLoadedAction, +} from "oxalis/model/actions/dataset_actions"; +import { setPositionAction } from "oxalis/model/actions/flycam_actions"; import { - getVisibleSegmentationLayer, - getMappingInfo, - getMagInfo, - getMaybeSegmentIndexAvailability, -} from "oxalis/model/accessors/dataset_accessor"; + cutAgglomerateFromNeighborsAction, + minCutAgglomerateAction, + minCutAgglomerateWithPositionAction, + proofreadMerge, +} from "oxalis/model/actions/proofread_actions"; import { - loadAgglomerateSkeletonAtPosition, - loadSynapsesOfAgglomerateAtPosition, -} from "oxalis/controller/combinations/segmentation_handlers"; -import { isBoundingBoxUsableForMinCut } from "oxalis/model/sagas/min_cut_saga"; + loadAdHocMeshAction, + loadPrecomputedMeshAction, +} from "oxalis/model/actions/segmentation_actions"; import { - getVolumeRequestUrl, - withMappingActivationConfirmation, -} from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; -import { maybeGetSomeTracing } from "oxalis/model/accessors/tracing_accessor"; + addTreesAndGroupsAction, + createBranchPointAction, + createTreeAction, + deleteBranchpointByIdAction, + deleteEdgeAction, + deleteNodeAsUserAction, + mergeTreesAction, + setActiveNodeAction, + setTreeVisibilityAction, +} from "oxalis/model/actions/skeletontracing_actions"; +import { hideContextMenuAction, setActiveUserBoundingBoxId } from "oxalis/model/actions/ui_actions"; 
import { clickSegmentAction, performMinCutAction, setActiveCellAction, } from "oxalis/model/actions/volumetracing_actions"; -import { roundTo, hexToRgb, rgbToHex, truncateStringToLength } from "libs/utils"; -import { handleCreateNodeFromGlobalPosition } from "oxalis/controller/combinations/skeleton_handlers"; -import Shortcut from "libs/shortcut_component"; -import Toast from "libs/toast"; -import { api } from "oxalis/singletons"; -import messages from "messages"; import { extractPathAsNewTree } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; +import { isBoundingBoxUsableForMinCut } from "oxalis/model/sagas/min_cut_saga"; +import { getBoundingBoxInMag1 } from "oxalis/model/sagas/volume/helpers"; +import { voxelToVolumeInUnit } from "oxalis/model/scaleinfo"; +import { api } from "oxalis/singletons"; +import type { + ActiveMappingInfo, + MutableNode, + OxalisState, + SegmentMap, + SkeletonTracing, + Tree, + UserBoundingBox, + VolumeTracing, +} from "oxalis/store"; import Store from "oxalis/store"; import { - minCutAgglomerateAction, - minCutAgglomerateWithPositionAction, - cutAgglomerateFromNeighborsAction, - proofreadMerge, -} from "oxalis/model/actions/proofread_actions"; -import { setPositionAction } from "oxalis/model/actions/flycam_actions"; + getVolumeRequestUrl, + withMappingActivationConfirmation, +} from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import React, { createContext, type MouseEvent, useContext, useEffect, useState } from "react"; +import { useSelector } from "react-redux"; +import type { Dispatch } from "redux"; import type { - ItemType, - MenuItemGroupType, - MenuItemType, - SubMenuType, -} from "antd/es/menu/interface"; -import { getSegmentBoundingBoxes, getSegmentVolumes } from "admin/admin_rest_api"; -import { useFetch } from "libs/react_helpers"; -import { AsyncIconButton } from "components/async_clickables"; + APIConnectomeFile, + APIDataLayer, + APIDataset, + APIMeshFile, + VoxelSize, +} from 
"types/api_flow_types"; import type { AdditionalCoordinate } from "types/api_flow_types"; -import { voxelToVolumeInUnit } from "oxalis/model/scaleinfo"; -import { getBoundingBoxInMag1 } from "oxalis/model/sagas/volume/helpers"; -import { - ensureLayerMappingsAreLoadedAction, - ensureSegmentIndexIsLoadedAction, -} from "oxalis/model/actions/dataset_actions"; -import { hideContextMenuAction, setActiveUserBoundingBoxId } from "oxalis/model/actions/ui_actions"; -import { getDisabledInfoForTools } from "oxalis/model/accessors/tool_accessor"; -import FastTooltip from "components/fast_tooltip"; import { LoadMeshMenuItemLabel } from "./right-border-tabs/segments_tab/load_mesh_menu_item_label"; type ContextMenuContextValue = React.MutableRefObject | null; diff --git a/frontend/javascripts/oxalis/view/distance_measurement_tooltip.tsx b/frontend/javascripts/oxalis/view/distance_measurement_tooltip.tsx index 85ded86875a..4bb5f5a8d1f 100644 --- a/frontend/javascripts/oxalis/view/distance_measurement_tooltip.tsx +++ b/frontend/javascripts/oxalis/view/distance_measurement_tooltip.tsx @@ -1,30 +1,30 @@ -import { useDispatch, useSelector } from "react-redux"; -import { useEffect, useRef } from "react"; -import type { OxalisState } from "oxalis/store"; -import { - AnnotationToolEnum, - MeasurementTools, - LongUnitToShortUnitMap, - type Vector3, -} from "oxalis/constants"; -import { getPosition } from "oxalis/model/accessors/flycam_accessor"; -import { hideMeasurementTooltipAction } from "oxalis/model/actions/ui_actions"; -import getSceneController from "oxalis/controller/scene_controller_provider"; import { CopyOutlined } from "@ant-design/icons"; import { copyToClipboad } from "admin/voxelytics/utils"; +import { Tooltip } from "antd"; import { - formatNumberToLength, - formatLengthAsVx, formatAreaAsVx, + formatLengthAsVx, formatNumberToArea, + formatNumberToLength, } from "libs/format_utils"; -import { Tooltip } from "antd"; +import { clamp } from "libs/utils"; +import { + 
AnnotationToolEnum, + LongUnitToShortUnitMap, + MeasurementTools, + type Vector3, +} from "oxalis/constants"; +import getSceneController from "oxalis/controller/scene_controller_provider"; +import { getPosition } from "oxalis/model/accessors/flycam_accessor"; import { calculateMaybePlaneScreenPos, getInputCatcherRect, } from "oxalis/model/accessors/view_mode_accessor"; -import { clamp } from "libs/utils"; +import { hideMeasurementTooltipAction } from "oxalis/model/actions/ui_actions"; import dimensions from "oxalis/model/dimensions"; +import type { OxalisState } from "oxalis/store"; +import { useEffect, useRef } from "react"; +import { useDispatch, useSelector } from "react-redux"; const TOOLTIP_HEIGHT = 48; const ADDITIONAL_OFFSET = 12; diff --git a/frontend/javascripts/oxalis/view/help_modal.tsx b/frontend/javascripts/oxalis/view/help_modal.tsx index fc5faef87e2..236ad7a9dee 100644 --- a/frontend/javascripts/oxalis/view/help_modal.tsx +++ b/frontend/javascripts/oxalis/view/help_modal.tsx @@ -1,5 +1,5 @@ import { sendHelpEmail, updateNovelUserExperienceInfos } from "admin/admin_rest_api"; -import { Modal, Input, Alert, message } from "antd"; +import { Alert, Input, Modal, message } from "antd"; import { setActiveUserAction } from "oxalis/model/actions/user_actions"; import type { OxalisState } from "oxalis/store"; import type React from "react"; diff --git a/frontend/javascripts/oxalis/view/input_catcher.tsx b/frontend/javascripts/oxalis/view/input_catcher.tsx index 7c1a4233c60..9527899d054 100644 --- a/frontend/javascripts/oxalis/view/input_catcher.tsx +++ b/frontend/javascripts/oxalis/view/input_catcher.tsx @@ -1,5 +1,6 @@ +import { useEffectOnlyOnce, useKeyPress } from "libs/react_hooks"; +import { waitForCondition } from "libs/utils"; import _ from "lodash"; -import type * as React from "react"; import type { Rect, Viewport } from "oxalis/constants"; import { AnnotationToolEnum, @@ -7,17 +8,16 @@ import { ArbitraryViews, OrthoViews, } from "oxalis/constants"; 
+import { adaptActiveToolToShortcuts } from "oxalis/model/accessors/tool_accessor"; import { setInputCatcherRects } from "oxalis/model/actions/view_mode_actions"; -import Scalebar from "oxalis/view/scalebar"; -import ViewportStatusIndicator from "oxalis/view/viewport_status_indicator"; import type { BusyBlockingInfo, OxalisState } from "oxalis/store"; import Store from "oxalis/store"; import makeRectRelativeToCanvas from "oxalis/view/layouting/layout_canvas_adapter"; -import { waitForCondition } from "libs/utils"; -import { useEffectOnlyOnce, useKeyPress } from "libs/react_hooks"; +import Scalebar from "oxalis/view/scalebar"; +import ViewportStatusIndicator from "oxalis/view/viewport_status_indicator"; +import type * as React from "react"; import { useRef } from "react"; import { useSelector } from "react-redux"; -import { adaptActiveToolToShortcuts } from "oxalis/model/accessors/tool_accessor"; const emptyViewportRect = { top: 0, diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index d0478e6b302..a0e6faa1a57 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -1,53 +1,53 @@ -import React, { useRef, useState } from "react"; +import { + getDataset, + getTracingForAnnotationType, + getUnversionedAnnotationInformation, + runTraining, +} from "admin/admin_rest_api"; import { Alert, - Form, - Row, + Button, + Checkbox, Col, + Collapse, + Form, + type FormInstance, Input, - Button, + Row, Select, - Collapse, Tooltip, - Checkbox, - type FormInstance, } from "antd"; -import { useSelector } from "react-redux"; -import type { HybridTracing, OxalisState, UserBoundingBox, VolumeTracing } from "oxalis/store"; -import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; +import { LayerSelection, LayerSelectionFormItem } from "components/layer_selection"; +import { MagSelectionFormItem } from 
"components/mag_selection"; +import { formatVoxels } from "libs/format_utils"; +import { V3 } from "libs/mjs"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import { computeArrayFromBoundingBox } from "libs/utils"; +import _ from "lodash"; +import type { Vector3, Vector6 } from "oxalis/constants"; import { getColorLayers, getMagInfo, getSegmentationLayers, } from "oxalis/model/accessors/dataset_accessor"; -import { - getUnversionedAnnotationInformation, - getDataset, - getTracingForAnnotationType, - runTraining, -} from "admin/admin_rest_api"; -import { LayerSelection, LayerSelectionFormItem } from "components/layer_selection"; -import Toast from "libs/toast"; -import { Model } from "oxalis/singletons"; +import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { getSegmentationLayerByHumanReadableName } from "oxalis/model/accessors/volumetracing_accessor"; -import _ from "lodash"; import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; -import { formatVoxels } from "libs/format_utils"; -import * as Utils from "libs/utils"; +import { MagInfo } from "oxalis/model/helpers/mag_info"; +import { convertUserBoundingBoxesFromServerToFrontend } from "oxalis/model/reducers/reducer_helpers"; +import { serverVolumeToClientVolumeTracing } from "oxalis/model/reducers/volumetracing_reducer"; +import { Model } from "oxalis/singletons"; +import type { HybridTracing, OxalisState, UserBoundingBox, VolumeTracing } from "oxalis/store"; +import React, { useRef, useState } from "react"; +import { useSelector } from "react-redux"; import { - AnnotationLayerEnum, - type APIDataLayer, type APIAnnotation, + type APIDataLayer, type APIDataset, + AnnotationLayerEnum, type ServerVolumeTracing, } from "types/api_flow_types"; -import type { Vector3, Vector6 } from "oxalis/constants"; -import { serverVolumeToClientVolumeTracing } from "oxalis/model/reducers/volumetracing_reducer"; -import { 
convertUserBoundingBoxesFromServerToFrontend } from "oxalis/model/reducers/reducer_helpers"; -import { computeArrayFromBoundingBox } from "libs/utils"; -import { MagSelectionFormItem } from "components/mag_selection"; -import { MagInfo } from "oxalis/model/helpers/mag_info"; -import { V3 } from "libs/mjs"; const { TextArea } = Input; const FormItem = Form.Item; diff --git a/frontend/javascripts/oxalis/view/largest_segment_id_modal.tsx b/frontend/javascripts/oxalis/view/largest_segment_id_modal.tsx index 4e6f495d208..f4885d4cd2c 100644 --- a/frontend/javascripts/oxalis/view/largest_segment_id_modal.tsx +++ b/frontend/javascripts/oxalis/view/largest_segment_id_modal.tsx @@ -1,20 +1,20 @@ -import * as React from "react"; -import { Button, Modal, InputNumber } from "antd"; -import { useDispatch, useSelector } from "react-redux"; -import { - createCellAction, - setLargestSegmentIdAction, -} from "oxalis/model/actions/volumetracing_actions"; +import { Button, InputNumber, Modal } from "antd"; import renderIndependently from "libs/render_independently"; import Toast from "libs/toast"; -import Store from "oxalis/throttled_store"; -import type { OxalisState, VolumeTracing } from "oxalis/store"; import { mayUserEditDataset } from "libs/utils"; import { getBitDepth, getReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; import { getSegmentationLayerForTracing, getVolumeTracingByLayerName, } from "oxalis/model/accessors/volumetracing_accessor"; +import { + createCellAction, + setLargestSegmentIdAction, +} from "oxalis/model/actions/volumetracing_actions"; +import type { OxalisState, VolumeTracing } from "oxalis/store"; +import Store from "oxalis/throttled_store"; +import * as React from "react"; +import { useDispatch, useSelector } from "react-redux"; import type { APISegmentationLayer } from "types/api_flow_types"; const TOAST_KEY = "enter-largest-segment-id"; diff --git a/frontend/javascripts/oxalis/view/layouting/default_layout_configs.ts 
b/frontend/javascripts/oxalis/view/layouting/default_layout_configs.ts index fdb79b7cc92..111d6fa66a4 100644 --- a/frontend/javascripts/oxalis/view/layouting/default_layout_configs.ts +++ b/frontend/javascripts/oxalis/view/layouting/default_layout_configs.ts @@ -1,3 +1,5 @@ +import { getIsInIframe } from "libs/utils"; +import * as Utils from "libs/utils"; /* * This file defines: * - the main tabs which can be arranged in WK Core @@ -5,7 +7,6 @@ * - a `determineLayout` function which decides which layout type has to be chosen */ import _ from "lodash"; -import { getIsInIframe } from "libs/utils"; import type { BorderTabType, ControlMode, ViewMode } from "oxalis/constants"; import Constants, { ArbitraryViews, @@ -15,17 +16,16 @@ import Constants, { OrthoViews, OrthoViewsToName, } from "oxalis/constants"; -import * as Utils from "libs/utils"; +import { Store } from "oxalis/singletons"; import type { - RowOrTabsetNode, - RowNode, - TabsetNode, - TabNode, - GlobalConfig, Border, + GlobalConfig, ModelConfig, + RowNode, + RowOrTabsetNode, + TabNode, + TabsetNode, } from "./flex_layout_types"; -import { Store } from "oxalis/singletons"; // Increment this number to invalidate old layoutConfigs in localStorage export const currentLayoutVersion = 15; const layoutHeaderHeight = 20; diff --git a/frontend/javascripts/oxalis/view/layouting/flex_layout_helper.ts b/frontend/javascripts/oxalis/view/layouting/flex_layout_helper.ts index 07422c22029..6bc857aa40d 100644 --- a/frontend/javascripts/oxalis/view/layouting/flex_layout_helper.ts +++ b/frontend/javascripts/oxalis/view/layouting/flex_layout_helper.ts @@ -1,4 +1,4 @@ -import { type Model, Actions, type TabSetNode, type BorderNode } from "flexlayout-react"; +import { Actions, type BorderNode, type Model, type TabSetNode } from "flexlayout-react"; import type { BorderOpenStatus } from "oxalis/store"; import type { ModelConfig } from "./flex_layout_types"; diff --git 
a/frontend/javascripts/oxalis/view/layouting/flex_layout_wrapper.tsx b/frontend/javascripts/oxalis/view/layouting/flex_layout_wrapper.tsx index 59b34a3300a..81f4edc5a52 100644 --- a/frontend/javascripts/oxalis/view/layouting/flex_layout_wrapper.tsx +++ b/frontend/javascripts/oxalis/view/layouting/flex_layout_wrapper.tsx @@ -1,49 +1,49 @@ -import type { Dispatch } from "redux"; +import { sendAnalyticsEvent } from "admin/admin_rest_api"; import { Layout } from "antd"; -import { connect } from "react-redux"; +import FastTooltip from "components/fast_tooltip"; +import features from "features"; import * as FlexLayout from "flexlayout-react"; import type { BorderNode, TabNode, TabSetNode } from "flexlayout-react"; -import * as React from "react"; +import { InputKeyboardNoLoop } from "libs/input"; +import Toast from "libs/toast"; import _ from "lodash"; -import features from "features"; +import messages from "messages"; +import type { OrthoView } from "oxalis/constants"; +import { ArbitraryViews, BorderTabs, OrthoViews } from "oxalis/constants"; +import { setBorderOpenStatusAction } from "oxalis/model/actions/ui_actions"; +import { setViewportAction } from "oxalis/model/actions/view_mode_actions"; +import type { BorderOpenStatus, BusyBlockingInfo, OxalisState } from "oxalis/store"; +import Store from "oxalis/store"; +import InputCatcher from "oxalis/view/input_catcher"; import type { LayoutKeys } from "oxalis/view/layouting/default_layout_configs"; import { DEFAULT_LAYOUT_NAME, getTabDescriptorForBorderTab, resetDefaultLayouts, } from "oxalis/view/layouting/default_layout_configs"; -import { InputKeyboardNoLoop } from "libs/input"; -import type { OrthoView } from "oxalis/constants"; -import { OrthoViews, ArbitraryViews, BorderTabs } from "oxalis/constants"; -import { sendAnalyticsEvent } from "admin/admin_rest_api"; -import { setBorderOpenStatusAction } from "oxalis/model/actions/ui_actions"; -import { setViewportAction } from "oxalis/model/actions/view_mode_actions"; 
+import ControlsAndRenderingSettingsTab from "oxalis/view/left-border-tabs/controls_and_rendering_settings_tab"; +import LayerSettingsTab from "oxalis/view/left-border-tabs/layer_settings_tab"; +import RecordingSwitch from "oxalis/view/recording_switch"; import AbstractTreeTab from "oxalis/view/right-border-tabs/abstract_tree_tab"; import BoundingBoxTab from "oxalis/view/right-border-tabs/bounding_box_tab"; import CommentTabView from "oxalis/view/right-border-tabs/comment_tab/comment_tab_view"; import ConnectomeView from "oxalis/view/right-border-tabs/connectome_tab/connectome_view"; -import ControlsAndRenderingSettingsTab from "oxalis/view/left-border-tabs/controls_and_rendering_settings_tab"; import DatasetInfoTabView from "oxalis/view/right-border-tabs/dataset_info_tab_view"; -import InputCatcher from "oxalis/view/input_catcher"; -import LayerSettingsTab from "oxalis/view/left-border-tabs/layer_settings_tab"; -import RecordingSwitch from "oxalis/view/recording_switch"; import SegmentsView from "oxalis/view/right-border-tabs/segments_tab/segments_view"; import SkeletonTabView from "oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view"; import Statusbar from "oxalis/view/statusbar"; -import type { OxalisState, BusyBlockingInfo, BorderOpenStatus } from "oxalis/store"; -import Store from "oxalis/store"; import TDViewControls from "oxalis/view/td_view_controls"; -import Toast from "libs/toast"; -import messages from "messages"; +import * as React from "react"; +import { connect } from "react-redux"; +import type { Dispatch } from "redux"; +import BorderToggleButton from "../components/border_toggle_button"; import { - getMaximizedItemId, - getBorderOpenStatus, adjustModelToBorderOpenStatus, + getBorderOpenStatus, + getMaximizedItemId, getPositionStatusOf, } from "./flex_layout_helper"; -import { layoutEmitter, getLayoutConfig } from "./layout_persistence"; -import BorderToggleButton from "../components/border_toggle_button"; -import FastTooltip from 
"components/fast_tooltip"; +import { getLayoutConfig, layoutEmitter } from "./layout_persistence"; const { Footer } = Layout; diff --git a/frontend/javascripts/oxalis/view/layouting/layout_canvas_adapter.ts b/frontend/javascripts/oxalis/view/layouting/layout_canvas_adapter.ts index 19cbadf5c78..2f38f8e5b89 100644 --- a/frontend/javascripts/oxalis/view/layouting/layout_canvas_adapter.ts +++ b/frontend/javascripts/oxalis/view/layouting/layout_canvas_adapter.ts @@ -1,5 +1,5 @@ -import type { Rect } from "oxalis/constants"; import { document } from "libs/window"; +import type { Rect } from "oxalis/constants"; export default function makeRectRelativeToCanvas(rect: Rect): Rect { const layoutContainerDOM = document.getElementById("render-canvas"); diff --git a/frontend/javascripts/oxalis/view/layouting/layout_persistence.ts b/frontend/javascripts/oxalis/view/layouting/layout_persistence.ts index 6ccb65bd323..223b47ae1df 100644 --- a/frontend/javascripts/oxalis/view/layouting/layout_persistence.ts +++ b/frontend/javascripts/oxalis/view/layouting/layout_persistence.ts @@ -1,11 +1,11 @@ -import { createNanoEvents } from "nanoevents"; -import _ from "lodash"; +import Toast from "libs/toast"; +import UserLocalStorage from "libs/user_local_storage"; import { getIsInIframe } from "libs/utils"; -import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; +import _ from "lodash"; +import { createNanoEvents } from "nanoevents"; import { setStoredLayoutsAction } from "oxalis/model/actions/ui_actions"; +import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; import Store from "oxalis/store"; -import Toast from "libs/toast"; -import UserLocalStorage from "libs/user_local_storage"; import type { LayoutKeys } from "./default_layout_configs"; import getDefaultLayouts, { currentLayoutVersion, diff --git a/frontend/javascripts/oxalis/view/layouting/portal_utils.tsx b/frontend/javascripts/oxalis/view/layouting/portal_utils.tsx index 
b49a1d770af..ef4b5abf477 100644 --- a/frontend/javascripts/oxalis/view/layouting/portal_utils.tsx +++ b/frontend/javascripts/oxalis/view/layouting/portal_utils.tsx @@ -1,6 +1,6 @@ +import { document } from "libs/window"; import * as React from "react"; import ReactDOM from "react-dom"; -import { document } from "libs/window"; // The actual content of a layout pane is a portal target, // to which is rendered within GoldenLayoutAdapter. diff --git a/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx b/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx index 4971fe971ce..f915a7a2691 100644 --- a/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx +++ b/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx @@ -1,4 +1,5 @@ import { Layout } from "antd"; +import app from "app"; import ErrorHandling from "libs/error_handling"; import Request from "libs/request"; import Toast from "libs/toast"; @@ -16,6 +17,7 @@ import { Store } from "oxalis/singletons"; import type { OxalisState, Theme, TraceOrViewCommand } from "oxalis/store"; import ActionBarView from "oxalis/view/action_bar_view"; import WkContextMenu from "oxalis/view/context_menu"; +import DistanceMeasurementTooltip from "oxalis/view/distance_measurement_tooltip"; import { initializeInputCatcherSizes, recalculateInputCatcherSizes, @@ -39,12 +41,10 @@ import { connect } from "react-redux"; import { type RouteComponentProps, withRouter } from "react-router-dom"; import type { Dispatch } from "redux"; import type { APICompoundType } from "types/api_flow_types"; -import DistanceMeasurementTooltip from "oxalis/view/distance_measurement_tooltip"; import TabTitle from "../components/tab_title_component"; import { determineLayout } from "./default_layout_configs"; import FlexLayoutWrapper from "./flex_layout_wrapper"; import { FloatingMobileControls } from "./floating_mobile_controls"; -import app from "app"; const { Sider } = Layout; diff --git 
a/frontend/javascripts/oxalis/view/left-border-tabs/controls_and_rendering_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/controls_and_rendering_settings_tab.tsx index 060fa54423e..3767aef438a 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/controls_and_rendering_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/controls_and_rendering_settings_tab.tsx @@ -1,36 +1,36 @@ +import { ExclamationCircleOutlined } from "@ant-design/icons"; +import { clearCache } from "admin/admin_rest_api"; +import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; import { Collapse, type CollapseProps } from "antd"; -import type { Dispatch } from "redux"; -import { connect } from "react-redux"; -import React, { PureComponent } from "react"; +import FastTooltip from "components/fast_tooltip"; +import { PricingEnforcedSwitchSetting } from "components/pricing_enforcers"; +import Toast from "libs/toast"; import _ from "lodash"; -import type { APIDataset, APIUser } from "types/api_flow_types"; -import { - LogSliderSetting, - NumberSliderSetting, - SwitchSetting, - DropdownSetting, -} from "oxalis/view/components/setting_input_views"; -import type { UserConfiguration, OxalisState, DatasetConfiguration } from "oxalis/store"; -import { clearCache } from "admin/admin_rest_api"; +import messages, { settingsTooltips, settings as settingsLabels } from "messages"; +import type { ViewMode } from "oxalis/constants"; +import Constants, { BLEND_MODES } from "oxalis/constants"; +import defaultState from "oxalis/default_state"; import { getValidZoomRangeForUser } from "oxalis/model/accessors/flycam_accessor"; +import { setZoomStepAction } from "oxalis/model/actions/flycam_actions"; import { updateDatasetSettingAction, updateUserSettingAction, } from "oxalis/model/actions/settings_actions"; import { getGpuFactorsWithLabels } from "oxalis/model/bucket_data_handling/data_rendering_logic"; -import { setZoomStepAction } from 
"oxalis/model/actions/flycam_actions"; -import messages, { settingsTooltips, settings as settingsLabels } from "messages"; -import { userSettings } from "types/schemas/user_settings.schema"; -import type { ViewMode } from "oxalis/constants"; -import Constants, { BLEND_MODES } from "oxalis/constants"; import { api } from "oxalis/singletons"; -import Toast from "libs/toast"; -import { ExclamationCircleOutlined } from "@ant-design/icons"; -import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; -import { PricingEnforcedSwitchSetting } from "components/pricing_enforcers"; +import type { DatasetConfiguration, OxalisState, UserConfiguration } from "oxalis/store"; +import { + DropdownSetting, + LogSliderSetting, + NumberSliderSetting, + SwitchSetting, +} from "oxalis/view/components/setting_input_views"; +import React, { PureComponent } from "react"; +import { connect } from "react-redux"; +import type { Dispatch } from "redux"; +import type { APIDataset, APIUser } from "types/api_flow_types"; import type { ArrayElement } from "types/globals"; -import FastTooltip from "components/fast_tooltip"; -import defaultState from "oxalis/default_state"; +import { userSettings } from "types/schemas/user_settings.schema"; type ControlsAndRenderingSettingsTabProps = { activeUser: APIUser | null | undefined; diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/histogram_view.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/histogram_view.tsx index 9ad2fe5ec1f..00f1bbbbb8f 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/histogram_view.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/histogram_view.tsx @@ -1,16 +1,16 @@ -import type { Dispatch } from "redux"; -import { Alert, Row, Col, InputNumber, Spin } from "antd"; -import { connect } from "react-redux"; -import * as React from "react"; +import { CloseOutlined } from "@ant-design/icons"; +import { Alert, Col, InputNumber, Row, Spin } from "antd"; +import FastTooltip from 
"components/fast_tooltip"; +import { Slider } from "components/slider"; +import { roundTo } from "libs/utils"; import * as _ from "lodash"; import { PRIMARY_COLOR, type Vector2, type Vector3 } from "oxalis/constants"; -import type { APIHistogramData, HistogramDatum, ElementClass } from "types/api_flow_types"; -import { roundTo } from "libs/utils"; import { updateLayerSettingAction } from "oxalis/model/actions/settings_actions"; import type { DatasetLayerConfiguration } from "oxalis/store"; -import { CloseOutlined } from "@ant-design/icons"; -import FastTooltip from "components/fast_tooltip"; -import { Slider } from "components/slider"; +import * as React from "react"; +import { connect } from "react-redux"; +import type { Dispatch } from "redux"; +import type { APIHistogramData, ElementClass, HistogramDatum } from "types/api_flow_types"; type OwnProps = { data: APIHistogramData | null | undefined; diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 30d8af3a370..9c4b9ed726a 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -1,136 +1,136 @@ -import { Button, Col, Divider, Dropdown, type MenuProps, Modal, Row, Switch } from "antd"; -import type { Dispatch } from "redux"; import { EditOutlined, + EllipsisOutlined, InfoCircleOutlined, + LockOutlined, + MenuOutlined, + PlusOutlined, ReloadOutlined, + SaveOutlined, ScanOutlined, - WarningOutlined, - PlusOutlined, - VerticalAlignMiddleOutlined, - LockOutlined, UnlockOutlined, - EllipsisOutlined, - SaveOutlined, - MenuOutlined, + VerticalAlignMiddleOutlined, + WarningOutlined, } from "@ant-design/icons"; -import ErrorHandling from "libs/error_handling"; -import { connect, useDispatch, useSelector } from "react-redux"; -import React, { useCallback } from "react"; -import _ from "lodash"; -import 
classnames from "classnames"; -import update from "immutability-helper"; -import { - type AnnotationLayerType, - APIAnnotationTypeEnum, - AnnotationLayerEnum, - type APIDataLayer, - type APIDataset, - type APISkeletonLayer, - APIJobType, - type EditableLayerProperties, -} from "types/api_flow_types"; -import type { ValueOf } from "types/globals"; -import { HoverIconButton } from "components/hover_icon_button"; -import { - SwitchSetting, - NumberSliderSetting, - LogSliderSetting, - ColorSetting, - SETTING_LEFT_SPAN, - SETTING_MIDDLE_SPAN, - SETTING_VALUE_SPAN, -} from "oxalis/view/components/setting_input_views"; -import { M4x4, V3 } from "libs/mjs"; -import { editAnnotationLayerAction } from "oxalis/model/actions/annotation_actions"; -import { - enforceSkeletonTracing, - getActiveNode, -} from "oxalis/model/accessors/skeletontracing_accessor"; +import { DndContext, type DragEndEvent } from "@dnd-kit/core"; +import { SortableContext, useSortable, verticalListSortingStrategy } from "@dnd-kit/sortable"; +import { CSS } from "@dnd-kit/utilities"; import { - findDataPositionForLayer, clearCache, + findDataPositionForLayer, findDataPositionForVolumeTracing, - updateDatasetDefaultConfiguration, startComputeSegmentIndexFileJob, + updateDatasetDefaultConfiguration, } from "admin/admin_rest_api"; +import { Button, Col, Divider, Dropdown, type MenuProps, Modal, Row, Switch } from "antd"; +import classnames from "classnames"; +import FastTooltip from "components/fast_tooltip"; +import { HoverIconButton } from "components/hover_icon_button"; +import update from "immutability-helper"; +import ErrorHandling from "libs/error_handling"; +import { M4x4, V3 } from "libs/mjs"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import _ from "lodash"; +import { + type RecommendedConfiguration, + layerViewConfigurationTooltips, + layerViewConfigurations, + settings, + settingsTooltips, +} from "messages"; +import type { Vector3 } from "oxalis/constants"; +import 
Constants, { ControlModeEnum, MappingStatusEnum } from "oxalis/constants"; +import defaultState from "oxalis/default_state"; import { getDefaultValueRangeOfLayer, getElementClass, isColorLayer as getIsColorLayer, + getLayerBoundingBox, getLayerByName, getMagInfo, + getTransformsForLayer, getTransformsForLayerOrNull, getWidestMags, - getLayerBoundingBox, - getTransformsForLayer, hasDatasetTransforms, } from "oxalis/model/accessors/dataset_accessor"; import { getMaxZoomValueForMag, getPosition } from "oxalis/model/accessors/flycam_accessor"; +import { + enforceSkeletonTracing, + getActiveNode, +} from "oxalis/model/accessors/skeletontracing_accessor"; import { getAllReadableLayerNames, getReadableNameByVolumeTracingId, getVolumeDescriptorById, getVolumeTracingById, } from "oxalis/model/accessors/volumetracing_accessor"; -import { - setNodeRadiusAction, - setShowSkeletonsAction, -} from "oxalis/model/actions/skeletontracing_actions"; +import { editAnnotationLayerAction } from "oxalis/model/actions/annotation_actions"; import { setPositionAction, setZoomStepAction } from "oxalis/model/actions/flycam_actions"; import { - updateUserSettingAction, - updateDatasetSettingAction, - updateLayerSettingAction, + pushSaveQueueTransaction, + pushSaveQueueTransactionIsolated, +} from "oxalis/model/actions/save_actions"; +import { dispatchClipHistogramAsync, reloadHistogramAction, + updateDatasetSettingAction, + updateLayerSettingAction, + updateUserSettingAction, } from "oxalis/model/actions/settings_actions"; -import { userSettings } from "types/schemas/user_settings.schema"; -import type { Vector3 } from "oxalis/constants"; -import Constants, { ControlModeEnum, MappingStatusEnum } from "oxalis/constants"; -import EditableTextLabel from "oxalis/view/components/editable_text_label"; +import { + setNodeRadiusAction, + setShowSkeletonsAction, +} from "oxalis/model/actions/skeletontracing_actions"; +import { + invertTransform, + transformPointUnscaled, +} from 
"oxalis/model/helpers/transformation_helpers"; +import { addLayerToAnnotation, deleteAnnotationLayer } from "oxalis/model/sagas/update_actions"; import { Model } from "oxalis/singletons"; +import { api } from "oxalis/singletons"; import type { - VolumeTracing, DatasetConfiguration, DatasetLayerConfiguration, OxalisState, UserConfiguration, + VolumeTracing, } from "oxalis/store"; import Store from "oxalis/store"; -import Toast from "libs/toast"; -import * as Utils from "libs/utils"; -import { api } from "oxalis/singletons"; -import { - layerViewConfigurations, - layerViewConfigurationTooltips, - type RecommendedConfiguration, - settings, - settingsTooltips, -} from "messages"; import { MaterializeVolumeAnnotationModal } from "oxalis/view/action-bar/starting_job_modals"; -import AddVolumeLayerModal, { validateReadableLayerName } from "./modals/add_volume_layer_modal"; -import Histogram, { isHistogramSupported } from "./histogram_view"; -import MappingSettingsView from "./mapping_settings_view"; -import { confirmAsync } from "../../../dashboard/dataset/helper_components"; +import EditableTextLabel from "oxalis/view/components/editable_text_label"; import { - invertTransform, - transformPointUnscaled, -} from "oxalis/model/helpers/transformation_helpers"; -import FastTooltip from "components/fast_tooltip"; -import { SortableContext, useSortable, verticalListSortingStrategy } from "@dnd-kit/sortable"; -import { DndContext, type DragEndEvent } from "@dnd-kit/core"; -import { CSS } from "@dnd-kit/utilities"; + ColorSetting, + LogSliderSetting, + NumberSliderSetting, + SETTING_LEFT_SPAN, + SETTING_MIDDLE_SPAN, + SETTING_VALUE_SPAN, + SwitchSetting, +} from "oxalis/view/components/setting_input_views"; +import React, { useCallback } from "react"; +import { connect, useDispatch, useSelector } from "react-redux"; +import type { Dispatch } from "redux"; +import { + APIAnnotationTypeEnum, + type APIDataLayer, + type APIDataset, + APIJobType, + type APISkeletonLayer, + 
AnnotationLayerEnum, + type AnnotationLayerType, + type EditableLayerProperties, +} from "types/api_flow_types"; +import type { ValueOf } from "types/globals"; import { defaultDatasetViewConfigurationWithoutNull, getDefaultLayerViewConfiguration, } from "types/schemas/dataset_view_configuration.schema"; -import defaultState from "oxalis/default_state"; -import { - pushSaveQueueTransaction, - pushSaveQueueTransactionIsolated, -} from "oxalis/model/actions/save_actions"; -import { addLayerToAnnotation, deleteAnnotationLayer } from "oxalis/model/sagas/update_actions"; +import { userSettings } from "types/schemas/user_settings.schema"; +import { confirmAsync } from "../../../dashboard/dataset/helper_components"; +import Histogram, { isHistogramSupported } from "./histogram_view"; +import MappingSettingsView from "./mapping_settings_view"; +import AddVolumeLayerModal, { validateReadableLayerName } from "./modals/add_volume_layer_modal"; type DatasetSettingsProps = ReturnType & ReturnType; diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/mapping_settings_view.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/mapping_settings_view.tsx index 00850c36eae..51de6ac673c 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/mapping_settings_view.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/mapping_settings_view.tsx @@ -1,33 +1,33 @@ import { Select } from "antd"; -import { connect } from "react-redux"; -import React from "react"; -import debounceRender from "react-debounce-render"; -import type { APISegmentationLayer } from "types/api_flow_types"; +import FastTooltip from "components/fast_tooltip"; +import * as Utils from "libs/utils"; +import messages from "messages"; import { MappingStatusEnum } from "oxalis/constants"; -import type { OxalisState, Mapping, MappingType, EditableMapping } from "oxalis/store"; +import { isAnnotationOwner } from "oxalis/model/accessors/annotation_accessor"; import { - getSegmentationLayerByName, 
getMappingInfo, + getSegmentationLayerByName, } from "oxalis/model/accessors/dataset_accessor"; +import { + getEditableMappingForVolumeTracingId, + hasEditableMapping, + isMappingLocked, +} from "oxalis/model/accessors/volumetracing_accessor"; import { ensureLayerMappingsAreLoadedAction, setLayerMappingsAction, } from "oxalis/model/actions/dataset_actions"; import { - setMappingEnabledAction, setHideUnmappedIdsAction, setMappingAction, + setMappingEnabledAction, } from "oxalis/model/actions/settings_actions"; +import type { EditableMapping, Mapping, MappingType, OxalisState } from "oxalis/store"; import { SwitchSetting } from "oxalis/view/components/setting_input_views"; -import * as Utils from "libs/utils"; -import { - getEditableMappingForVolumeTracingId, - hasEditableMapping, - isMappingLocked, -} from "oxalis/model/accessors/volumetracing_accessor"; -import messages from "messages"; -import { isAnnotationOwner } from "oxalis/model/accessors/annotation_accessor"; -import FastTooltip from "components/fast_tooltip"; +import React from "react"; +import debounceRender from "react-debounce-render"; +import { connect } from "react-redux"; +import type { APISegmentationLayer } from "types/api_flow_types"; const { Option, OptGroup } = Select; diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx index 2e200281a0d..07e787a3f31 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/modals/add_volume_layer_modal.tsx @@ -1,34 +1,34 @@ -import { Modal, Row } from "antd"; import { PlusOutlined } from "@ant-design/icons"; -import type React from "react"; -import { useMemo, useState } from "react"; -import _ from "lodash"; -import type { APIDataset, APISegmentationLayer } from "types/api_flow_types"; +import { Modal, Row } from "antd"; import { 
AsyncButton } from "components/async_clickables"; import { NewVolumeLayerSelection, RestrictMagnificationSlider, } from "dashboard/advanced_dataset/create_explorative_modal"; -import Store, { type Tracing } from "oxalis/store"; +import Toast from "libs/toast"; +import _ from "lodash"; +import messages from "messages"; +import { MappingStatusEnum } from "oxalis/constants"; import { - getSomeMagInfoForDataset, getLayerByName, + getMagInfo, getMappingInfo, getSegmentationLayers, - getMagInfo, + getSomeMagInfoForDataset, } from "oxalis/model/accessors/dataset_accessor"; import { getAllReadableLayerNames, getVolumeTracingLayers, } from "oxalis/model/accessors/volumetracing_accessor"; -import messages from "messages"; -import InputComponent from "oxalis/view/components/input_component"; -import { api, Model } from "oxalis/singletons"; -import Toast from "libs/toast"; -import { MappingStatusEnum } from "oxalis/constants"; import { pushSaveQueueTransactionIsolated } from "oxalis/model/actions/save_actions"; -import { useDispatch } from "react-redux"; import { addLayerToAnnotation } from "oxalis/model/sagas/update_actions"; +import { Model, api } from "oxalis/singletons"; +import Store, { type Tracing } from "oxalis/store"; +import InputComponent from "oxalis/view/components/input_component"; +import type React from "react"; +import { useMemo, useState } from "react"; +import { useDispatch } from "react-redux"; +import type { APIDataset, APISegmentationLayer } from "types/api_flow_types"; export type ValidationResult = { isValid: boolean; message: string }; export function checkForLayerNameDuplication( diff --git a/frontend/javascripts/oxalis/view/merger_mode_modal_view.tsx b/frontend/javascripts/oxalis/view/merger_mode_modal_view.tsx index d8491256a56..af45d49d9c6 100644 --- a/frontend/javascripts/oxalis/view/merger_mode_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/merger_mode_modal_view.tsx @@ -1,4 +1,4 @@ -import { Modal, Button, Spin, Tooltip } from "antd"; 
+import { Button, Modal, Spin, Tooltip } from "antd"; type Props = { isCloseable: boolean; diff --git a/frontend/javascripts/oxalis/view/new_task_description_modal.tsx b/frontend/javascripts/oxalis/view/new_task_description_modal.tsx index 6eb409dd798..e912406dbe3 100644 --- a/frontend/javascripts/oxalis/view/new_task_description_modal.tsx +++ b/frontend/javascripts/oxalis/view/new_task_description_modal.tsx @@ -1,4 +1,4 @@ -import { Modal, Button } from "antd"; +import { Button, Modal } from "antd"; import Markdown from "libs/markdown_adapter"; import * as React from "react"; type Props = { diff --git a/frontend/javascripts/oxalis/view/nml_upload_zone_container.tsx b/frontend/javascripts/oxalis/view/nml_upload_zone_container.tsx index 5d37fa67e45..3e084cfa8d1 100644 --- a/frontend/javascripts/oxalis/view/nml_upload_zone_container.tsx +++ b/frontend/javascripts/oxalis/view/nml_upload_zone_container.tsx @@ -1,13 +1,13 @@ -import { Button, Modal, Avatar, List, Spin, Checkbox, Alert } from "antd"; import { FileOutlined, InboxOutlined } from "@ant-design/icons"; -import { connect } from "react-redux"; -import Dropzone, { type DropzoneInputProps } from "react-dropzone"; -import * as React from "react"; +import { Alert, Avatar, Button, Checkbox, List, Modal, Spin } from "antd"; +import FormattedDate from "components/formatted_date"; +import { setDropzoneModalVisibilityAction } from "oxalis/model/actions/ui_actions"; +import type { OxalisState } from "oxalis/store"; import prettyBytes from "pretty-bytes"; +import * as React from "react"; +import Dropzone, { type DropzoneInputProps } from "react-dropzone"; +import { connect } from "react-redux"; import type { Dispatch } from "redux"; -import type { OxalisState } from "oxalis/store"; -import { setDropzoneModalVisibilityAction } from "oxalis/model/actions/ui_actions"; -import FormattedDate from "components/formatted_date"; type State = { files: Array; diff --git 
a/frontend/javascripts/oxalis/view/novel_user_experiences/01-present-modern-controls.tsx b/frontend/javascripts/oxalis/view/novel_user_experiences/01-present-modern-controls.tsx index 8935020703d..c4c878da843 100644 --- a/frontend/javascripts/oxalis/view/novel_user_experiences/01-present-modern-controls.tsx +++ b/frontend/javascripts/oxalis/view/novel_user_experiences/01-present-modern-controls.tsx @@ -1,9 +1,9 @@ -import { Modal, Button } from "antd"; -import * as React from "react"; -import { useSelector, useDispatch } from "react-redux"; -import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; import { updateNovelUserExperienceInfos } from "admin/admin_rest_api"; +import { Button, Modal } from "antd"; +import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; import type { OxalisState } from "oxalis/store"; +import * as React from "react"; +import { useDispatch, useSelector } from "react-redux"; export default function PresentModernControls() { const dispatch = useDispatch(); diff --git a/frontend/javascripts/oxalis/view/novel_user_experiences/welcome_toast.tsx b/frontend/javascripts/oxalis/view/novel_user_experiences/welcome_toast.tsx index 2e9b209fe80..cf5dfc34a32 100644 --- a/frontend/javascripts/oxalis/view/novel_user_experiences/welcome_toast.tsx +++ b/frontend/javascripts/oxalis/view/novel_user_experiences/welcome_toast.tsx @@ -1,9 +1,9 @@ import { Button, notification } from "antd"; -import { useSelector } from "react-redux"; import features from "features"; +import { useEffectOnlyOnce } from "libs/react_hooks"; import UserLocalStorage from "libs/user_local_storage"; import type { OxalisState } from "oxalis/store"; -import { useEffectOnlyOnce } from "libs/react_hooks"; +import { useSelector } from "react-redux"; function showWelcomeToast() { notification.open({ diff --git a/frontend/javascripts/oxalis/view/plane_view.ts b/frontend/javascripts/oxalis/view/plane_view.ts index 9337b67d0c2..8162ef210d8 100644 
--- a/frontend/javascripts/oxalis/view/plane_view.ts +++ b/frontend/javascripts/oxalis/view/plane_view.ts @@ -1,23 +1,23 @@ -import * as THREE from "three"; -// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'twee... Remove this comment to see the full error message -import TWEEN from "tween.js"; -import _ from "lodash"; -import { getGroundTruthLayoutRect } from "oxalis/view/layouting/default_layout_configs"; -import { getInputCatcherRect } from "oxalis/model/accessors/view_mode_accessor"; -import { updateTemporarySettingAction } from "oxalis/model/actions/settings_actions"; -import type { OrthoViewMap, Vector3, Viewport } from "oxalis/constants"; -import Constants, { OrthoViewColors, OrthoViewValues, OrthoViews } from "oxalis/constants"; -import Store from "oxalis/store"; import app from "app"; -import getSceneController from "oxalis/controller/scene_controller_provider"; -import window from "libs/window"; -import { clearCanvas, setupRenderArea } from "oxalis/view/rendering_utils"; import VisibilityAwareRaycaster, { type RaycastIntersection, } from "libs/visibility_aware_raycaster"; -import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; -import { getActiveSegmentationTracing } from "oxalis/model/accessors/volumetracing_accessor"; +import window from "libs/window"; +import _ from "lodash"; +import type { OrthoViewMap, Vector3, Viewport } from "oxalis/constants"; +import Constants, { OrthoViewColors, OrthoViewValues, OrthoViews } from "oxalis/constants"; +import getSceneController from "oxalis/controller/scene_controller_provider"; import type { MeshSceneNode, SceneGroupForMeshes } from "oxalis/controller/segment_mesh_controller"; +import { getInputCatcherRect } from "oxalis/model/accessors/view_mode_accessor"; +import { getActiveSegmentationTracing } from "oxalis/model/accessors/volumetracing_accessor"; +import { updateTemporarySettingAction } from "oxalis/model/actions/settings_actions"; +import { 
listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; +import Store from "oxalis/store"; +import { getGroundTruthLayoutRect } from "oxalis/view/layouting/default_layout_configs"; +import { clearCanvas, setupRenderArea } from "oxalis/view/rendering_utils"; +import * as THREE from "three"; +// @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'twee... Remove this comment to see the full error message +import TWEEN from "tween.js"; const createDirLight = ( position: Vector3, diff --git a/frontend/javascripts/oxalis/view/recommended_configuration_modal.tsx b/frontend/javascripts/oxalis/view/recommended_configuration_modal.tsx index 55732f2eb31..c042f7d2199 100644 --- a/frontend/javascripts/oxalis/view/recommended_configuration_modal.tsx +++ b/frontend/javascripts/oxalis/view/recommended_configuration_modal.tsx @@ -1,9 +1,9 @@ +import { settingComments } from "admin/tasktype/recommended_configuration_view"; import { Modal, Table } from "antd"; -import * as React from "react"; import _ from "lodash"; -import type { RecommendedConfiguration } from "oxalis/store"; -import { settingComments } from "admin/tasktype/recommended_configuration_view"; import messages, { settings } from "messages"; +import type { RecommendedConfiguration } from "oxalis/store"; +import * as React from "react"; const columns = [ { title: "Setting", diff --git a/frontend/javascripts/oxalis/view/recording_switch.tsx b/frontend/javascripts/oxalis/view/recording_switch.tsx index 8d26d2a4019..291d00f0fc8 100644 --- a/frontend/javascripts/oxalis/view/recording_switch.tsx +++ b/frontend/javascripts/oxalis/view/recording_switch.tsx @@ -1,8 +1,8 @@ import { Switch } from "antd"; +import { setFlightmodeRecordingAction } from "oxalis/model/actions/settings_actions"; +import type { OxalisState } from "oxalis/store"; import { connect } from "react-redux"; import type { Dispatch } from "redux"; -import type { OxalisState } from "oxalis/store"; -import { 
setFlightmodeRecordingAction } from "oxalis/model/actions/settings_actions"; type Props = { flightmodeRecording: boolean; onChangeFlightmodeRecording: (arg0: boolean) => void; diff --git a/frontend/javascripts/oxalis/view/remove_tree_modal.tsx b/frontend/javascripts/oxalis/view/remove_tree_modal.tsx index 083f5975ec6..44a5bef82fb 100644 --- a/frontend/javascripts/oxalis/view/remove_tree_modal.tsx +++ b/frontend/javascripts/oxalis/view/remove_tree_modal.tsx @@ -1,9 +1,9 @@ -import { Modal, Checkbox } from "antd"; -import * as React from "react"; +import { Checkbox, Modal } from "antd"; +import type { CheckboxChangeEvent } from "antd/lib/checkbox"; +import messages from "messages"; import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; import Store from "oxalis/store"; -import messages from "messages"; -import type { CheckboxChangeEvent } from "antd/lib/checkbox"; +import * as React from "react"; type Props = { onOk: (...args: Array) => any; destroy?: (...args: Array) => any; diff --git a/frontend/javascripts/oxalis/view/rendering_utils.ts b/frontend/javascripts/oxalis/view/rendering_utils.ts index 62e31e4aa03..fe8a84b52ac 100644 --- a/frontend/javascripts/oxalis/view/rendering_utils.ts +++ b/frontend/javascripts/oxalis/view/rendering_utils.ts @@ -1,6 +1,5 @@ -import * as THREE from "three"; import { saveAs } from "file-saver"; -import Store from "oxalis/store"; +import { convertBufferToImage } from "libs/utils"; import { ARBITRARY_CAM_DISTANCE, type OrthoView } from "oxalis/constants"; import constants, { ArbitraryViewport, @@ -8,10 +7,11 @@ import constants, { OrthoViewValues, OrthoViews, } from "oxalis/constants"; -import { getInputCatcherRect } from "oxalis/model/accessors/view_mode_accessor"; import getSceneController from "oxalis/controller/scene_controller_provider"; import { getFlooredPosition } from "oxalis/model/accessors/flycam_accessor"; -import { convertBufferToImage } from "libs/utils"; +import { getInputCatcherRect } from 
"oxalis/model/accessors/view_mode_accessor"; +import Store from "oxalis/store"; +import * as THREE from "three"; const getBackgroundColor = (): number => Store.getState().uiInformation.theme === "dark" ? 0x000000 : 0xffffff; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/abstract_tree_renderer.ts b/frontend/javascripts/oxalis/view/right-border-tabs/abstract_tree_renderer.ts index 929ea6c270a..40e23290496 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/abstract_tree_renderer.ts +++ b/frontend/javascripts/oxalis/view/right-border-tabs/abstract_tree_renderer.ts @@ -1,7 +1,7 @@ import _ from "lodash"; -import type { Tree } from "oxalis/store"; -import type { Vector2 } from "oxalis/constants"; import messages from "messages"; +import type { Vector2 } from "oxalis/constants"; +import type { Tree } from "oxalis/store"; const NODE_RADIUS = 2; const MAX_NODE_DISTANCE = 100; const CLICK_TRESHOLD = 6; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/abstract_tree_tab.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/abstract_tree_tab.tsx index 0fe78928d1a..2798f4e881a 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/abstract_tree_tab.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/abstract_tree_tab.tsx @@ -1,13 +1,13 @@ import { Button } from "antd"; -import type { Dispatch } from "redux"; -import { connect } from "react-redux"; -import React, { Component } from "react"; +import window from "libs/window"; import _ from "lodash"; -import type { OxalisState, SkeletonTracing } from "oxalis/store"; import { setActiveNodeAction } from "oxalis/model/actions/skeletontracing_actions"; +import type { OxalisState, SkeletonTracing } from "oxalis/store"; import type { NodeListItem } from "oxalis/view/right-border-tabs/abstract_tree_renderer"; import AbstractTreeRenderer from "oxalis/view/right-border-tabs/abstract_tree_renderer"; -import window from "libs/window"; +import React, { Component } from "react"; 
+import { connect } from "react-redux"; +import type { Dispatch } from "redux"; type StateProps = { dispatch: Dispatch; skeletonTracing: SkeletonTracing | null | undefined; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx index 54852103229..1a8b777ba00 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/advanced_search_popover.tsx @@ -1,11 +1,11 @@ -import { Input, Tooltip, Popover, Space, type InputRef } from "antd"; import { CheckSquareOutlined, DownOutlined, UpOutlined } from "@ant-design/icons"; -import * as React from "react"; +import { Input, type InputRef, Popover, Space, Tooltip } from "antd"; +import Shortcut from "libs/shortcut_component"; +import { mod } from "libs/utils"; import memoizeOne from "memoize-one"; import ButtonComponent from "oxalis/view/components/button_component"; -import Shortcut from "libs/shortcut_component"; import DomVisibilityObserver from "oxalis/view/components/dom_visibility_observer"; -import { mod } from "libs/utils"; +import * as React from "react"; const PRIMARY_COLOR = "var(--ant-color-primary)"; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx index cc837f4ac31..7617be4828b 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/bounding_box_tab.tsx @@ -1,32 +1,32 @@ -import { type MenuProps, Table, Tooltip, Typography } from "antd"; import { PlusSquareOutlined } from "@ant-design/icons"; -import { useSelector, useDispatch } from "react-redux"; -import type React from "react"; -import { useCallback, useEffect, useRef, useState } from "react"; +import { type MenuProps, Table, Tooltip, Typography } from "antd"; 
+import * as Utils from "libs/utils"; import _ from "lodash"; -import UserBoundingBoxInput from "oxalis/view/components/setting_input_views"; import { - type Vector3, - type Vector6, type BoundingBoxType, ControlModeEnum, + type Vector3, + type Vector6, } from "oxalis/constants"; +import { isAnnotationOwner } from "oxalis/model/accessors/annotation_accessor"; +import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { - changeUserBoundingBoxAction, addUserBoundingBoxAction, + changeUserBoundingBoxAction, deleteUserBoundingBoxAction, } from "oxalis/model/actions/annotation_actions"; -import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; -import { isAnnotationOwner } from "oxalis/model/accessors/annotation_accessor"; import { setPositionAction } from "oxalis/model/actions/flycam_actions"; -import * as Utils from "libs/utils"; +import { setActiveUserBoundingBoxId } from "oxalis/model/actions/ui_actions"; import type { OxalisState, UserBoundingBox } from "oxalis/store"; -import DownloadModalView from "../action-bar/download_modal_view"; +import UserBoundingBoxInput from "oxalis/view/components/setting_input_views"; +import type React from "react"; +import { useCallback, useEffect, useRef, useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; +import AutoSizer from "react-virtualized-auto-sizer"; import { APIJobType } from "types/api_flow_types"; -import { ContextMenuContainer } from "./sidebar_context_menu"; +import DownloadModalView from "../action-bar/download_modal_view"; import { getContextMenuPositionFromEvent } from "../context_menu"; -import AutoSizer from "react-virtualized-auto-sizer"; -import { setActiveUserBoundingBoxId } from "oxalis/model/actions/ui_actions"; +import { ContextMenuContainer } from "./sidebar_context_menu"; const ADD_BBOX_BUTTON_HEIGHT = 32; const CONTEXT_MENU_CLASS = "bbox-list-context-menu-overlay"; diff --git 
a/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment.tsx index 4d06f190740..8530a56122f 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment.tsx @@ -2,12 +2,12 @@ import { Popover } from "antd"; import type * as React from "react"; import classNames from "classnames"; -import { MarkdownWrapper } from "oxalis/view/components/markdown_modal"; -import { NODE_ID_REF_REGEX, POSITION_REF_REGEX } from "oxalis/constants"; import { document } from "libs/window"; +import { NODE_ID_REF_REGEX, POSITION_REF_REGEX } from "oxalis/constants"; import { setActiveNodeAction } from "oxalis/model/actions/skeletontracing_actions"; import type { CommentType } from "oxalis/store"; import Store from "oxalis/store"; +import { MarkdownWrapper } from "oxalis/view/components/markdown_modal"; function linkify(comment: string) { return comment // Replace linked nodes (#) with a proper link diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx index 64b0e40dc81..345714e97fe 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/comment_tab/comment_tab_view.tsx @@ -1,4 +1,3 @@ -import { Dropdown, Tooltip, Space, Tree as AntdTree, type TreeProps, type GetRef } from "antd"; import { ArrowLeftOutlined, ArrowRightOutlined, @@ -8,24 +7,23 @@ import { SearchOutlined, ShrinkOutlined, } from "@ant-design/icons"; -import { useDispatch, useSelector } from "react-redux"; -import React, { useEffect, useRef, useState } from "react"; +import { Tree as AntdTree, Dropdown, type GetRef, Space, Tooltip, type TreeProps } from "antd"; +import type { EventDataNode } from "antd/es/tree"; 
+import { useLifecycle } from "beautiful-react-hooks"; +import { InputKeyboard } from "libs/input"; +import { useEffectOnlyOnce } from "libs/react_hooks"; +import { compareBy, localeCompareBy } from "libs/utils"; import _ from "lodash"; import memoizeOne from "memoize-one"; -import { Comment, commentListId } from "oxalis/view/right-border-tabs/comment_tab/comment"; -import { compareBy, localeCompareBy } from "libs/utils"; -import { InputKeyboard } from "libs/input"; -import { MarkdownModal } from "oxalis/view/components/markdown_modal"; -import { cachedDiffTrees } from "oxalis/model/sagas/skeletontracing_saga"; +import messages from "messages"; +import { isAnnotationOwner } from "oxalis/model/accessors/annotation_accessor"; import { getActiveNode, getSkeletonTracing } from "oxalis/model/accessors/skeletontracing_accessor"; import { - setActiveNodeAction, createCommentAction, deleteCommentAction, + setActiveNodeAction, } from "oxalis/model/actions/skeletontracing_actions"; -import ButtonComponent from "oxalis/view/components/button_component"; -import DomVisibilityObserver from "oxalis/view/components/dom_visibility_observer"; -import InputComponent from "oxalis/view/components/input_component"; +import { cachedDiffTrees } from "oxalis/model/sagas/skeletontracing_saga"; import type { CommentType, MutableCommentType, @@ -34,16 +32,18 @@ import type { Tree, TreeMap, } from "oxalis/store"; -import messages from "messages"; -import AdvancedSearchPopover from "../advanced_search_popover"; +import ButtonComponent from "oxalis/view/components/button_component"; +import DomVisibilityObserver from "oxalis/view/components/dom_visibility_observer"; +import InputComponent from "oxalis/view/components/input_component"; +import { MarkdownModal } from "oxalis/view/components/markdown_modal"; +import { Comment, commentListId } from "oxalis/view/right-border-tabs/comment_tab/comment"; import type { MenuProps } from "rc-menu"; -import type { Comparator } from "types/globals"; -import 
type { EventDataNode } from "antd/es/tree"; +import React, { useEffect, useRef, useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; import AutoSizer from "react-virtualized-auto-sizer"; -import { useEffectOnlyOnce } from "libs/react_hooks"; +import type { Comparator } from "types/globals"; +import AdvancedSearchPopover from "../advanced_search_popover"; import { ColoredDotIcon } from "../segments_tab/segment_list_item"; -import { useLifecycle } from "beautiful-react-hooks"; -import { isAnnotationOwner } from "oxalis/model/accessors/annotation_accessor"; const commentTabId = "commentTabId"; enum SortByEnum { diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_filters.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_filters.tsx index de669865627..7afa6ab593b 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_filters.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_filters.tsx @@ -1,14 +1,14 @@ -import { Checkbox, Divider, Popover, Tooltip } from "antd"; import { FilterOutlined } from "@ant-design/icons"; -import React from "react"; +import { Checkbox, Divider, Popover, Tooltip } from "antd"; import _ from "lodash"; +import ButtonComponent from "oxalis/view/components/button_component"; import type { - Synapse, ConnectomeData, DirectionCaptionsKeys, + Synapse, } from "oxalis/view/right-border-tabs/connectome_tab/synapse_tree"; import { directionCaptions } from "oxalis/view/right-border-tabs/connectome_tab/synapse_tree"; -import ButtonComponent from "oxalis/view/components/button_component"; +import React from "react"; type SynapseDirection = "in" | "out"; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_settings.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_settings.tsx index e8e136052ac..c5a30e71ae0 100644 --- 
a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_settings.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_settings.tsx @@ -1,22 +1,22 @@ -import { Popover, Select, Tooltip, Row, Col } from "antd"; import { SettingOutlined } from "@ant-design/icons"; -import { connect } from "react-redux"; -import React from "react"; -import type { APISegmentationLayer, APIConnectomeFile, APIDataset } from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; -import Store from "oxalis/store"; -import ButtonComponent from "oxalis/view/components/button_component"; import { getConnectomeFilesForDatasetLayer } from "admin/admin_rest_api"; +import { Col, Popover, Row, Select, Tooltip } from "antd"; +import { settings } from "messages"; +import defaultState from "oxalis/default_state"; import { updateConnectomeFileListAction, updateCurrentConnectomeFileAction, } from "oxalis/model/actions/connectome_actions"; -import { getBaseSegmentationName } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; -import { userSettings } from "types/schemas/user_settings.schema"; -import { settings } from "messages"; import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; +import type { OxalisState } from "oxalis/store"; +import Store from "oxalis/store"; +import ButtonComponent from "oxalis/view/components/button_component"; import { NumberSliderSetting } from "oxalis/view/components/setting_input_views"; -import defaultState from "oxalis/default_state"; +import { getBaseSegmentationName } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import React from "react"; +import { connect } from "react-redux"; +import type { APIConnectomeFile, APIDataset, APISegmentationLayer } from "types/api_flow_types"; +import { userSettings } from "types/schemas/user_settings.schema"; const { Option } = Select; type OwnProps = { segmentationLayer: 
APISegmentationLayer | null | undefined; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_view.tsx index ac50d3a0de1..9628ab6b503 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/connectome_view.tsx @@ -1,66 +1,66 @@ -import { Alert, Empty, Space, Tooltip, type TreeProps } from "antd"; -import { connect } from "react-redux"; -import Maybe from "data.maybe"; -import React from "react"; -import _ from "lodash"; -import type { - APISegmentationLayer, - APIDataset, - APIConnectomeFile, - APIDataSourceId, -} from "types/api_flow_types"; -import { diffArrays, unique, map3 } from "libs/utils"; -import { getTreeNameForAgglomerateSkeleton } from "oxalis/model/accessors/skeletontracing_accessor"; -import { getBaseSegmentationName } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; import { - getSynapsesOfAgglomerates, - getSynapseSources, getSynapseDestinations, getSynapsePositions, + getSynapseSources, getSynapseTypes, + getSynapsesOfAgglomerates, } from "admin/admin_rest_api"; +import { Alert, Empty, Space, Tooltip, type TreeProps } from "antd"; +import Maybe from "data.maybe"; +import DiffableMap from "libs/diffable_map"; +import { stringToAntdColorPresetRgb } from "libs/format_utils"; +import Toast from "libs/toast"; +import { diffArrays, map3, unique } from "libs/utils"; +import _ from "lodash"; +import { TreeTypeEnum, type Vector3 } from "oxalis/constants"; +import Constants, { MappingStatusEnum } from "oxalis/constants"; +import getSceneController from "oxalis/controller/scene_controller_provider"; import { - getVisibleOrLastSegmentationLayer, getMappingInfo, + getVisibleOrLastSegmentationLayer, } from "oxalis/model/accessors/dataset_accessor"; +import { getTreeNameForAgglomerateSkeleton } from 
"oxalis/model/accessors/skeletontracing_accessor"; import { - initializeConnectomeTracingAction, - removeConnectomeTracingAction, - deleteConnectomeTreesAction, addConnectomeTreesAction, - setConnectomeTreesVisibilityAction, - setActiveConnectomeAgglomerateIdsAction, + deleteConnectomeTreesAction, + initializeConnectomeTracingAction, loadConnectomeAgglomerateSkeletonAction, removeConnectomeAgglomerateSkeletonAction, + removeConnectomeTracingAction, + setActiveConnectomeAgglomerateIdsAction, + setConnectomeTreesVisibilityAction, } from "oxalis/model/actions/connectome_actions"; -import { stringToAntdColorPresetRgb } from "libs/format_utils"; import { setMappingAction } from "oxalis/model/actions/settings_actions"; -import ButtonComponent from "oxalis/view/components/button_component"; -import { TreeTypeEnum, type Vector3 } from "oxalis/constants"; -import Constants, { MappingStatusEnum } from "oxalis/constants"; -import DiffableMap from "libs/diffable_map"; import EdgeCollection from "oxalis/model/edge_collection"; -import InputComponent from "oxalis/view/components/input_component"; import type { - OxalisState, - MutableTree, + ActiveMappingInfo, MutableNode, + MutableTree, MutableTreeMap, - ActiveMappingInfo, + OxalisState, } from "oxalis/store"; import Store from "oxalis/store"; -import Toast from "libs/toast"; -import getSceneController from "oxalis/controller/scene_controller_provider"; +import ButtonComponent from "oxalis/view/components/button_component"; +import InputComponent from "oxalis/view/components/input_component"; +import ConnectomeFilters from "oxalis/view/right-border-tabs/connectome_tab/connectome_filters"; +import ConnectomeSettings from "oxalis/view/right-border-tabs/connectome_tab/connectome_settings"; import type { - ConnectomeData, Agglomerate, + ConnectomeData, TreeNode, } from "oxalis/view/right-border-tabs/connectome_tab/synapse_tree"; import SynapseTree, { convertConnectomeToTreeData, } from 
"oxalis/view/right-border-tabs/connectome_tab/synapse_tree"; -import ConnectomeFilters from "oxalis/view/right-border-tabs/connectome_tab/connectome_filters"; -import ConnectomeSettings from "oxalis/view/right-border-tabs/connectome_tab/connectome_settings"; +import { getBaseSegmentationName } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; +import React from "react"; +import { connect } from "react-redux"; +import type { + APIConnectomeFile, + APIDataSourceId, + APIDataset, + APISegmentationLayer, +} from "types/api_flow_types"; const connectomeTabId = "connectome-view"; type StateProps = { dataset: APIDataset; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/synapse_tree.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/synapse_tree.tsx index 2f9cc9d07c2..23ffc0e98c7 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/synapse_tree.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/connectome_tab/synapse_tree.tsx @@ -1,14 +1,14 @@ -import AutoSizer from "react-virtualized-auto-sizer"; import { Dropdown, type MenuProps, Tag, Tree, type TreeProps } from "antd"; -import React from "react"; +import { stringToAntdColorPreset } from "libs/format_utils"; import _ from "lodash"; import memoizeOne from "memoize-one"; -import { stringToAntdColorPreset } from "libs/format_utils"; -import { api } from "oxalis/singletons"; import type { Vector3 } from "oxalis/constants"; -import type { APIConnectomeFile } from "types/api_flow_types"; -import Store from "oxalis/store"; import { updateTemporarySettingAction } from "oxalis/model/actions/settings_actions"; +import { api } from "oxalis/singletons"; +import Store from "oxalis/store"; +import React from "react"; +import AutoSizer from "react-virtualized-auto-sizer"; +import type { APIConnectomeFile } from "types/api_flow_types"; type BaseSynapse = { id: number; position: Vector3; diff --git 
a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index cbd95bb2841..ac7ee093492 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -1,13 +1,14 @@ -import type { Dispatch } from "redux"; -import { Typography, Tag } from "antd"; -import { SettingOutlined, InfoCircleOutlined, EditOutlined } from "@ant-design/icons"; -import { connect } from "react-redux"; +import { EditOutlined, InfoCircleOutlined, SettingOutlined } from "@ant-design/icons"; +import { Tag, Typography } from "antd"; +import { formatNumberToVolume, formatScale, formatVoxels } from "libs/format_utils"; import Markdown from "libs/markdown_adapter"; -import React, { type CSSProperties } from "react"; -import { Link } from "react-router-dom"; -import type { APIDataset, APIUser } from "types/api_flow_types"; import { ControlModeEnum, LongUnitToShortUnitMap } from "oxalis/constants"; -import { formatNumberToVolume, formatScale, formatVoxels } from "libs/format_utils"; +import { + type TracingStats, + getSkeletonStats, + getStats, + getVolumeStats, +} from "oxalis/model/accessors/annotation_accessor"; import { getDatasetExtentAsString, getDatasetExtentInUnitAsProduct, @@ -17,27 +18,26 @@ import { } from "oxalis/model/accessors/dataset_accessor"; import { getActiveMagInfo } from "oxalis/model/accessors/flycam_accessor"; import { - getSkeletonStats, - getStats, - getVolumeStats, - type TracingStats, -} from "oxalis/model/accessors/annotation_accessor"; -import { - setAnnotationNameAction, setAnnotationDescriptionAction, + setAnnotationNameAction, } from "oxalis/model/actions/annotation_actions"; +import React, { type CSSProperties } from "react"; +import { connect } from "react-redux"; +import { Link } from "react-router-dom"; +import type { Dispatch } from "redux"; +import type { 
APIDataset, APIUser } from "types/api_flow_types"; import type { OxalisState, Task, Tracing } from "oxalis/store"; -import { formatUserName } from "oxalis/model/accessors/user_accessor"; -import { mayEditAnnotationProperties } from "oxalis/model/accessors/annotation_accessor"; -import { mayUserEditDataset, pluralize, safeNumberToStr } from "libs/utils"; -import { getReadableNameForLayerName } from "oxalis/model/accessors/volumetracing_accessor"; import { getOrganization } from "admin/admin_rest_api"; -import { MarkdownModal } from "../components/markdown_modal"; import FastTooltip from "components/fast_tooltip"; +import { mayUserEditDataset, pluralize, safeNumberToStr } from "libs/utils"; import messages from "messages"; +import { mayEditAnnotationProperties } from "oxalis/model/accessors/annotation_accessor"; +import { formatUserName } from "oxalis/model/accessors/user_accessor"; +import { getReadableNameForLayerName } from "oxalis/model/accessors/volumetracing_accessor"; import type { EmptyObject } from "types/globals"; +import { MarkdownModal } from "../components/markdown_modal"; type StateProps = { annotation: Tracing; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/delete_group_modal_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/delete_group_modal_view.tsx index bae50076b91..7810a53a1dc 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/delete_group_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/delete_group_modal_view.tsx @@ -1,4 +1,4 @@ -import { Modal, Button } from "antd"; +import { Button, Modal } from "antd"; import messages from "messages"; type Props = { diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/metadata_table.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/metadata_table.tsx index f55aa120a25..513dea8f6f3 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/metadata_table.tsx +++ 
b/frontend/javascripts/oxalis/view/right-border-tabs/metadata_table.tsx @@ -1,20 +1,20 @@ import { CloseOutlined, TagsOutlined } from "@ant-design/icons"; import { Button } from "antd"; +import FastTooltip from "components/fast_tooltip"; import { type APIMetadataWithError, - getTypeSelectDropdownMenu, - getUsedTagsWithinMetadata, InnerMetadataTable, MetadataValueInput, + getTypeSelectDropdownMenu, + getUsedTagsWithinMetadata, } from "dashboard/folders/metadata_table"; +import { memo } from "react"; import { type APIMetadataEntry, APIMetadataEnum, type MetadataEntryProto, } from "types/api_flow_types"; import { InputWithUpdateOnBlur } from "../components/input_with_update_on_blur"; -import { memo } from "react"; -import FastTooltip from "components/fast_tooltip"; const getKeyInputIdForIndex = (index: number) => `metadata-key-input-id-${index}`; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/scrollable_virtualized_tree.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/scrollable_virtualized_tree.tsx index aa02951bc02..17a58c15036 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/scrollable_virtualized_tree.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/scrollable_virtualized_tree.tsx @@ -1,8 +1,8 @@ import { Tree as AntdTree, type TreeProps } from "antd"; import type { BasicDataNode } from "antd/es/tree"; import { throttle } from "lodash"; -import { forwardRef, useCallback, useRef } from "react"; import type RcTree from "rc-tree"; +import { forwardRef, useCallback, useRef } from "react"; const MIN_SCROLL_SPEED = 30; const MAX_SCROLL_SPEED = 200; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_list_item.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_list_item.tsx index 04ba5ef6737..2337df6556c 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_list_item.tsx +++ 
b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_list_item.tsx @@ -1,27 +1,40 @@ import { DeleteOutlined, + EllipsisOutlined, LoadingOutlined, ReloadOutlined, - VerticalAlignBottomOutlined, - EllipsisOutlined, TagsOutlined, + VerticalAlignBottomOutlined, } from "@ant-design/icons"; -import { List, type MenuProps, App } from "antd"; -import { useDispatch, useSelector } from "react-redux"; +import { App, List, type MenuProps } from "antd"; import Checkbox, { type CheckboxChangeEvent } from "antd/lib/checkbox/Checkbox"; import React from "react"; +import { useDispatch, useSelector } from "react-redux"; +import type { MenuItemType } from "antd/es/menu/interface"; import classnames from "classnames"; +import { ChangeColorMenuItemContent } from "components/color_picker"; +import FastTooltip from "components/fast_tooltip"; +import { V4 } from "libs/mjs"; +import Toast from "libs/toast"; import * as Utils from "libs/utils"; -import type { APISegmentationLayer, APIMeshFile } from "types/api_flow_types"; import type { Vector3, Vector4 } from "oxalis/constants"; +import { getSegmentIdForPosition } from "oxalis/controller/combinations/volume_handlers"; import { + getAdditionalCoordinatesAsString, + getPosition, +} from "oxalis/model/accessors/flycam_accessor"; +import { + getSegmentColorAsRGBA, + getSegmentName, +} from "oxalis/model/accessors/volumetracing_accessor"; +import { + refreshMeshAction, + removeMeshAction, triggerMeshDownloadAction, updateMeshVisibilityAction, - removeMeshAction, - refreshMeshAction, } from "oxalis/model/actions/annotation_actions"; -import EditableTextLabel from "oxalis/view/components/editable_text_label"; +import { rgbaToCSS } from "oxalis/shaders/utils.glsl"; import type { ActiveMappingInfo, MeshInformation, @@ -30,25 +43,12 @@ import type { VolumeTracing, } from "oxalis/store"; import Store from "oxalis/store"; -import { - getSegmentColorAsRGBA, - getSegmentName, -} from "oxalis/model/accessors/volumetracing_accessor"; 
-import Toast from "libs/toast"; -import { rgbaToCSS } from "oxalis/shaders/utils.glsl"; -import { V4 } from "libs/mjs"; -import { ChangeColorMenuItemContent } from "components/color_picker"; -import type { MenuItemType } from "antd/es/menu/interface"; -import { withMappingActivationConfirmation } from "./segments_view_helper"; -import { LoadMeshMenuItemLabel } from "./load_mesh_menu_item_label"; -import type { AdditionalCoordinate } from "types/api_flow_types"; -import { - getAdditionalCoordinatesAsString, - getPosition, -} from "oxalis/model/accessors/flycam_accessor"; -import FastTooltip from "components/fast_tooltip"; +import EditableTextLabel from "oxalis/view/components/editable_text_label"; import { getContextMenuPositionFromEvent } from "oxalis/view/context_menu"; -import { getSegmentIdForPosition } from "oxalis/controller/combinations/volume_handlers"; +import type { APIMeshFile, APISegmentationLayer } from "types/api_flow_types"; +import type { AdditionalCoordinate } from "types/api_flow_types"; +import { LoadMeshMenuItemLabel } from "./load_mesh_menu_item_label"; +import { withMappingActivationConfirmation } from "./segments_view_helper"; const ALSO_DELETE_SEGMENT_FROM_LIST_KEY = "also-delete-segment-from-list"; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_statistics_modal.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_statistics_modal.tsx index b5efd828211..eba4554034d 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_statistics_modal.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_statistics_modal.tsx @@ -3,25 +3,25 @@ import { Alert, Modal, Spin, Table } from "antd"; import saveAs from "file-saver"; import { formatNumberToVolume } from "libs/format_utils"; import { useFetch } from "libs/react_helpers"; +import { pluralize, transformToCSVRow } from "libs/utils"; import { LongUnitToShortUnitMap, type Vector3 } from 
"oxalis/constants"; -import { getMappingInfo, getMagInfo } from "oxalis/model/accessors/dataset_accessor"; -import type { OxalisState, Segment } from "oxalis/store"; -import { - type SegmentHierarchyNode, - type SegmentHierarchyGroup, - getVolumeRequestUrl, -} from "./segments_view_helper"; -import { api } from "oxalis/singletons"; -import type { APISegmentationLayer, VoxelSize } from "types/api_flow_types"; -import { getBoundingBoxInMag1 } from "oxalis/model/sagas/volume/helpers"; -import { useSelector } from "react-redux"; +import { getMagInfo, getMappingInfo } from "oxalis/model/accessors/dataset_accessor"; import { getAdditionalCoordinatesAsString, hasAdditionalCoordinates, } from "oxalis/model/accessors/flycam_accessor"; -import { pluralize, transformToCSVRow } from "libs/utils"; import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; +import { getBoundingBoxInMag1 } from "oxalis/model/sagas/volume/helpers"; import { voxelToVolumeInUnit } from "oxalis/model/scaleinfo"; +import { api } from "oxalis/singletons"; +import type { OxalisState, Segment } from "oxalis/store"; +import { useSelector } from "react-redux"; +import type { APISegmentationLayer, VoxelSize } from "types/api_flow_types"; +import { + type SegmentHierarchyGroup, + type SegmentHierarchyNode, + getVolumeRequestUrl, +} from "./segments_view_helper"; const MODAL_ERROR_MESSAGE = "Segment statistics could not be fetched. 
Check the console for more details."; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx index d17eb5ea64f..21f1232fa77 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view.tsx @@ -2,8 +2,8 @@ import { ArrowRightOutlined, CloseOutlined, DeleteOutlined, - DownloadOutlined, DownOutlined, + DownloadOutlined, ExclamationCircleOutlined, ExpandAltOutlined, EyeInvisibleOutlined, @@ -17,20 +17,21 @@ import { } from "@ant-design/icons"; import { getJobs, startComputeMeshFileJob } from "admin/admin_rest_api"; import { + PricingPlanEnum, getFeatureNotAvailableInPlanMessage, isFeatureAllowedByPricingPlan, - PricingPlanEnum, } from "admin/organization/pricing_plan_utils"; import { Button, ConfigProvider, Divider, Empty, + type MenuProps, Modal, Popover, Select, - type MenuProps, } from "antd"; +import type { ItemType } from "antd/lib/menu/interface"; import type { DataNode } from "antd/lib/tree"; import { ChangeColorMenuItemContent } from "components/color_picker"; import FastTooltip from "components/fast_tooltip"; @@ -40,9 +41,9 @@ import _, { isNumber, memoize } from "lodash"; import type { Vector3 } from "oxalis/constants"; import { EMPTY_OBJECT, MappingStatusEnum } from "oxalis/constants"; import { + getMagInfoOfVisibleSegmentationLayer, getMappingInfo, getMaybeSegmentIndexAvailability, - getMagInfoOfVisibleSegmentationLayer, getVisibleSegmentationLayer, } from "oxalis/model/accessors/dataset_accessor"; import { getAdditionalCoordinatesAsString } from "oxalis/model/accessors/flycam_accessor"; @@ -97,11 +98,12 @@ import Store from "oxalis/store"; import ButtonComponent from "oxalis/view/components/button_component"; import DomVisibilityObserver from "oxalis/view/components/dom_visibility_observer"; import EditableTextLabel 
from "oxalis/view/components/editable_text_label"; +import { InputWithUpdateOnBlur } from "oxalis/view/components/input_with_update_on_blur"; import { getContextMenuPositionFromEvent } from "oxalis/view/context_menu"; import SegmentListItem from "oxalis/view/right-border-tabs/segments_tab/segment_list_item"; import { - getBaseSegmentationName, type SegmentHierarchyNode, + getBaseSegmentationName, } from "oxalis/view/right-border-tabs/segments_tab/segments_view_helper"; import type RcTree from "rc-tree"; import React, { type Key } from "react"; @@ -119,9 +121,12 @@ import { APIJobType, type AdditionalCoordinate } from "types/api_flow_types"; import type { ValueOf } from "types/globals"; import AdvancedSearchPopover from "../advanced_search_popover"; import DeleteGroupModalView from "../delete_group_modal_view"; +import { MetadataEntryTableRows } from "../metadata_table"; import { ResizableSplitPane } from "../resizable_split_pane"; +import ScrollableVirtualizedTree from "../scrollable_virtualized_tree"; import { ContextMenuContainer } from "../sidebar_context_menu"; import { + MISSING_GROUP_ID, additionallyExpandGroup, createGroupToParentMap, createGroupToSegmentsMap, @@ -129,13 +134,8 @@ import { getExpandedGroups, getGroupByIdWithSubgroups, getGroupNodeKey, - MISSING_GROUP_ID, } from "../tree_hierarchy_view_helpers"; -import { MetadataEntryTableRows } from "../metadata_table"; import { SegmentStatisticsModal } from "./segment_statistics_modal"; -import type { ItemType } from "antd/lib/menu/interface"; -import { InputWithUpdateOnBlur } from "oxalis/view/components/input_with_update_on_blur"; -import ScrollableVirtualizedTree from "../scrollable_virtualized_tree"; const SCROLL_DELAY_MS = 50; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx index 8ef6374d529..4c4e3f3ff4a 100644 --- 
a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx @@ -1,18 +1,18 @@ +import { hasSegmentIndexInDataStore } from "admin/admin_rest_api"; import { Modal } from "antd"; -import type { APIDataLayer, APIDataset, APISegmentationLayer } from "types/api_flow_types"; -import type { ActiveMappingInfo, HybridTracing, Segment } from "oxalis/store"; -import Store from "oxalis/store"; -import { MappingStatusEnum } from "oxalis/constants"; -import { setMappingAction, setMappingEnabledAction } from "oxalis/model/actions/settings_actions"; +import type { BasicDataNode } from "antd/es/tree"; import { waitForCondition } from "libs/utils"; +import { MappingStatusEnum } from "oxalis/constants"; import { getMappingInfo } from "oxalis/model/accessors/dataset_accessor"; import { getEditableMappingForVolumeTracingId, getVolumeTracingById, } from "oxalis/model/accessors/volumetracing_accessor"; +import { setMappingAction, setMappingEnabledAction } from "oxalis/model/actions/settings_actions"; +import type { ActiveMappingInfo, HybridTracing, Segment } from "oxalis/store"; +import Store from "oxalis/store"; import type { MenuClickEventHandler } from "rc-menu/lib/interface"; -import { hasSegmentIndexInDataStore } from "admin/admin_rest_api"; -import type { BasicDataNode } from "antd/es/tree"; +import type { APIDataLayer, APIDataset, APISegmentationLayer } from "types/api_flow_types"; const { confirm } = Modal; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view_helpers.ts b/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view_helpers.ts index b8cf6e283df..cda611dd49d 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view_helpers.ts +++ b/frontend/javascripts/oxalis/view/right-border-tabs/tree_hierarchy_view_helpers.ts @@ -1,7 +1,7 @@ import type { DataNode } from "antd/es/tree"; 
import _ from "lodash"; import { mapGroupsWithRoot } from "oxalis/model/accessors/skeletontracing_accessor"; -import type { Tree, TreeGroup, SegmentMap, Segment, TreeMap, SegmentGroup } from "oxalis/store"; +import type { Segment, SegmentGroup, SegmentMap, Tree, TreeGroup, TreeMap } from "oxalis/store"; export const MISSING_GROUP_ID = -1; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx index 2de7e883f40..83273618a9a 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/skeleton_tab_view.tsx @@ -1,99 +1,99 @@ -import { Dropdown, Empty, Spin, Modal, Tooltip, notification, type MenuProps, Space } from "antd"; -import type { Dispatch } from "redux"; import { - DownloadOutlined, DownOutlined, + DownloadOutlined, ExclamationCircleOutlined, SearchOutlined, UploadOutlined, WarningOutlined, } from "@ant-design/icons"; -import { batchActions } from "redux-batched-actions"; -import { connect } from "react-redux"; +import { BlobReader, BlobWriter, type Entry, ZipReader } from "@zip.js/zip.js"; +import { clearCache, getBuildInfo, importVolumeTracing } from "admin/admin_rest_api"; +import { Dropdown, Empty, type MenuProps, Modal, Space, Spin, Tooltip, notification } from "antd"; import { saveAs } from "file-saver"; -import { BlobReader, BlobWriter, ZipReader, type Entry } from "@zip.js/zip.js"; -import * as React from "react"; +import { formatLengthAsVx, formatNumberToLength } from "libs/format_utils"; +import { readFileAsArrayBuffer, readFileAsText } from "libs/read_file"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; import _ from "lodash"; import memoizeOne from "memoize-one"; -import type { Action } from "oxalis/model/actions/actions"; -import { addUserBoundingBoxesAction } from 
"oxalis/model/actions/annotation_actions"; -import { - createGroupToTreesMap, - callDeep, - MISSING_GROUP_ID, - GroupTypeEnum, - createGroupToParentMap, - additionallyExpandGroup, -} from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; -import { createMutableTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; -import { formatNumberToLength, formatLengthAsVx } from "libs/format_utils"; -import { getActiveSegmentationTracing } from "oxalis/model/accessors/volumetracing_accessor"; +import messages from "messages"; +import { LongUnitToShortUnitMap } from "oxalis/constants"; +import { isAnnotationOwner } from "oxalis/model/accessors/annotation_accessor"; import { + enforceSkeletonTracing, getActiveTree, getActiveTreeGroup, getTree, - enforceSkeletonTracing, isSkeletonLayerTransformed, } from "oxalis/model/accessors/skeletontracing_accessor"; -import { getBuildInfo, importVolumeTracing, clearCache } from "admin/admin_rest_api"; -import { - importVolumeTracingAction, - setLargestSegmentIdAction, -} from "oxalis/model/actions/volumetracing_actions"; -import { parseProtoTracing } from "oxalis/model/helpers/proto_helpers"; -import { readFileAsText, readFileAsArrayBuffer } from "libs/read_file"; -import { - serializeToNml, - getNmlName, - parseNml, - wrapInNewGroup, - NmlParseError, -} from "oxalis/model/helpers/nml_helpers"; -import { setDropzoneModalVisibilityAction } from "oxalis/model/actions/ui_actions"; +import { getActiveSegmentationTracing } from "oxalis/model/accessors/volumetracing_accessor"; +import type { Action } from "oxalis/model/actions/actions"; +import { addUserBoundingBoxesAction } from "oxalis/model/actions/annotation_actions"; +import { setVersionNumberAction } from "oxalis/model/actions/save_actions"; +import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; import { - setTreeNameAction, + type BatchableUpdateTreeAction, + addTreesAndGroupsAction, + batchUpdateGroupsAndTreesAction, 
createTreeAction, - deleteTreesAction, deleteTreeAsUserAction, - shuffleAllTreeColorsAction, - selectNextTreeAction, - toggleAllTreesAction, - toggleInactiveTreesAction, - setActiveTreeAction, + deleteTreesAction, deselectActiveTreeAction, deselectActiveTreeGroupAction, + selectNextTreeAction, + setActiveTreeAction, setActiveTreeGroupAction, + setExpandedTreeGroupsByIdsAction, setTreeGroupAction, setTreeGroupsAction, - addTreesAndGroupsAction, - type BatchableUpdateTreeAction, - batchUpdateGroupsAndTreesAction, - setExpandedTreeGroupsByIdsAction, + setTreeNameAction, + shuffleAllTreeColorsAction, + toggleAllTreesAction, + toggleInactiveTreesAction, } from "oxalis/model/actions/skeletontracing_actions"; -import { setVersionNumberAction } from "oxalis/model/actions/save_actions"; -import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; -import ButtonComponent from "oxalis/view/components/button_component"; -import DomVisibilityObserver from "oxalis/view/components/dom_visibility_observer"; -import InputComponent from "oxalis/view/components/input_component"; +import { setDropzoneModalVisibilityAction } from "oxalis/model/actions/ui_actions"; +import { + importVolumeTracingAction, + setLargestSegmentIdAction, +} from "oxalis/model/actions/volumetracing_actions"; +import { + NmlParseError, + getNmlName, + parseNml, + serializeToNml, + wrapInNewGroup, +} from "oxalis/model/helpers/nml_helpers"; +import { parseProtoTracing } from "oxalis/model/helpers/proto_helpers"; +import { createMutableTreeMapFromTreeArray } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; import { Model } from "oxalis/singletons"; +import { api } from "oxalis/singletons"; import type { + MutableTreeMap, OxalisState, Tree, - TreeMap, TreeGroup, - MutableTreeMap, + TreeMap, UserBoundingBox, } from "oxalis/store"; import Store from "oxalis/store"; -import Toast from "libs/toast"; +import ButtonComponent from "oxalis/view/components/button_component"; +import 
DomVisibilityObserver from "oxalis/view/components/dom_visibility_observer"; +import InputComponent from "oxalis/view/components/input_component"; +import { + GroupTypeEnum, + MISSING_GROUP_ID, + additionallyExpandGroup, + callDeep, + createGroupToParentMap, + createGroupToTreesMap, +} from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; import TreeHierarchyView from "oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_view"; -import * as Utils from "libs/utils"; -import { api } from "oxalis/singletons"; -import messages from "messages"; +import * as React from "react"; +import { connect } from "react-redux"; +import type { Dispatch } from "redux"; +import { batchActions } from "redux-batched-actions"; import AdvancedSearchPopover from "../advanced_search_popover"; import DeleteGroupModalView from "../delete_group_modal_view"; -import { isAnnotationOwner } from "oxalis/model/accessors/annotation_accessor"; -import { LongUnitToShortUnitMap } from "oxalis/constants"; const { confirm } = Modal; const treeTabId = "tree-list"; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_renderers.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_renderers.tsx index c43db01f878..b462d960088 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_renderers.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_renderers.tsx @@ -11,8 +11,8 @@ import { type MenuProps, notification } from "antd"; import _ from "lodash"; import { LongUnitToShortUnitMap, - TreeTypeEnum, type TreeType, + TreeTypeEnum, type Vector3, } from "oxalis/constants"; import type { Action } from "oxalis/model/actions/actions"; @@ -38,21 +38,21 @@ import { toggleInactiveTreesAction, } from "oxalis/model/actions/skeletontracing_actions"; import { getMaximumGroupId } from "oxalis/model/reducers/skeletontracing_reducer_helpers"; -import { api, Store } from 
"oxalis/singletons"; +import { Store, api } from "oxalis/singletons"; import type { Tree, TreeGroup, TreeMap } from "oxalis/store"; import { + GroupTypeEnum, + MISSING_GROUP_ID, + type TreeNode, anySatisfyDeep, callDeep, createGroupToTreesMap, getGroupByIdWithSubgroups, getNodeKey, - GroupTypeEnum, makeBasicGroupObject, - MISSING_GROUP_ID, - type TreeNode, } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; -import { HideTreeEdgesIcon } from "./hide_tree_edges_icon"; import { ColoredDotIcon } from "../segments_tab/segment_list_item"; +import { HideTreeEdgesIcon } from "./hide_tree_edges_icon"; export type Props = { activeTreeId: number | null | undefined; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_view.tsx index e25c28cc865..b044d4ff85a 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/trees_tab/tree_hierarchy_view.tsx @@ -1,46 +1,46 @@ import { DownOutlined } from "@ant-design/icons"; import { type Tree as AntdTree, type GetRef, type MenuProps, Modal, type TreeProps } from "antd"; -import React, { memo, useCallback, useEffect, useRef, useState } from "react"; -import AutoSizer from "react-virtualized-auto-sizer"; +import * as Utils from "libs/utils"; import { mapGroups } from "oxalis/model/accessors/skeletontracing_accessor"; import { setTreeGroupAction, - setTreeNameAction, setTreeMetadataAction, + setTreeNameAction, toggleAllTreesAction, toggleTreeAction, toggleTreeGroupAction, } from "oxalis/model/actions/skeletontracing_actions"; -import * as Utils from "libs/utils"; import { Store } from "oxalis/singletons"; import type { Tree, TreeGroup, TreeMap } from "oxalis/store"; import { + GroupTypeEnum, + MISSING_GROUP_ID, + type TreeNode, createGroupToTreesMap, deepFlatFilter, findParentGroupNode, getNodeKey, - 
GroupTypeEnum, insertTreesAndTransform, - MISSING_GROUP_ID, moveGroupsHelper, - type TreeNode, } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; +import React, { memo, useCallback, useEffect, useRef, useState } from "react"; +import AutoSizer from "react-virtualized-auto-sizer"; +import type { MetadataEntryProto } from "types/api_flow_types"; +import { InputWithUpdateOnBlur } from "../../components/input_with_update_on_blur"; import { getContextMenuPositionFromEvent } from "../../context_menu"; +import { MetadataEntryTableRows } from "../metadata_table"; +import { ResizableSplitPane } from "../resizable_split_pane"; +import ScrollableVirtualizedTree from "../scrollable_virtualized_tree"; import { ContextMenuContainer } from "../sidebar_context_menu"; import { - onBatchActions, type Props, + onBatchActions, renderGroupNode, renderTreeNode, selectGroupById, setExpandedGroups, setUpdateTreeGroups, } from "./tree_hierarchy_renderers"; -import { ResizableSplitPane } from "../resizable_split_pane"; -import { MetadataEntryTableRows } from "../metadata_table"; -import type { MetadataEntryProto } from "types/api_flow_types"; -import { InputWithUpdateOnBlur } from "../../components/input_with_update_on_blur"; -import ScrollableVirtualizedTree from "../scrollable_virtualized_tree"; const onCheck: TreeProps["onCheck"] = (_checkedKeysValue, info) => { const { id, type } = info.node; @@ -214,7 +214,7 @@ function TreeHierarchyView(props: Props) { const parentGroupId = dragTargetNode.type === GroupTypeEnum.GROUP ? dragTargetNode.id - : props.trees[dragTargetNode.id].groupId ?? MISSING_GROUP_ID; + : (props.trees[dragTargetNode.id].groupId ?? 
MISSING_GROUP_ID); let updatedTreeGroups: TreeGroup[] = props.treeGroups; if (draggedNode.type === GroupTypeEnum.TREE) { diff --git a/frontend/javascripts/oxalis/view/scalebar.tsx b/frontend/javascripts/oxalis/view/scalebar.tsx index 54cc894d7ec..5b782648710 100644 --- a/frontend/javascripts/oxalis/view/scalebar.tsx +++ b/frontend/javascripts/oxalis/view/scalebar.tsx @@ -1,13 +1,13 @@ -import { connect } from "react-redux"; -import type { APIDataset } from "types/api_flow_types"; -import type { OxalisState } from "oxalis/store"; +import FastTooltip from "components/fast_tooltip"; import { formatNumberToLength } from "libs/format_utils"; -import { getViewportExtents, getTDViewZoom } from "oxalis/model/accessors/view_mode_accessor"; -import { getZoomValue } from "oxalis/model/accessors/flycam_accessor"; import type { OrthoView } from "oxalis/constants"; import constants, { Unicode, OrthoViews, LongUnitToShortUnitMap } from "oxalis/constants"; +import { getZoomValue } from "oxalis/model/accessors/flycam_accessor"; +import { getTDViewZoom, getViewportExtents } from "oxalis/model/accessors/view_mode_accessor"; import { getBaseVoxelInUnit } from "oxalis/model/scaleinfo"; -import FastTooltip from "components/fast_tooltip"; +import type { OxalisState } from "oxalis/store"; +import { connect } from "react-redux"; +import type { APIDataset } from "types/api_flow_types"; const { ThinSpace, MultiplicationSymbol } = Unicode; diff --git a/frontend/javascripts/oxalis/view/statusbar.tsx b/frontend/javascripts/oxalis/view/statusbar.tsx index e2fe885cdc4..594772f7a7e 100644 --- a/frontend/javascripts/oxalis/view/statusbar.tsx +++ b/frontend/javascripts/oxalis/view/statusbar.tsx @@ -1,44 +1,44 @@ -import { useDispatch, useSelector } from "react-redux"; -import React, { useCallback, useState } from "react"; -import { WarningOutlined, MoreOutlined, DownloadOutlined } from "@ant-design/icons"; +import { DownloadOutlined, MoreOutlined, WarningOutlined } from "@ant-design/icons"; +import 
FastTooltip from "components/fast_tooltip"; +import { formatCountToDataAmountUnit } from "libs/format_utils"; +import { V3 } from "libs/mjs"; +import { useInterval } from "libs/react_helpers"; +import { useKeyPress } from "libs/react_hooks"; +import message from "messages"; +import messages from "messages"; import type { Vector3 } from "oxalis/constants"; import { AltOrOptionKey, MappingStatusEnum, OrthoViews } from "oxalis/constants"; +import { + type ActionDescriptor, + getToolClassForAnnotationTool, +} from "oxalis/controller/combinations/tool_controls"; import { getMappingInfoOrNull, getVisibleSegmentationLayer, hasVisibleUint64Segmentation, } from "oxalis/model/accessors/dataset_accessor"; -import { NumberInputPopoverSetting } from "oxalis/view/components/setting_input_views"; -import { useKeyPress } from "libs/react_hooks"; import { getActiveMagInfo } from "oxalis/model/accessors/flycam_accessor"; -import { setActiveCellAction } from "oxalis/model/actions/volumetracing_actions"; -import { - setActiveNodeAction, - setActiveTreeAction, -} from "oxalis/model/actions/skeletontracing_actions"; -import { formatCountToDataAmountUnit } from "libs/format_utils"; -import message from "messages"; -import { - type ActionDescriptor, - getToolClassForAnnotationTool, -} from "oxalis/controller/combinations/tool_controls"; +import { adaptActiveToolToShortcuts } from "oxalis/model/accessors/tool_accessor"; import { calculateGlobalPos, isPlaneMode as getIsPlaneMode, } from "oxalis/model/accessors/view_mode_accessor"; -import { adaptActiveToolToShortcuts } from "oxalis/model/accessors/tool_accessor"; -import { V3 } from "libs/mjs"; -import type { OxalisState } from "oxalis/store"; import { getActiveSegmentationTracing, getReadableNameForLayerName, } from "oxalis/model/accessors/volumetracing_accessor"; +import { + setActiveNodeAction, + setActiveTreeAction, +} from "oxalis/model/actions/skeletontracing_actions"; +import { setActiveCellAction } from 
"oxalis/model/actions/volumetracing_actions"; import { getGlobalDataConnectionInfo } from "oxalis/model/data_connection_info"; -import { useInterval } from "libs/react_helpers"; -import type { AdditionalCoordinate } from "types/api_flow_types"; -import FastTooltip from "components/fast_tooltip"; import { Store } from "oxalis/singletons"; -import messages from "messages"; +import type { OxalisState } from "oxalis/store"; +import { NumberInputPopoverSetting } from "oxalis/view/components/setting_input_views"; +import React, { useCallback, useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; +import type { AdditionalCoordinate } from "types/api_flow_types"; const lineColor = "rgba(255, 255, 255, 0.67)"; const moreIconStyle = { diff --git a/frontend/javascripts/oxalis/view/td_view_controls.tsx b/frontend/javascripts/oxalis/view/td_view_controls.tsx index 266246411ba..2761f998b9b 100644 --- a/frontend/javascripts/oxalis/view/td_view_controls.tsx +++ b/frontend/javascripts/oxalis/view/td_view_controls.tsx @@ -1,29 +1,29 @@ +import { + BorderInnerOutlined, + BorderOuterOutlined, + SettingOutlined, + StopOutlined, +} from "@ant-design/icons"; import { Button, - Radio, - Tooltip, - Dropdown, Col, - Row, - Switch, - type RadioChangeEvent, + Dropdown, type MenuProps, + Radio, + type RadioChangeEvent, + Row, Space, + Switch, + Tooltip, } from "antd"; -import { - StopOutlined, - BorderInnerOutlined, - BorderOuterOutlined, - SettingOutlined, -} from "@ant-design/icons"; -import { connect } from "react-redux"; -import type { Dispatch } from "redux"; -import type { OxalisState } from "oxalis/store"; +import type { SwitchChangeEventHandler } from "antd/lib/switch"; import type { TDViewDisplayMode } from "oxalis/constants"; import { TDViewDisplayModeEnum } from "oxalis/constants"; import { updateUserSettingAction } from "oxalis/model/actions/settings_actions"; import { api } from "oxalis/singletons"; -import type { SwitchChangeEventHandler } from 
"antd/lib/switch"; +import type { OxalisState } from "oxalis/store"; +import { connect } from "react-redux"; +import type { Dispatch } from "redux"; type Props = { tdViewDisplayPlanes: TDViewDisplayMode; tdViewDisplayDatasetBorders: boolean; diff --git a/frontend/javascripts/oxalis/view/tracing_view.tsx b/frontend/javascripts/oxalis/view/tracing_view.tsx index 6da611cf347..cfe8b4e77d5 100644 --- a/frontend/javascripts/oxalis/view/tracing_view.tsx +++ b/frontend/javascripts/oxalis/view/tracing_view.tsx @@ -1,10 +1,10 @@ -import type * as React from "react"; +import ErrorHandling from "libs/error_handling"; import Toast from "libs/toast"; import messages from "messages"; -import ErrorHandling from "libs/error_handling"; -import Store from "oxalis/store"; import { setViewModeAction } from "oxalis/model/actions/settings_actions"; import { api } from "oxalis/singletons"; +import Store from "oxalis/store"; +import type * as React from "react"; const WEBGL_CONTEXT_LOST_KEY = "WEBGL_CONTEXT_LOST_KEY"; diff --git a/frontend/javascripts/oxalis/view/version_entry.tsx b/frontend/javascripts/oxalis/view/version_entry.tsx index 6b414d7c234..5add614937a 100644 --- a/frontend/javascripts/oxalis/view/version_entry.tsx +++ b/frontend/javascripts/oxalis/view/version_entry.tsx @@ -1,9 +1,8 @@ -import { Avatar, Button, List } from "antd"; import { ArrowsAltOutlined, BackwardOutlined, - CodepenOutlined, CodeSandboxOutlined, + CodepenOutlined, DeleteOutlined, EditOutlined, EyeOutlined, @@ -12,46 +11,47 @@ import { RocketOutlined, ShrinkOutlined, } from "@ant-design/icons"; -import * as React from "react"; +import { Avatar, Button, List } from "antd"; import _ from "lodash"; +import * as React from "react"; import classNames from "classnames"; +import FormattedDate from "components/formatted_date"; +import { formatUserName, getContributorById } from "oxalis/model/accessors/user_accessor"; +import { getReadableNameByVolumeTracingId } from "oxalis/model/accessors/volumetracing_accessor"; 
import type { - ServerUpdateAction, + AddLayerToAnnotationUpdateAction, + AddSegmentIndexUpdateAction, + CreateEdgeUpdateAction, CreateNodeUpdateAction, + CreateSegmentUpdateAction, + DeleteAnnotationLayerUpdateAction, + DeleteEdgeUpdateAction, DeleteNodeUpdateAction, - UpdateTreeUpdateAction, + DeleteSegmentDataUpdateAction, + DeleteSegmentUpdateAction, DeleteTreeUpdateAction, - RevertToVersionUpdateAction, - UpdateNodeUpdateAction, - UpdateTreeVisibilityUpdateAction, - UpdateTreeEdgesVisibilityUpdateAction, - UpdateTreeGroupVisibilityUpdateAction, - CreateEdgeUpdateAction, - DeleteEdgeUpdateAction, - SplitAgglomerateUpdateAction, MergeAgglomerateUpdateAction, - CreateSegmentUpdateAction, - UpdateSegmentUpdateAction, - DeleteSegmentUpdateAction, - MoveTreeComponentUpdateAction, MergeTreeUpdateAction, + MoveTreeComponentUpdateAction, + RevertToVersionUpdateAction, + ServerUpdateAction, + SplitAgglomerateUpdateAction, UpdateAnnotationLayerNameUpdateAction, + UpdateBucketUpdateAction, UpdateMappingNameUpdateAction, - DeleteSegmentDataUpdateAction, - AddLayerToAnnotationUpdateAction, - DeleteAnnotationLayerUpdateAction, UpdateMetadataOfAnnotationUpdateAction, - UpdateBucketUpdateAction, + UpdateNodeUpdateAction, UpdateSegmentGroupsUpdateAction, - AddSegmentIndexUpdateAction, + UpdateSegmentUpdateAction, + UpdateTreeEdgesVisibilityUpdateAction, + UpdateTreeGroupVisibilityUpdateAction, + UpdateTreeUpdateAction, + UpdateTreeVisibilityUpdateAction, } from "oxalis/model/sagas/update_actions"; -import FormattedDate from "components/formatted_date"; +import type { HybridTracing, OxalisState } from "oxalis/store"; import { MISSING_GROUP_ID } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; import { useSelector } from "react-redux"; -import type { HybridTracing, OxalisState } from "oxalis/store"; -import { formatUserName, getContributorById } from "oxalis/model/accessors/user_accessor"; -import { getReadableNameByVolumeTracingId } from 
"oxalis/model/accessors/volumetracing_accessor"; type Description = { description: string; icon: React.ReactNode; @@ -99,7 +99,7 @@ const descriptionFns: Record< const segment1Description = action.value.segmentPosition1 != null ? `at position ${action.value.segmentPosition1}` - : action.value.segmentId1 ?? "unknown"; + : (action.value.segmentId1 ?? "unknown"); const segment2Description = action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown"; const description = `Split agglomerate ${action.value.agglomerateId} by separating the segments ${segment1Description} and ${segment2Description}.`; @@ -112,7 +112,7 @@ const descriptionFns: Record< const segment1Description = action.value.segmentPosition1 != null ? `at position ${action.value.segmentPosition1}` - : action.value.segmentId1 ?? "unknown"; + : (action.value.segmentId1 ?? "unknown"); const segment2Description = action.value.segmentPosition2 ?? action.value.segmentId1 ?? "unknown"; const description = `Merged agglomerates ${action.value.agglomerateId1} and ${action.value.agglomerateId2} by combining the segments ${segment1Description} and ${segment2Description}.`; diff --git a/frontend/javascripts/oxalis/view/version_entry_group.tsx b/frontend/javascripts/oxalis/view/version_entry_group.tsx index 4d5783fc148..a3be4824f29 100644 --- a/frontend/javascripts/oxalis/view/version_entry_group.tsx +++ b/frontend/javascripts/oxalis/view/version_entry_group.tsx @@ -1,10 +1,10 @@ -import * as React from "react"; +import { CaretDownOutlined, CaretRightOutlined } from "@ant-design/icons"; import { Avatar, List } from "antd"; -import _ from "lodash"; -import type { APIUpdateActionBatch } from "types/api_flow_types"; import FormattedDate from "components/formatted_date"; +import _ from "lodash"; import VersionEntry from "oxalis/view/version_entry"; -import { CaretDownOutlined, CaretRightOutlined } from "@ant-design/icons"; +import * as React from "react"; +import type { APIUpdateActionBatch } from 
"types/api_flow_types"; type Props = { batches: APIUpdateActionBatch[]; diff --git a/frontend/javascripts/oxalis/view/version_list.tsx b/frontend/javascripts/oxalis/view/version_list.tsx index c6dba3508cc..f217c2c6e24 100644 --- a/frontend/javascripts/oxalis/view/version_list.tsx +++ b/frontend/javascripts/oxalis/view/version_list.tsx @@ -1,37 +1,37 @@ -import { Button, List, Spin } from "antd"; -import { useState, useEffect } from "react"; -import _ from "lodash"; -import dayjs from "dayjs"; -import type { APIUpdateActionBatch } from "types/api_flow_types"; -import { chunkIntoTimeWindows } from "libs/utils"; +import { useInfiniteQuery, useQueryClient } from "@tanstack/react-query"; import { - getUpdateActionLog, downloadAnnotation, - getNewestVersionForAnnotation, getAnnotationProto, + getNewestVersionForAnnotation, + getUpdateActionLog, } from "admin/admin_rest_api"; +import { Button, List, Spin } from "antd"; +import dayjs from "dayjs"; import { handleGenericError } from "libs/error_handling"; +import { useFetch } from "libs/react_helpers"; +import { useEffectOnlyOnce } from "libs/react_hooks"; +import { chunkIntoTimeWindows } from "libs/utils"; +import _ from "lodash"; +import { getCreationTimestamp } from "oxalis/model/accessors/annotation_accessor"; +import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; import { pushSaveQueueTransactionIsolated, setVersionNumberAction, } from "oxalis/model/actions/save_actions"; +import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import { + type ServerUpdateAction, revertToVersion, serverCreateTracing, - type ServerUpdateAction, } from "oxalis/model/sagas/update_actions"; -import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; -import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import { Model } from "oxalis/singletons"; +import { api } from "oxalis/singletons"; import type { HybridTracing, 
OxalisState } from "oxalis/store"; import Store from "oxalis/store"; import VersionEntryGroup from "oxalis/view/version_entry_group"; -import { api } from "oxalis/singletons"; -import { useInfiniteQuery, useQueryClient } from "@tanstack/react-query"; -import { useEffectOnlyOnce } from "libs/react_hooks"; -import { useFetch } from "libs/react_helpers"; +import { useEffect, useState } from "react"; import { useSelector } from "react-redux"; -import { getCreationTimestamp } from "oxalis/model/accessors/annotation_accessor"; +import type { APIUpdateActionBatch } from "types/api_flow_types"; const ENTRIES_PER_PAGE = 5000; diff --git a/frontend/javascripts/oxalis/view/version_view.tsx b/frontend/javascripts/oxalis/view/version_view.tsx index e755a6c6f38..60a1f475377 100644 --- a/frontend/javascripts/oxalis/view/version_view.tsx +++ b/frontend/javascripts/oxalis/view/version_view.tsx @@ -1,14 +1,14 @@ -import { Button, Alert } from "antd"; import { CloseOutlined } from "@ant-design/icons"; -import { connect, useDispatch } from "react-redux"; -import * as React from "react"; +import { Alert, Button } from "antd"; +import { useWillUnmount } from "beautiful-react-hooks"; import { setAnnotationAllowUpdateAction } from "oxalis/model/actions/annotation_actions"; import { setVersionRestoreVisibilityAction } from "oxalis/model/actions/ui_actions"; import type { OxalisState, Tracing } from "oxalis/store"; import Store from "oxalis/store"; import VersionList, { previewVersion } from "oxalis/view/version_list"; +import * as React from "react"; import { useState } from "react"; -import { useWillUnmount } from "beautiful-react-hooks"; +import { connect, useDispatch } from "react-redux"; export type Versions = { skeleton?: number | null | undefined; diff --git a/frontend/javascripts/oxalis/view/viewport_status_indicator.tsx b/frontend/javascripts/oxalis/view/viewport_status_indicator.tsx index bd2a708d4ec..ffee594861b 100644 --- 
a/frontend/javascripts/oxalis/view/viewport_status_indicator.tsx +++ b/frontend/javascripts/oxalis/view/viewport_status_indicator.tsx @@ -1,10 +1,10 @@ -import * as React from "react"; -import _ from "lodash"; import { WarningOutlined } from "@ant-design/icons"; -import { getUnrenderableLayerInfosForCurrentZoom } from "oxalis/model/accessors/flycam_accessor"; +import FastTooltip from "components/fast_tooltip"; import { usePolledState } from "libs/react_helpers"; +import _ from "lodash"; +import { getUnrenderableLayerInfosForCurrentZoom } from "oxalis/model/accessors/flycam_accessor"; import type { SmallerOrHigherInfo } from "oxalis/model/helpers/mag_info"; -import FastTooltip from "components/fast_tooltip"; +import * as React from "react"; const { useState } = React; type UnrenderableLayerNamesInfo = { diff --git a/frontend/javascripts/oxalis/workers/async_bucket_picker.worker.ts b/frontend/javascripts/oxalis/workers/async_bucket_picker.worker.ts index 61df9c1247a..ec8409ab0e3 100644 --- a/frontend/javascripts/oxalis/workers/async_bucket_picker.worker.ts +++ b/frontend/javascripts/oxalis/workers/async_bucket_picker.worker.ts @@ -1,10 +1,10 @@ import PriorityQueue from "js-priority-queue"; -import type { LoadingStrategy, PlaneRects } from "oxalis/store"; import type { Matrix4x4 } from "libs/mjs"; import type { Vector3, Vector4, ViewMode } from "oxalis/constants"; import constants from "oxalis/constants"; import determineBucketsForFlight from "oxalis/model/bucket_data_handling/bucket_picker_strategies/flight_bucket_picker"; import determineBucketsForOblique from "oxalis/model/bucket_data_handling/bucket_picker_strategies/oblique_bucket_picker"; +import type { LoadingStrategy, PlaneRects } from "oxalis/store"; import { expose } from "./comlink_wrapper"; type PriorityItem = { diff --git a/frontend/javascripts/oxalis/workers/byte_array_lz4_compression.worker.ts b/frontend/javascripts/oxalis/workers/byte_array_lz4_compression.worker.ts index 7612047af0e..9e194fa7280 
100644 --- a/frontend/javascripts/oxalis/workers/byte_array_lz4_compression.worker.ts +++ b/frontend/javascripts/oxalis/workers/byte_array_lz4_compression.worker.ts @@ -1,5 +1,5 @@ -import { expose } from "./comlink_wrapper"; import * as lz4 from "lz4-wasm"; +import { expose } from "./comlink_wrapper"; function compressLz4Block(data: Uint8Array, compress: boolean): Uint8Array { if (compress) { diff --git a/frontend/javascripts/oxalis/workers/slow_byte_array_lz4_compression.worker.ts b/frontend/javascripts/oxalis/workers/slow_byte_array_lz4_compression.worker.ts index 67cf5ee2ec2..9f3b9f2440b 100644 --- a/frontend/javascripts/oxalis/workers/slow_byte_array_lz4_compression.worker.ts +++ b/frontend/javascripts/oxalis/workers/slow_byte_array_lz4_compression.worker.ts @@ -1,8 +1,8 @@ +import { sleep } from "libs/utils"; // NOTE: This is a mirror of byte_array_lz4_compression.worker.js // and is ONLY meant for mocking during tests. This implementation // allows to introduce an artificial delay for compression/decompression. 
import { __compressLz4BlockHelper } from "oxalis/workers/byte_array_lz4_compression.worker"; -import { sleep } from "libs/utils"; let isSleepEnabled = false; export function setSlowCompression(isEnabled: boolean) { isSleepEnabled = isEnabled; diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index a016e394a8f..4a3d24c73e3 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -1,7 +1,7 @@ import { createExplorational, - getUnversionedAnnotationInformation, getShortLink, + getUnversionedAnnotationInformation, } from "admin/admin_rest_api"; import AcceptInviteView from "admin/auth/accept_invite_view"; import AuthTokenView from "admin/auth/auth_token_view"; @@ -13,8 +13,8 @@ import StartResetPasswordView from "admin/auth/start_reset_password_view"; import DatasetAddView from "admin/dataset/dataset_add_view"; import JobListView from "admin/job/job_list_view"; import Onboarding from "admin/onboarding"; -import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; import OrganizationEditView from "admin/organization/organization_edit_view"; +import { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; import ProjectCreateView from "admin/project/project_create_view"; import ProjectListView from "admin/project/project_list_view"; import ScriptCreateView from "admin/scripts/script_create_view"; @@ -59,20 +59,20 @@ import { TracingTypeEnum, } from "types/api_flow_types"; -import ErrorBoundary from "components/error_boundary"; -import { Store } from "oxalis/singletons"; -import VerifyEmailView from "admin/auth/verify_email_view"; -import TimeTrackingOverview from "admin/statistic/time_tracking_overview"; -import loadable from "libs/lazy_loader"; -import type { EmptyObject } from "types/globals"; -import { DatasetURLImport } from "admin/dataset/dataset_url_import"; -import AiModelListView from "admin/voxelytics/ai_model_list_view"; -import { CheckTermsOfServices } from 
"components/terms_of_services_check"; import { getDatasetIdFromNameAndOrganization, getOrganizationForDataset, } from "admin/api/disambiguate_legacy_routes"; +import VerifyEmailView from "admin/auth/verify_email_view"; +import { DatasetURLImport } from "admin/dataset/dataset_url_import"; +import TimeTrackingOverview from "admin/statistic/time_tracking_overview"; +import AiModelListView from "admin/voxelytics/ai_model_list_view"; +import ErrorBoundary from "components/error_boundary"; +import { CheckTermsOfServices } from "components/terms_of_services_check"; +import loadable from "libs/lazy_loader"; import { getDatasetIdOrNameFromReadableURLPart } from "oxalis/model/accessors/dataset_accessor"; +import { Store } from "oxalis/singletons"; +import type { EmptyObject } from "types/globals"; const { Content } = Layout; diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 907fee27b34..5a4d54da82f 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -16,14 +16,14 @@ const AccessorMock = { mockRequire("libs/date", DateMock); mockRequire("oxalis/model/accessors/annotation_accessor", AccessorMock); -const SaveActions = mockRequire.reRequire( +const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions") as typeof import( "oxalis/model/actions/save_actions", -) as typeof import("oxalis/model/actions/save_actions"); +); const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer") .default as typeof import("oxalis/model/reducers/save_reducer")["default"]; -const { createEdge } = mockRequire.reRequire( +const { createEdge } = mockRequire.reRequire("oxalis/model/sagas/update_actions") as typeof import( "oxalis/model/sagas/update_actions", -) as typeof import("oxalis/model/sagas/update_actions"); +); const tracingId = "1234567890"; const initialState = { diff --git 
a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index ca21bf98201..17a85a24811 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -16,7 +16,9 @@ import { hasRootSagaCrashed } from "oxalis/model/sagas/root_saga"; import { omit } from "lodash"; const { createTreeMapFromTreeArray, generateTreeName } = - require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import("oxalis/model/reducers/skeletontracing_reducer_helpers"); + require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import( + "oxalis/model/reducers/skeletontracing_reducer_helpers", + ); const { addTreesAndGroupsAction, deleteNodeAction } = mockRequire.reRequire( "oxalis/model/actions/skeletontracing_actions", @@ -24,9 +26,9 @@ const { addTreesAndGroupsAction, deleteNodeAction } = mockRequire.reRequire( const { discardSaveQueuesAction } = mockRequire.reRequire( "oxalis/model/actions/save_actions", ) as typeof import("oxalis/model/actions/save_actions"); -const UpdateActions = mockRequire.reRequire( +const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions") as typeof import( "oxalis/model/sagas/update_actions", -) as typeof import("oxalis/model/sagas/update_actions"); +); test.beforeEach(async (t) => { // Setup oxalis, this will execute model.fetch(...) and initialize the store with the tracing, etc. 
diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 107d06edcbd..0c6a2a5e51b 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -18,24 +18,24 @@ mockRequire("libs/date", DateMock); mockRequire("oxalis/model/sagas/root_saga", function* () { yield; }); -const UpdateActions = mockRequire.reRequire( +const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions") as typeof import( "oxalis/model/sagas/update_actions", -) as typeof import("oxalis/model/sagas/update_actions"); -const SaveActions = mockRequire.reRequire( +); +const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions") as typeof import( "oxalis/model/actions/save_actions", -) as typeof import("oxalis/model/actions/save_actions"); -const { take, call, put } = mockRequire.reRequire( +); +const { take, call, put } = mockRequire.reRequire("redux-saga/effects") as typeof import( "redux-saga/effects", -) as typeof import("redux-saga/effects"); +); const { pushSaveQueueAsync, sendSaveRequestToServer, toggleErrorHighlighting, addVersionNumbers, sendRequestWithToken, -} = mockRequire.reRequire( +} = mockRequire.reRequire("oxalis/model/sagas/save_saga") as typeof import( "oxalis/model/sagas/save_saga", -) as typeof import("oxalis/model/sagas/save_saga"); +); const annotationId = "annotation-abcdefgh"; const tracingId = "tracing-1234567890"; diff --git a/frontend/javascripts/theme.tsx b/frontend/javascripts/theme.tsx index c3f1a2c71bf..deafa4c3daa 100644 --- a/frontend/javascripts/theme.tsx +++ b/frontend/javascripts/theme.tsx @@ -1,12 +1,12 @@ +import { App, ConfigProvider, theme } from "antd"; +import type { AliasToken, OverrideToken } from "antd/lib/theme/interface"; +import { ToastContextMountRoot } from "libs/toast"; +import window from "libs/window"; +import type { OxalisState, Theme } from "oxalis/store"; import type React from "react"; 
import { useEffect } from "react"; import { useSelector } from "react-redux"; -import { App, ConfigProvider, theme } from "antd"; import type { APIUser } from "types/api_flow_types"; -import window from "libs/window"; -import type { OxalisState, Theme } from "oxalis/store"; -import type { AliasToken, OverrideToken } from "antd/lib/theme/interface"; -import { ToastContextMountRoot } from "libs/toast"; const ColorWKBlue = "#5660ff"; // WK ~blue/purple const ColorWKLinkHover = "#a8b4ff"; // slightly brighter WK Blue diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index fdb3622128f..3b87a018769 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -1,30 +1,30 @@ +import type { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; import _ from "lodash"; import type { - BoundingBoxObject, - Edge, - CommentType, - TreeGroup, - RecommendedConfiguration, - SegmentGroup, - MeshInformation, -} from "oxalis/store"; -import type { ServerUpdateAction } from "oxalis/model/sagas/update_actions"; + ColorObject, + LOG_LEVELS, + Point3, + TreeType, + UnitLong, + Vector3, + Vector4, + Vector6, +} from "oxalis/constants"; import type { SkeletonTracingStats, TracingStats, VolumeTracingStats, } from "oxalis/model/accessors/annotation_accessor"; +import type { ServerUpdateAction } from "oxalis/model/sagas/update_actions"; import type { - Vector3, - Vector6, - Point3, - ColorObject, - LOG_LEVELS, - Vector4, - TreeType, - UnitLong, -} from "oxalis/constants"; -import type { PricingPlanEnum } from "admin/organization/pricing_plan_utils"; + BoundingBoxObject, + CommentType, + Edge, + MeshInformation, + RecommendedConfiguration, + SegmentGroup, + TreeGroup, +} from "oxalis/store"; import type { EmptyObject } from "./globals"; export type AdditionalCoordinate = { name: string; value: number }; diff --git 
a/frontend/javascripts/types/schemas/dataset_view_configuration.schema.ts b/frontend/javascripts/types/schemas/dataset_view_configuration.schema.ts index 672acab0ad2..758414ee74c 100644 --- a/frontend/javascripts/types/schemas/dataset_view_configuration.schema.ts +++ b/frontend/javascripts/types/schemas/dataset_view_configuration.schema.ts @@ -1,5 +1,5 @@ import { BLEND_MODES } from "oxalis/constants"; -import type { DatasetLayerConfiguration, DatasetConfiguration } from "oxalis/store"; +import type { DatasetConfiguration, DatasetLayerConfiguration } from "oxalis/store"; export function getDefaultLayerViewConfiguration( dynamicDefault: Partial = {}, diff --git a/frontend/javascripts/types/schemas/dataset_view_configuration_defaults.ts b/frontend/javascripts/types/schemas/dataset_view_configuration_defaults.ts index 8aec61a2c73..2792929c262 100644 --- a/frontend/javascripts/types/schemas/dataset_view_configuration_defaults.ts +++ b/frontend/javascripts/types/schemas/dataset_view_configuration_defaults.ts @@ -1,10 +1,10 @@ import _ from "lodash"; +import { getDefaultValueRangeOfLayer, isColorLayer } from "oxalis/model/accessors/dataset_accessor"; +import type { APIDataLayer, APIDataset, APIMaybeUnimportedDataset } from "types/api_flow_types"; import { - getDefaultLayerViewConfiguration, defaultDatasetViewConfiguration, + getDefaultLayerViewConfiguration, } from "types/schemas/dataset_view_configuration.schema"; -import type { APIDataset, APIMaybeUnimportedDataset, APIDataLayer } from "types/api_flow_types"; -import { getDefaultValueRangeOfLayer, isColorLayer } from "oxalis/model/accessors/dataset_accessor"; import { validateObjectWithType } from "types/validation"; const eliminateErrors = ( diff --git a/frontend/javascripts/types/schemas/user_settings.schema.ts b/frontend/javascripts/types/schemas/user_settings.schema.ts index 5258e7ef287..f83a2ceaa5d 100644 --- a/frontend/javascripts/types/schemas/user_settings.schema.ts +++ 
b/frontend/javascripts/types/schemas/user_settings.schema.ts @@ -1,11 +1,11 @@ import { - OverwriteModeEnum, FillModeEnum, - TDViewDisplayModeEnum, InterpolationModeEnum, + OverwriteModeEnum, + TDViewDisplayModeEnum, } from "oxalis/constants"; -import { baseDatasetViewConfiguration } from "types/schemas/dataset_view_configuration.schema"; import { getMaximumBrushSize } from "oxalis/model/accessors/volumetracing_accessor"; +import { baseDatasetViewConfiguration } from "types/schemas/dataset_view_configuration.schema"; export const userSettings = { clippingDistance: { diff --git a/frontend/javascripts/types/validation.ts b/frontend/javascripts/types/validation.ts index 2bd7baf1fcc..f356510c40a 100644 --- a/frontend/javascripts/types/validation.ts +++ b/frontend/javascripts/types/validation.ts @@ -1,8 +1,8 @@ import jsonschema from "jsonschema"; -import DatasourceSchema from "types/schemas/datasource.schema"; -import UserSettingsSchema from "types/schemas/user_settings.schema"; import ViewConfigurationSchema from "types/schemas/dataset_view_configuration.schema"; +import DatasourceSchema from "types/schemas/datasource.schema"; import UrlStateSchema from "types/schemas/url_state.schema"; +import UserSettingsSchema from "types/schemas/user_settings.schema"; const validator = new jsonschema.Validator(); // @ts-expect-error ts-migrate(2345) FIXME: Argument of type '{ definitions: { "types::Vector3... 
Remove this comment to see the full error message validator.addSchema(DatasourceSchema, "/"); diff --git a/package.json b/package.json index 48eef700af7..fc909911d9a 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,7 @@ "url": "git://github.com/scalableminds/webknossos.git" }, "devDependencies": { - "@biomejs/biome": "^1.8.3", + "@biomejs/biome": "^1.9.4", "@redux-saga/testing-utils": "^1.1.5", "@shaderfrog/glsl-parser": "^0.3.0", "@types/color-hash": "^1.0.2", @@ -103,8 +103,8 @@ "stash-pop-cmake-cache": "[ -f webknossos-jni/target/native/x86_64-linux/build/CMakeCache.txt.bak ] && mv webknossos-jni/target/native/x86_64-linux/build/CMakeCache.txt.bak webknossos-jni/target/native/x86_64-linux/build/CMakeCache.txt || true", "refresh-e2e-snapshots": "yarn remove-e2e-snapshots && mkdir -p frontend/javascripts/test/snapshots/type-check && yarn stash-cmake-cache && docker compose down && docker compose up e2e-tests && yarn stash-pop-cmake-cache", "refresh-all-snapshots": "yarn remove-all-snapshots && yarn test && yarn refresh-e2e-snapshots", - "check-frontend": "yarn run biome check frontend package.json --organize-imports-enabled=false && tools/assert-no-test-only.sh", - "fix-frontend": "yarn run biome check frontend package.json --write --organize-imports-enabled=false && echo Please proofread the applied suggestions, as they may not be safe.", + "check-frontend": "yarn run biome check frontend package.json && tools/assert-no-test-only.sh", + "fix-frontend": "yarn run biome check frontend package.json --write && echo Please proofread the applied suggestions, as they may not be safe.", "format-backend": "sbt \";scalafmt; util/scalafmt; webknossosTracingstore/scalafmt; webknossosDatastore/scalafmt\"", "lint-backend": "sbt \";scapegoat\"", "licenses-backend": "sbt dumpLicenseReport", From 80ea37b8a726e1b7217289a717bcc3c7b5c7fded Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 9 Jan 2025 15:06:41 +0100 Subject: [PATCH 356/361] fix merge related problems --- 
frontend/javascripts/oxalis/api/api_latest.ts | 1 - .../model/accessors/annotation_accessor.ts | 1 - .../oxalis/view/action-bar/merge_modal_view.tsx | 1 - .../oxalis/view/action-bar/toolbar_view.tsx | 2 +- .../view/layouting/tracing_layout_view.tsx | 2 +- .../test/reducers/save_reducer.spec.ts | 8 ++++---- .../test/sagas/saga_integration.spec.ts | 8 +++----- .../javascripts/test/sagas/save_saga.spec.ts | 16 ++++++++-------- frontend/javascripts/theme.tsx | 3 ++- 9 files changed, 19 insertions(+), 23 deletions(-) diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index c35af2ff760..4c02316df52 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -1,6 +1,5 @@ import { doWithToken, - downsampleSegmentation, finishAnnotation, getMappingsForDatasetLayer, sendAnalyticsEvent, diff --git a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts index 7958f11f8e9..5fdb9a14233 100644 --- a/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/annotation_accessor.ts @@ -1,7 +1,6 @@ import _ from "lodash"; import type { OxalisState, Tracing } from "oxalis/store"; import type { EmptyObject } from "types/globals"; -import { getVolumeTracingById } from "./volumetracing_accessor"; export function mayEditAnnotationProperties(state: OxalisState) { const { owner, restrictions } = state.tracing; diff --git a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx index 2c5e8a30a78..159e3a20568 100644 --- a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx @@ -1,6 +1,5 @@ import { getAnnotationCompoundInformation, - getAnnotationInformation, getTracingForAnnotationType, 
getUnversionedAnnotationInformation, } from "admin/admin_rest_api"; diff --git a/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx b/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx index e366648a7fb..22c05fe5115 100644 --- a/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx @@ -67,7 +67,7 @@ import { import { Model } from "oxalis/singletons"; import Store, { type BrushPresets, type OxalisState } from "oxalis/store"; import { MaterializeVolumeAnnotationModal } from "oxalis/view/action-bar/starting_job_modals"; -import ButtonComponent from "oxalis/view/components/button_component"; +import ButtonComponent, { ToggleButton } from "oxalis/view/components/button_component"; import { LogSliderSetting } from "oxalis/view/components/setting_input_views"; import { showToastWarningForLargestSegmentIdMissing } from "oxalis/view/largest_segment_id_modal"; import { userSettings } from "types/schemas/user_settings.schema"; diff --git a/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx b/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx index 663b9ea83ba..4600a81cabb 100644 --- a/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx +++ b/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx @@ -1,4 +1,4 @@ -import { Layout } from "antd"; +import { ConfigProvider, Layout } from "antd"; import app from "app"; import ErrorHandling from "libs/error_handling"; import Request from "libs/request"; diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 5a4d54da82f..907fee27b34 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -16,14 +16,14 @@ const AccessorMock = { mockRequire("libs/date", DateMock); mockRequire("oxalis/model/accessors/annotation_accessor", 
AccessorMock); -const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions") as typeof import( +const SaveActions = mockRequire.reRequire( "oxalis/model/actions/save_actions", -); +) as typeof import("oxalis/model/actions/save_actions"); const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer") .default as typeof import("oxalis/model/reducers/save_reducer")["default"]; -const { createEdge } = mockRequire.reRequire("oxalis/model/sagas/update_actions") as typeof import( +const { createEdge } = mockRequire.reRequire( "oxalis/model/sagas/update_actions", -); +) as typeof import("oxalis/model/sagas/update_actions"); const tracingId = "1234567890"; const initialState = { diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index 17a85a24811..ca21bf98201 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -16,9 +16,7 @@ import { hasRootSagaCrashed } from "oxalis/model/sagas/root_saga"; import { omit } from "lodash"; const { createTreeMapFromTreeArray, generateTreeName } = - require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import( - "oxalis/model/reducers/skeletontracing_reducer_helpers", - ); + require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import("oxalis/model/reducers/skeletontracing_reducer_helpers"); const { addTreesAndGroupsAction, deleteNodeAction } = mockRequire.reRequire( "oxalis/model/actions/skeletontracing_actions", @@ -26,9 +24,9 @@ const { addTreesAndGroupsAction, deleteNodeAction } = mockRequire.reRequire( const { discardSaveQueuesAction } = mockRequire.reRequire( "oxalis/model/actions/save_actions", ) as typeof import("oxalis/model/actions/save_actions"); -const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions") as typeof import( +const UpdateActions = mockRequire.reRequire( 
"oxalis/model/sagas/update_actions", -); +) as typeof import("oxalis/model/sagas/update_actions"); test.beforeEach(async (t) => { // Setup oxalis, this will execute model.fetch(...) and initialize the store with the tracing, etc. diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 0c6a2a5e51b..107d06edcbd 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -18,24 +18,24 @@ mockRequire("libs/date", DateMock); mockRequire("oxalis/model/sagas/root_saga", function* () { yield; }); -const UpdateActions = mockRequire.reRequire("oxalis/model/sagas/update_actions") as typeof import( +const UpdateActions = mockRequire.reRequire( "oxalis/model/sagas/update_actions", -); -const SaveActions = mockRequire.reRequire("oxalis/model/actions/save_actions") as typeof import( +) as typeof import("oxalis/model/sagas/update_actions"); +const SaveActions = mockRequire.reRequire( "oxalis/model/actions/save_actions", -); -const { take, call, put } = mockRequire.reRequire("redux-saga/effects") as typeof import( +) as typeof import("oxalis/model/actions/save_actions"); +const { take, call, put } = mockRequire.reRequire( "redux-saga/effects", -); +) as typeof import("redux-saga/effects"); const { pushSaveQueueAsync, sendSaveRequestToServer, toggleErrorHighlighting, addVersionNumbers, sendRequestWithToken, -} = mockRequire.reRequire("oxalis/model/sagas/save_saga") as typeof import( +} = mockRequire.reRequire( "oxalis/model/sagas/save_saga", -); +) as typeof import("oxalis/model/sagas/save_saga"); const annotationId = "annotation-abcdefgh"; const tracingId = "tracing-1234567890"; diff --git a/frontend/javascripts/theme.tsx b/frontend/javascripts/theme.tsx index eb724dd65a0..ec761501811 100644 --- a/frontend/javascripts/theme.tsx +++ b/frontend/javascripts/theme.tsx @@ -1,4 +1,4 @@ -import { App, ConfigProvider, theme } from "antd"; +import { App, 
ConfigProvider, theme, ThemeConfig } from "antd"; import type { AliasToken, OverrideToken } from "antd/lib/theme/interface"; import { ToastContextMountRoot } from "libs/toast"; import window from "libs/window"; @@ -7,6 +7,7 @@ import type React from "react"; import { useEffect } from "react"; import { useSelector } from "react-redux"; import type { APIUser } from "types/api_flow_types"; +import _ from "lodash"; const ColorWKBlue = "#5660ff"; // WK ~blue/purple const ColorWKLinkHover = "#a8b4ff"; // slightly brighter WK Blue From dcd4a37a819ee612b972ad60209868f673c0bfb6 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 9 Jan 2025 15:14:09 +0100 Subject: [PATCH 357/361] make biome ignore some type imports because it produces invalid syntax, otherwise --- frontend/javascripts/test/reducers/save_reducer.spec.ts | 3 +++ frontend/javascripts/test/sagas/saga_integration.spec.ts | 8 +++++++- frontend/javascripts/test/sagas/save_saga.spec.ts | 4 ++++ frontend/javascripts/theme.tsx | 4 ++-- 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/test/reducers/save_reducer.spec.ts b/frontend/javascripts/test/reducers/save_reducer.spec.ts index 907fee27b34..c7d6365243d 100644 --- a/frontend/javascripts/test/reducers/save_reducer.spec.ts +++ b/frontend/javascripts/test/reducers/save_reducer.spec.ts @@ -16,11 +16,14 @@ const AccessorMock = { mockRequire("libs/date", DateMock); mockRequire("oxalis/model/accessors/annotation_accessor", AccessorMock); +// biome-ignore format: biome produces invalid syntax when formatting this const SaveActions = mockRequire.reRequire( "oxalis/model/actions/save_actions", ) as typeof import("oxalis/model/actions/save_actions"); +// biome-ignore format: biome produces invalid syntax when formatting this const SaveReducer = mockRequire.reRequire("oxalis/model/reducers/save_reducer") .default as typeof import("oxalis/model/reducers/save_reducer")["default"]; +// biome-ignore format: biome produces invalid syntax when 
formatting this const { createEdge } = mockRequire.reRequire( "oxalis/model/sagas/update_actions", ) as typeof import("oxalis/model/sagas/update_actions"); diff --git a/frontend/javascripts/test/sagas/saga_integration.spec.ts b/frontend/javascripts/test/sagas/saga_integration.spec.ts index ca21bf98201..ab3566dbed7 100644 --- a/frontend/javascripts/test/sagas/saga_integration.spec.ts +++ b/frontend/javascripts/test/sagas/saga_integration.spec.ts @@ -16,16 +16,22 @@ import { hasRootSagaCrashed } from "oxalis/model/sagas/root_saga"; import { omit } from "lodash"; const { createTreeMapFromTreeArray, generateTreeName } = - require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import("oxalis/model/reducers/skeletontracing_reducer_helpers"); + // biome-ignore format: biome produces invalid syntax when formatting this + require("oxalis/model/reducers/skeletontracing_reducer_helpers") as typeof import( + "oxalis/model/reducers/skeletontracing_reducer_helpers" + ); const { addTreesAndGroupsAction, deleteNodeAction } = mockRequire.reRequire( "oxalis/model/actions/skeletontracing_actions", + // biome-ignore format: biome produces invalid syntax when formatting this ) as typeof import("oxalis/model/actions/skeletontracing_actions"); const { discardSaveQueuesAction } = mockRequire.reRequire( "oxalis/model/actions/save_actions", + // biome-ignore format: biome produces invalid syntax when formatting this ) as typeof import("oxalis/model/actions/save_actions"); const UpdateActions = mockRequire.reRequire( "oxalis/model/sagas/update_actions", + // biome-ignore format: biome produces invalid syntax when formatting this ) as typeof import("oxalis/model/sagas/update_actions"); test.beforeEach(async (t) => { diff --git a/frontend/javascripts/test/sagas/save_saga.spec.ts b/frontend/javascripts/test/sagas/save_saga.spec.ts index 107d06edcbd..4e1e4b8092a 100644 --- a/frontend/javascripts/test/sagas/save_saga.spec.ts +++ 
b/frontend/javascripts/test/sagas/save_saga.spec.ts @@ -20,12 +20,15 @@ mockRequire("oxalis/model/sagas/root_saga", function* () { }); const UpdateActions = mockRequire.reRequire( "oxalis/model/sagas/update_actions", + // biome-ignore format: biome produces invalid syntax when formatting this ) as typeof import("oxalis/model/sagas/update_actions"); const SaveActions = mockRequire.reRequire( "oxalis/model/actions/save_actions", + // biome-ignore format: biome produces invalid syntax when formatting this ) as typeof import("oxalis/model/actions/save_actions"); const { take, call, put } = mockRequire.reRequire( "redux-saga/effects", + // biome-ignore format: biome produces invalid syntax when formatting this ) as typeof import("redux-saga/effects"); const { pushSaveQueueAsync, @@ -35,6 +38,7 @@ const { sendRequestWithToken, } = mockRequire.reRequire( "oxalis/model/sagas/save_saga", + // biome-ignore format: biome produces invalid syntax when formatting this ) as typeof import("oxalis/model/sagas/save_saga"); const annotationId = "annotation-abcdefgh"; diff --git a/frontend/javascripts/theme.tsx b/frontend/javascripts/theme.tsx index ec761501811..53f49ca5ca2 100644 --- a/frontend/javascripts/theme.tsx +++ b/frontend/javascripts/theme.tsx @@ -1,13 +1,13 @@ -import { App, ConfigProvider, theme, ThemeConfig } from "antd"; +import { App, ConfigProvider, type ThemeConfig, theme } from "antd"; import type { AliasToken, OverrideToken } from "antd/lib/theme/interface"; import { ToastContextMountRoot } from "libs/toast"; import window from "libs/window"; +import _ from "lodash"; import type { OxalisState, Theme } from "oxalis/store"; import type React from "react"; import { useEffect } from "react"; import { useSelector } from "react-redux"; import type { APIUser } from "types/api_flow_types"; -import _ from "lodash"; const ColorWKBlue = "#5660ff"; // WK ~blue/purple const ColorWKLinkHover = "#a8b4ff"; // slightly brighter WK Blue From eabcd4d4d416e78f9d309b8441be305681783fee 
Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 13 Jan 2025 14:03:02 +0100 Subject: [PATCH 358/361] more batching for migration --- .../migration.py | 146 ++++++------------ 1 file changed, 46 insertions(+), 100 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 456a1780f77..214efda3fc4 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -14,7 +14,6 @@ from functools import partial import heapq import sys -from viztracer import VizTracer import fossildbapi_pb2 as proto import VolumeTracing_pb2 as Volume @@ -74,8 +73,6 @@ def migrate_annotation(self, annotation): if versions > 1: logger.info(f"{versions} versions for {annotation['_id']}{self.get_progress()}") else: - #tracer = VizTracer(tracer_entries=10000000) - #tracer.start() if self.args.verbose: logger.info(f"Migrating annotation {annotation['_id']} (dry={self.args.dry}) ...") mapping_id_map = self.build_mapping_id_map(annotation) @@ -86,8 +83,6 @@ def migrate_annotation(self, annotation): if len(materialized_versions) == 0: raise ValueError(f"Zero materialized versions present in source FossilDB for annotation {annotation['_id']}.") self.create_and_save_annotation_proto(annotation, materialized_versions, mapping_id_map) - #tracer.stop() - #tracer.save() if time.time() - before > 1 or self.args.verbose: log_since(before, f"Migrating annotation {annotation['_id']} ({len(materialized_versions)} materialized versions)", self.get_progress()) checkpoint_logger.info(annotation['_id']) @@ -114,7 +109,7 @@ def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: updates_for_layer = [] included_revert = False next_version = newest_version - for batch_start, batch_end in reversed(list(batch_range(newest_version + 1, batch_size))): # TODO check overlaps? 
+ for batch_start, batch_end in reversed(list(batch_range(newest_version + 1, batch_size))): if batch_start > next_version: continue update_groups = self.get_update_batch(tracing_or_mapping_id, collection, batch_start, batch_end - 1) @@ -133,7 +128,6 @@ def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: return updates_for_layer, included_revert def includes_revert(self, annotation) -> bool: - logger.info("checking if includes revert") json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() layers = list(annotation["layers"].items()) @@ -145,9 +139,6 @@ def includes_revert(self, annotation) -> bool: return False def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVersionMapping: - put_updates_buffer_size = 500 - logger.info("migrating updates...") - before = time.time() all_update_groups = [] json_encoder = msgspec.json.Encoder() json_decoder = msgspec.json.Decoder() @@ -163,13 +154,13 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers layer_updates, _ = self.fetch_updates(mapping_id, "editableMapping", "editableMappingUpdates", json_encoder=json_encoder, json_decoder=json_decoder) all_update_groups.append(layer_updates) tracing_ids_and_mapping_ids.append(mapping_id) - log_since(before, "fetch updates") unified_version = 0 version_mapping = {} for tracing_or_mapping_id in tracing_ids_and_mapping_ids: version_mapping[tracing_or_mapping_id] = {0: 0} # We always want to keep the initial version 0 of all layers, even if there are no updates at all. + put_updates_buffer_size = 5000 buffered_versions_to_put = [] buffered_updates_to_put = [] # We use a priority queue to efficiently select which tracing each next update should come from. 
@@ -199,10 +190,9 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers heapq.heappush(queue, (next_element, layer_index, element_index + 1)) if len(buffered_versions_to_put) > 0: - # flush + # flush rest self.save_update_groups(annotation['_id'], buffered_versions_to_put, buffered_updates_to_put) - log_since(before, "updates total") return version_mapping def get_editable_mapping_id(self, tracing_id: str, layer_type: str) -> Optional[str]: @@ -270,7 +260,7 @@ def process_update_group(self, tracing_id: str, layer_type: str, update_group_ra return json_encoder.encode(update_group_parsed), action_timestamp, revert_source_version def save_update_groups(self, annotation_id: str, versions: List[int], update_groups_raw: List[bytes]) -> None: - self.save_multiple_versions(collection="annotationUpdates", key=annotation_id, versions=versions, values=update_groups_raw) + self.put_multiple_versions(collection="annotationUpdates", key=annotation_id, versions=versions, values=update_groups_raw) def get_newest_version(self, tracing_id: str, collection: str) -> int: getReply = self.src_stub.Get( @@ -303,7 +293,6 @@ def migrate_materialized_layers(self, annotation: RealDictRow, layer_version_map return materialized_versions def migrate_materialized_layer(self, tracing_id: str, layer_type: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: - logger.info(f"migrating materialized {layer_type} layer {tracing_id}...") if layer_type == "Skeleton": return self.migrate_skeleton_proto(tracing_id, layer_version_mapping) if layer_type == "Volume": @@ -332,12 +321,11 @@ def migrate_skeleton_proto(self, tracing_id: str, layer_version_mapping: LayerVe return materialized_versions_unified def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): - volume_proto_page_size = 500 - logger.info("migrating volume protos...") + volume_proto_page_size = 2000 collection 
= "volumes" materialized_versions_unified = [] - before = time.time() newest_tracing_version = max(layer_version_mapping[tracing_id].keys()) + count = 0 for version_range_start, version_range_end in batch_range(newest_tracing_version + 1, volume_proto_page_size): reply = self.src_stub.GetMultipleVersions( proto.GetMultipleVersionsRequest(collection=collection, key=tracing_id, oldestVersion=version_range_start, newestVersion=version_range_end - 1) @@ -356,35 +344,10 @@ def migrate_volume_proto(self, tracing_id: str, layer_version_mapping: LayerVers volume.mappingName = tracing_id value_bytes = volume.SerializeToString() materialized_versions_unified.append(new_version) + count += 1 versions_to_put.append(new_version) values_to_put.append(value_bytes) self.put_multiple_versions(collection, tracing_id, versions_to_put, values_to_put) - log_since(before, "volume proto total") - return materialized_versions_unified - - def migrate_volume_proto_LEGACY(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap): - logger.info("migrating volume protos...") - collection = "volumes" - materialized_versions_unified = [] - before = time.time() - materialized_versions = self.list_versions(collection, tracing_id) - print(materialized_versions) - log_since(before, "list versions volume proto") - for materialized_version in materialized_versions: - if materialized_version not in layer_version_mapping[tracing_id]: - continue - new_version = layer_version_mapping[tracing_id][materialized_version] - value_bytes = self.get_bytes(collection, tracing_id, materialized_version) - if materialized_version != new_version or tracing_id in mapping_id_map: - volume = Volume.VolumeTracing() - volume.ParseFromString(value_bytes) - volume.version = new_version - if tracing_id in mapping_id_map: - volume.mappingName = tracing_id - value_bytes = volume.SerializeToString() - materialized_versions_unified.append(new_version) - self.save_bytes(collection, 
tracing_id, new_version, value_bytes) - log_since(before, "volume proto total") return materialized_versions_unified def list_versions(self, collection, key) -> List[int]: @@ -409,70 +372,37 @@ def put_multiple_versions(self, collection: str, key: str, versions: List[int], def put_multiple_keys_versions(self, collection: str, to_put) -> None: if self.dst_stub is not None: + before = time.time() reply = self.dst_stub.PutMultipleKeysWithMultipleVersions(proto.PutMultipleKeysWithMultipleVersionsRequest(collection=collection, versionedKeyValuePairs = to_put)) assert_grpc_success(reply) - # TODO remove if multi-put is faster - def save_multiple_versions(self, collection: str, key: str, versions: List[int], values: List[bytes]) -> None: - if self.dst_stub is not None: - reply = self.dst_stub.PutMultipleVersions(proto.PutMultipleVersionsRequest(collection=collection, key=key, versions=versions, values=values)) - assert_grpc_success(reply) - def migrate_volume_buckets(self, tracing_id: str, layer_version_mapping: LayerVersionMapping): - logger.info("migrating volume buckets...") - before = time.time() self.migrate_all_versions_and_keys_with_prefix("volumeData", tracing_id, layer_version_mapping, transform_key=self.remove_morton_index) - log_since(before, "migrating volume buckets") - - - def migrate_all_versions_and_keys_with_prefix_LEGACY(self, collection: str, tracing_or_mapping_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): - list_keys_page_size = 5000 - versions_page_size = 500 - current_start_after_key = tracing_or_mapping_id + "." # . 
is lexicographically before / - newest_tracing_version = max(layer_version_mapping[tracing_or_mapping_id].keys()) - while True: - list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key)) - assert_grpc_success(list_keys_reply) - if len(list_keys_reply.keys) == 0: - # We iterated towards the very end of the collection - return - for key in list_keys_reply.keys: - if key.startswith(tracing_or_mapping_id): - for version_range_start, version_range_end in batch_range(newest_tracing_version, versions_page_size): - get_versions_reply = self.src_stub.GetMultipleVersions(proto.GetMultipleVersionsRequest(collection=collection, key=key, oldestVersion=version_range_start, newestVersion=version_range_end)) - assert_grpc_success(get_versions_reply) - new_key = key - if transform_key is not None: - new_key = transform_key(key) - versions_to_save = [] - values_to_save = [] - for version, value in zip(get_versions_reply.versions, get_versions_reply.values): - if version not in layer_version_mapping[tracing_or_mapping_id]: - continue - new_version = layer_version_mapping[tracing_or_mapping_id][version] - versions_to_save.append(new_version) - values_to_save.append(value) - self.save_multiple_versions(collection, new_key, versions_to_save, values_to_save) - current_start_after_key = key - else: - # We iterated past the elements of the current tracing - return - - def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_or_mapping_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]]): + def migrate_all_versions_and_keys_with_prefix( + self, + collection: str, + tracing_or_mapping_id: str, + layer_version_mapping: LayerVersionMapping, + transform_key: Optional[Callable[[str], str]], + get_keys_page_size = 500, + put_buffer_size = 5000 + ): list_keys_page_size = 10000 - get_keys_page_size = 500 + put_buffer = [] 
current_start_after_key = tracing_or_mapping_id + "." # . is lexicographically before / while True: list_keys_reply = self.src_stub.ListKeys(proto.ListKeysRequest(collection=collection, limit=list_keys_page_size, startAfterKey=current_start_after_key, prefix=tracing_or_mapping_id)) assert_grpc_success(list_keys_reply) if len(list_keys_reply.keys) == 0: # We iterated towards the very end of the collection + if len(put_buffer) > 0: + self.put_multiple_keys_versions(collection, put_buffer) return for key_batch in batch_list(list_keys_reply.keys, get_keys_page_size): get_keys_with_versions_reply = self.src_stub.GetMultipleKeysByListWithMultipleVersions(proto.GetMultipleKeysByListWithMultipleVersionsRequest(collection=collection, keys=key_batch)) assert_grpc_success(get_keys_with_versions_reply) - to_put = [] + for keyVersionsValuesPair in get_keys_with_versions_reply.keyVersionsValuesPairs: key = keyVersionsValuesPair.key if not key.startswith(tracing_or_mapping_id): @@ -485,22 +415,22 @@ def migrate_all_versions_and_keys_with_prefix(self, collection: str, tracing_or_ continue new_version = layer_version_mapping[tracing_or_mapping_id][version_value_pair.actualVersion] versioned_key_value_pair = proto.VersionedKeyValuePairProto() - versioned_key_value_pair.key = key + versioned_key_value_pair.key = new_key versioned_key_value_pair.version = new_version versioned_key_value_pair.value = version_value_pair.value - to_put.append(versioned_key_value_pair) + put_buffer.append(versioned_key_value_pair) + if len(put_buffer) >= put_buffer_size: + self.put_multiple_keys_versions(collection, put_buffer) + put_buffer = [] - self.put_multiple_keys_versions(collection, to_put) current_start_after_key = list_keys_reply.keys[-1] def migrate_segment_index(self, tracing_id, layer_version_mapping): - logger.info("migrating volume segment index...") self.migrate_all_versions_and_keys_with_prefix("volumeSegmentIndex", tracing_id, layer_version_mapping, transform_key=None) def 
migrate_editable_mapping(self, tracing_id: str, layer_version_mapping: LayerVersionMapping, mapping_id_map: MappingIdMap) -> List[int]: if tracing_id not in mapping_id_map: return [] - logger.info(f"migrating editable mapping of tracing {tracing_id}...") mapping_id = mapping_id_map[tracing_id] materialized_versions = self.migrate_editable_mapping_info(tracing_id, mapping_id, layer_version_mapping) self.migrate_editable_mapping_agglomerate_to_graph(tracing_id, mapping_id, layer_version_mapping) @@ -525,7 +455,9 @@ def migrate_editable_mapping_agglomerate_to_graph(self, tracing_id: str, mapping "editableMappingsAgglomerateToGraph", mapping_id, layer_version_mapping, - transform_key=partial(self.replace_before_first_slash, tracing_id) + transform_key=partial(self.replace_before_first_slash, tracing_id), + get_keys_page_size = 100, + put_buffer_size = 100 ) def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping): @@ -533,11 +465,12 @@ def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mappi "editableMappingsSegmentToAgglomerate", mapping_id, layer_version_mapping, - transform_key=partial(self.replace_before_first_slash, tracing_id) + transform_key=partial(self.replace_before_first_slash, tracing_id), + get_keys_page_size = 300, + put_buffer_size = 300 ) def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int], mapping_id_map: MappingIdMap): - logger.info("writing annotationProtos...") skeleton_may_have_pending_updates = self.skeleton_may_have_pending_updates(annotation) editable_mapping_may_have_pending_updates = bool(mapping_id_map) # same problem as with skeletons, see comment there earliest_accessible_version = 0 @@ -548,6 +481,9 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Se # So we forbid it. 
earliest_accessible_version = max(materialized_versions) # We write an annotationProto object for every materialized version of every layer. + put_buffer_size = 5000 + versions_to_put_buffer = [] + values_to_put_buffer = [] for version in materialized_versions: annotationProto = AnnotationProto.AnnotationProto() annotationProto.description = annotation["description"] or "" @@ -566,7 +502,17 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Se layer_type_proto = AnnotationProto.AnnotationLayerTypeProto.Volume layer_proto.typ = layer_type_proto annotationProto.annotationLayers.append(layer_proto) - self.save_bytes(collection="annotations", key=annotation["_id"], version=version, value=annotationProto.SerializeToString()) + versions_to_put_buffer.append(version) + values_to_put_buffer.append(annotationProto.SerializeToString()) + if len(versions_to_put_buffer) >= put_buffer_size: + # flush + self.put_multiple_versions("annotations", key=annotation["_id"], versions=versions_to_put_buffer, values=values_to_put_buffer) + versions_to_put_buffer = [] + values_to_put_buffer = [] + + if len(versions_to_put_buffer) > 0: + # flush rest + self.put_multiple_versions("annotations", key=annotation["_id"], versions=versions_to_put_buffer, values=values_to_put_buffer) def skeleton_may_have_pending_updates(self, annotation) -> bool: # Skeletons in the old code had their updates applied lazily. 
From 8f3b2126e1a86379311b504259a1c469b9c37d0d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 13 Jan 2025 14:51:55 +0100 Subject: [PATCH 359/361] include actionTracingId, actionTimestamp in compact action writes --- .../tracingstore/tracings/volume/VolumeUpdateActions.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index f7cb2a2808f..c4e5fae0be6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -369,7 +369,12 @@ object CompactVolumeUpdateAction { } yield CompactVolumeUpdateAction(name, actionTracingId, actionTimestamp, actionAuthorId, value) override def writes(o: CompactVolumeUpdateAction): JsValue = - Json.obj("name" -> o.name, "value" -> (Json.obj("actionTimestamp" -> o.actionTimestamp) ++ o.value)) + Json.obj( + "name" -> o.name, + "value" -> (Json.obj("actionTracingId" -> o.actionTracingId, + "actionTimestamp" -> o.actionTimestamp, + "actionAuthorId" -> o.actionAuthorId) ++ o.value) + ) } } From 0f0271e2fbb6cb3b59abdbb0b1c36178f24696a4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 13 Jan 2025 15:01:22 +0100 Subject: [PATCH 360/361] Fix annotation duplicate --- .../tracingstore/annotation/TSAnnotationService.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala index 621b126555b..1a7bf5985b1 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/annotation/TSAnnotationService.scala @@ -760,6 +760,8 @@ class TSAnnotationService @Inject()(val remoteWebknossosClient: TSRemoteWebknoss for { mappedTracingId <- tracingIdMapMutable.get(a.actionTracingId) ?~> "duplicating action for unknown layer" } yield a.withActionTracingId(mappedTracingId) + case a: UpdateAction => + Fox.successful(a) } _ <- tracingDataStore.annotationUpdates.put(newAnnotationId, version, Json.toJson(updateListAdapted)) } yield () From 4acdf63a758255a4bdf319ba34625a3584e657f6 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 13 Jan 2025 16:23:45 +0000 Subject: [PATCH 361/361] decrease batch sizes to avoid grpc message size limits --- .../migration.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tools/migration-unified-annotation-versioning/migration.py b/tools/migration-unified-annotation-versioning/migration.py index 214efda3fc4..74a33791081 100644 --- a/tools/migration-unified-annotation-versioning/migration.py +++ b/tools/migration-unified-annotation-versioning/migration.py @@ -104,7 +104,7 @@ def build_mapping_id_map(self, annotation) -> MappingIdMap: return mapping_id_map def fetch_updates(self, tracing_or_mapping_id: str, layer_type: str, collection: str, json_encoder, json_decoder) -> Tuple[List[Tuple[int, int, bytes]], bool]: - batch_size = 1000 + batch_size = 100 newest_version = self.get_newest_version(tracing_or_mapping_id, collection) updates_for_layer = [] included_revert = False @@ -160,7 +160,7 @@ def migrate_updates(self, annotation, mapping_id_map: MappingIdMap) -> LayerVers for tracing_or_mapping_id in tracing_ids_and_mapping_ids: version_mapping[tracing_or_mapping_id] = {0: 0} # We always want to keep the initial version 0 of all layers, even if there are no updates at all. 
- put_updates_buffer_size = 5000 + put_updates_buffer_size = 100 buffered_versions_to_put = [] buffered_updates_to_put = [] # We use a priority queue to efficiently select which tracing each next update should come from. @@ -385,8 +385,8 @@ def migrate_all_versions_and_keys_with_prefix( tracing_or_mapping_id: str, layer_version_mapping: LayerVersionMapping, transform_key: Optional[Callable[[str], str]], - get_keys_page_size = 500, - put_buffer_size = 5000 + get_keys_page_size = 200, + put_buffer_size = 1000 ): list_keys_page_size = 10000 put_buffer = [] @@ -456,8 +456,8 @@ def migrate_editable_mapping_agglomerate_to_graph(self, tracing_id: str, mapping mapping_id, layer_version_mapping, transform_key=partial(self.replace_before_first_slash, tracing_id), - get_keys_page_size = 100, - put_buffer_size = 100 + get_keys_page_size = 20, + put_buffer_size = 20 ) def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mapping_id: str, layer_version_mapping: LayerVersionMapping): @@ -466,8 +466,8 @@ def migrate_editable_mapping_segment_to_agglomerate(self, tracing_id: str, mappi mapping_id, layer_version_mapping, transform_key=partial(self.replace_before_first_slash, tracing_id), - get_keys_page_size = 300, - put_buffer_size = 300 + get_keys_page_size = 30, + put_buffer_size = 30 ) def create_and_save_annotation_proto(self, annotation, materialized_versions: Set[int], mapping_id_map: MappingIdMap): @@ -481,7 +481,7 @@ def create_and_save_annotation_proto(self, annotation, materialized_versions: Se # So we forbid it. earliest_accessible_version = max(materialized_versions) # We write an annotationProto object for every materialized version of every layer. - put_buffer_size = 5000 + put_buffer_size = 1000 versions_to_put_buffer = [] values_to_put_buffer = [] for version in materialized_versions: